From 89980bf8dd4c967fc4a152ae9ddeb7a88525a40a Mon Sep 17 00:00:00 2001 From: Derek Ho Date: Tue, 1 Aug 2023 11:53:48 -0400 Subject: [PATCH 01/42] fix fail backport (#1915) Signed-off-by: Derek Ho --- .github/workflows/backport.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index e47d8d88c0..9537ddebda 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -22,7 +22,9 @@ jobs: installation_id: 22958780 - name: Backport - uses: VachaShah/backport@v1.1.4 + uses: VachaShah/backport@v2.2.0 with: github_token: ${{ steps.github_app_token.outputs.token }} - branch_name: backport/backport-${{ github.event.number }} + head_template: backport/backport-<%= number %>-to-<%= base %> + labels_template: "<%= JSON.stringify([...labels, 'autocut']) %>" + failure_labels: "failed backport" From d00dc4d20b2c44434a52c672603f6b3bacd30692 Mon Sep 17 00:00:00 2001 From: Guian Gumpac Date: Tue, 1 Aug 2023 13:23:35 -0700 Subject: [PATCH 02/42] Fixed imports (#1919) Signed-off-by: Guian Gumpac --- .../sql/opensearch/monitor/OpenSearchResourceMonitor.java | 2 +- .../sql/opensearch/setting/LegacyOpenDistroSettings.java | 2 +- .../sql/opensearch/monitor/OpenSearchResourceMonitorTest.java | 2 +- .../sql/opensearch/setting/OpenSearchSettingsTest.java | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java index e6e070ed52..5ed82c7a5d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java @@ -11,7 +11,7 @@ import io.github.resilience4j.retry.RetryConfig; import java.util.function.Supplier; import lombok.extern.log4j.Log4j2; 
-import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.monitor.ResourceMonitor; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java index 7ac7e6eccb..3eadea482b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java @@ -11,8 +11,8 @@ import java.util.List; import lombok.experimental.UtilityClass; import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.sql.common.setting.LegacySettings; @UtilityClass diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java index 75f1ea7a17..cd27b0710e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java @@ -18,7 +18,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.sql.common.setting.Settings; @ExtendWith(MockitoExtension.class) diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java index 
923021f501..b4c8cc8c69 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java @@ -29,7 +29,7 @@ import org.opensearch.cluster.ClusterName; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.monitor.jvm.JvmInfo; import org.opensearch.sql.common.setting.LegacySettings; import org.opensearch.sql.common.setting.Settings; From d2de909fc7b988feaa4dfb4289233ea80cbdf16a Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 2 Aug 2023 17:19:51 -0700 Subject: [PATCH 03/42] [Spotless] Adds new line at end of java files (#1896) * Adding spotless plugin to project build.gradle. Signed-off-by: Mitchell Gale Remove commented out spotless changes. Signed-off-by: Mitchell Gale Adding Spotless to DEVELOPER_GUIDE.rst Signed-off-by: Mitchell Gale Added Google Java format to spotless. Signed-off-by: Mitchell Gale Update DEVELOPER_GUIDE.rst Co-authored-by: Yury-Fridlyand * Added apply false for spotless Signed-off-by: Mitchell Gale * Adding ratchetFrom to build.gradle Signed-off-by: Mitchell Gale * Adding license header to build.gradle for spotless. Signed-off-by: Mitchell Gale * Uncommenting all changes to build.gradle for spotless. Signed-off-by: Mitchell Gale * Commented out spotless checks. Signed-off-by: Mitchell Gale * Add specific version for java format spotless (1.17.0) Signed-off-by: Mitchell Gale * Add spotless end with new line check. Signed-off-by: Mitchell Gale * Add new line at end of files missing lines. Signed-off-by: Mitchell Gale * Removed include path Signed-off-by: Mitchell Gale * Correcting ending new line for QueryExemplars. 
Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand --- build.gradle | 12 +- .../opensearch/sql/common/grok/Converter.java | 336 +++++++++--------- .../org/opensearch/sql/ast/tree/RareTopN.java | 1 - .../arthmetic/ArithmeticFunction.java | 2 +- .../sql/expression/text/TextFunction.java | 1 - .../planner/logical/LogicalRelationTest.java | 2 +- ...undedOutOfOrderWatermarkGeneratorTest.java | 2 +- .../streaming/windowing/WindowTest.java | 2 +- .../assigner/SlidingWindowAssignerTest.java | 2 +- .../assigner/TumblingWindowAssignerTest.java | 2 +- .../AfterWatermarkWindowTriggerTest.java | 2 +- .../storage/write/TableWriteOperatorTest.java | 2 +- .../sql/datasources/encryptor/Encryptor.java | 2 +- .../datasources/encryptor/EncryptorImpl.java | 2 +- .../CreateDataSourceActionRequest.java | 2 +- .../CreateDataSourceActionResponse.java | 2 +- .../DeleteDataSourceActionRequest.java | 2 +- .../DeleteDataSourceActionResponse.java | 2 +- .../transport/GetDataSourceActionRequest.java | 2 +- .../GetDataSourceActionResponse.java | 2 +- .../UpdateDataSourceActionRequest.java | 2 +- .../UpdateDataSourceActionResponse.java | 2 +- .../rest/RestDataSourceQueryAction.java | 2 +- .../service/DataSourceMetadataStorage.java | 2 +- .../TransportCreateDataSourceAction.java | 2 +- .../TransportDeleteDataSourceAction.java | 2 +- .../TransportGetDataSourceAction.java | 2 +- .../TransportUpdateDataSourceAction.java | 2 +- .../sql/datasources/utils/Scheduler.java | 2 +- .../encryptor/EncryptorImplTest.java | 2 +- .../TransportCreateDataSourceActionTest.java | 2 +- .../TransportDeleteDataSourceActionTest.java | 2 +- .../TransportGetDataSourceActionTest.java | 2 +- .../TransportUpdateDataSourceActionTest.java | 2 +- .../sql/datasources/utils/SchedulerTest.java | 2 +- .../opensearch/sql/legacy/domain/Select.java | 1 - .../format/BindingTupleResultSet.java | 1 - .../sql/legacy/parser/SQLOdbcExpr.java | 1 - 
.../nestedfield/NestedFieldRewriter.java | 1 - .../sql/legacy/spatial/CellFilterParams.java | 1 - .../semantic/SemanticAnalyzerBasicTest.java | 2 +- .../node/scroll/SearchHitRowTest.java | 2 +- .../sql/legacy/unittest/HavingTest.java | 1 - .../planner/physical/MLCommonsOperator.java | 1 - .../planner/physical/MLOperator.java | 1 - .../ppl/antlr/NowLikeFunctionParserTest.java | 148 ++++---- .../sql/ppl/antlr/PPLSyntaxParserTest.java | 1 - .../client/PrometheusClientImpl.java | 2 +- .../storage/PrometheusMetricTable.java | 2 +- .../PrometheusDefaultImplementor.java | 2 +- .../querybuilder/StepParameterResolver.java | 2 +- ...ryExemplarsFunctionImplementationTest.java | 1 - ...eryExemplarsTableFunctionResolverTest.java | 1 - ...xemplarsFunctionTableScanOperatorTest.java | 2 +- .../storage/PrometheusStorageFactoryTest.java | 1 - .../storage/QueryExemplarsTableTest.java | 1 - .../opensearch/sql/spark/utils/TestUtils.java | 1 - 57 files changed, 286 insertions(+), 302 deletions(-) diff --git a/build.gradle b/build.gradle index ff29eb7687..bb0cd162f0 100644 --- a/build.gradle +++ b/build.gradle @@ -83,18 +83,18 @@ repositories { // Spotless checks will be added as PRs are applied to resolve each style issue is approved. 
spotless { java { -// target fileTree('.') { -// include '**/*.java', 'src/*/java/**/*.java' -// exclude '**/build/**', '**/build-*/**' -// } + target fileTree('.') { + include '**/*.java' + exclude '**/build/**', '**/build-*/**' + } // importOrder() // licenseHeader("/*\n" + // " * Copyright OpenSearch Contributors\n" + // " * SPDX-License-Identifier: Apache-2.0\n" + -// " */\n\n\n") +// " */\n\n") // removeUnusedImports() // trimTrailingWhitespace() -// endWithNewline() + endWithNewline() // googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') } } diff --git a/common/src/main/java/org/opensearch/sql/common/grok/Converter.java b/common/src/main/java/org/opensearch/sql/common/grok/Converter.java index 72bfa3d7f1..ebbe13f761 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/Converter.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/Converter.java @@ -1,168 +1,168 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.sql.common.grok; - -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.OffsetDateTime; -import java.time.ZoneId; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.time.temporal.TemporalAccessor; -import java.util.AbstractMap; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -/** - * Convert String argument to the right type. 
- */ -public class Converter { - - public enum Type { - BYTE(Byte::valueOf), - BOOLEAN(Boolean::valueOf), - SHORT(Short::valueOf), - INT(Integer::valueOf, "integer"), - LONG(Long::valueOf), - FLOAT(Float::valueOf), - DOUBLE(Double::valueOf), - DATETIME(new DateConverter(), "date"), - STRING(v -> v, "text"); - - public final IConverter converter; - public final List aliases; - - Type(IConverter converter, String... aliases) { - this.converter = converter; - this.aliases = Arrays.asList(aliases); - } - } - - private static final Pattern SPLITTER = Pattern.compile("[:;]"); - - private static final Map TYPES = - Arrays.stream(Type.values()) - .collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t)); - - private static final Map TYPE_ALIASES = - Arrays.stream(Type.values()) - .flatMap(type -> type.aliases.stream() - .map(alias -> new AbstractMap.SimpleEntry<>(alias, type))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - private static Type getType(String key) { - key = key.toLowerCase(); - Type type = TYPES.getOrDefault(key, TYPE_ALIASES.get(key)); - if (type == null) { - throw new IllegalArgumentException("Invalid data type :" + key); - } - return type; - } - - /** - * getConverters. - */ - public static Map> - getConverters(Collection groupNames, Object... params) { - return groupNames.stream() - .filter(Converter::containsDelimiter) - .collect(Collectors.toMap(Function.identity(), key -> { - String[] list = splitGrokPattern(key); - IConverter converter = getType(list[1]).converter; - if (list.length == 3) { - converter = converter.newConverter(list[2], params); - } - return converter; - })); - } - - /** - * getGroupTypes. 
- */ - public static Map getGroupTypes(Collection groupNames) { - return groupNames.stream() - .filter(Converter::containsDelimiter) - .map(Converter::splitGrokPattern) - .collect(Collectors.toMap( - l -> l[0], - l -> getType(l[1]) - )); - } - - public static String extractKey(String key) { - return splitGrokPattern(key)[0]; - } - - private static boolean containsDelimiter(String string) { - return string.indexOf(':') >= 0 || string.indexOf(';') >= 0; - } - - private static String[] splitGrokPattern(String string) { - return SPLITTER.split(string, 3); - } - - interface IConverter { - - T convert(String value); - - default IConverter newConverter(String param, Object... params) { - return this; - } - } - - - static class DateConverter implements IConverter { - - private final DateTimeFormatter formatter; - private final ZoneId timeZone; - - public DateConverter() { - this.formatter = DateTimeFormatter.ISO_DATE_TIME; - this.timeZone = ZoneOffset.UTC; - } - - private DateConverter(DateTimeFormatter formatter, ZoneId timeZone) { - this.formatter = formatter; - this.timeZone = timeZone; - } - - @Override - public Instant convert(String value) { - TemporalAccessor dt = formatter - .parseBest(value.trim(), ZonedDateTime::from, LocalDateTime::from, OffsetDateTime::from, - Instant::from, - LocalDate::from); - if (dt instanceof ZonedDateTime) { - return ((ZonedDateTime) dt).toInstant(); - } else if (dt instanceof LocalDateTime) { - return ((LocalDateTime) dt).atZone(timeZone).toInstant(); - } else if (dt instanceof OffsetDateTime) { - return ((OffsetDateTime) dt).atZoneSameInstant(timeZone).toInstant(); - } else if (dt instanceof Instant) { - return ((Instant) dt); - } else if (dt instanceof LocalDate) { - return ((LocalDate) dt).atStartOfDay(timeZone).toInstant(); - } else { - return null; - } - } - - @Override - public DateConverter newConverter(String param, Object... 
params) { - if (!(params.length == 1 && params[0] instanceof ZoneId)) { - throw new IllegalArgumentException("Invalid parameters"); - } - return new DateConverter(DateTimeFormatter.ofPattern(param), (ZoneId) params[0]); - } - } -} +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.common.grok; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.temporal.TemporalAccessor; +import java.util.AbstractMap; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * Convert String argument to the right type. + */ +public class Converter { + + public enum Type { + BYTE(Byte::valueOf), + BOOLEAN(Boolean::valueOf), + SHORT(Short::valueOf), + INT(Integer::valueOf, "integer"), + LONG(Long::valueOf), + FLOAT(Float::valueOf), + DOUBLE(Double::valueOf), + DATETIME(new DateConverter(), "date"), + STRING(v -> v, "text"); + + public final IConverter converter; + public final List aliases; + + Type(IConverter converter, String... 
aliases) { + this.converter = converter; + this.aliases = Arrays.asList(aliases); + } + } + + private static final Pattern SPLITTER = Pattern.compile("[:;]"); + + private static final Map TYPES = + Arrays.stream(Type.values()) + .collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t)); + + private static final Map TYPE_ALIASES = + Arrays.stream(Type.values()) + .flatMap(type -> type.aliases.stream() + .map(alias -> new AbstractMap.SimpleEntry<>(alias, type))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + private static Type getType(String key) { + key = key.toLowerCase(); + Type type = TYPES.getOrDefault(key, TYPE_ALIASES.get(key)); + if (type == null) { + throw new IllegalArgumentException("Invalid data type :" + key); + } + return type; + } + + /** + * getConverters. + */ + public static Map> + getConverters(Collection groupNames, Object... params) { + return groupNames.stream() + .filter(Converter::containsDelimiter) + .collect(Collectors.toMap(Function.identity(), key -> { + String[] list = splitGrokPattern(key); + IConverter converter = getType(list[1]).converter; + if (list.length == 3) { + converter = converter.newConverter(list[2], params); + } + return converter; + })); + } + + /** + * getGroupTypes. + */ + public static Map getGroupTypes(Collection groupNames) { + return groupNames.stream() + .filter(Converter::containsDelimiter) + .map(Converter::splitGrokPattern) + .collect(Collectors.toMap( + l -> l[0], + l -> getType(l[1]) + )); + } + + public static String extractKey(String key) { + return splitGrokPattern(key)[0]; + } + + private static boolean containsDelimiter(String string) { + return string.indexOf(':') >= 0 || string.indexOf(';') >= 0; + } + + private static String[] splitGrokPattern(String string) { + return SPLITTER.split(string, 3); + } + + interface IConverter { + + T convert(String value); + + default IConverter newConverter(String param, Object... 
params) { + return this; + } + } + + + static class DateConverter implements IConverter { + + private final DateTimeFormatter formatter; + private final ZoneId timeZone; + + public DateConverter() { + this.formatter = DateTimeFormatter.ISO_DATE_TIME; + this.timeZone = ZoneOffset.UTC; + } + + private DateConverter(DateTimeFormatter formatter, ZoneId timeZone) { + this.formatter = formatter; + this.timeZone = timeZone; + } + + @Override + public Instant convert(String value) { + TemporalAccessor dt = formatter + .parseBest(value.trim(), ZonedDateTime::from, LocalDateTime::from, OffsetDateTime::from, + Instant::from, + LocalDate::from); + if (dt instanceof ZonedDateTime) { + return ((ZonedDateTime) dt).toInstant(); + } else if (dt instanceof LocalDateTime) { + return ((LocalDateTime) dt).atZone(timeZone).toInstant(); + } else if (dt instanceof OffsetDateTime) { + return ((OffsetDateTime) dt).atZoneSameInstant(timeZone).toInstant(); + } else if (dt instanceof Instant) { + return ((Instant) dt); + } else if (dt instanceof LocalDate) { + return ((LocalDate) dt).atStartOfDay(timeZone).toInstant(); + } else { + return null; + } + } + + @Override + public DateConverter newConverter(String param, Object... 
params) { + if (!(params.length == 1 && params[0] instanceof ZoneId)) { + throw new IllegalArgumentException("Invalid parameters"); + } + return new DateConverter(DateTimeFormatter.ofPattern(param), (ZoneId) params[0]); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java b/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java index c884afd86a..407d37e8e4 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java @@ -57,4 +57,3 @@ public enum CommandType { RARE } } - diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java index 1f4ac3943c..e25c5cda20 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java @@ -265,4 +265,4 @@ private static DefaultFunctionResolver subtract() { private static DefaultFunctionResolver subtractFunction() { return subtractBase(BuiltinFunctionName.SUBTRACTFUNCTION.getName()); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java index e56c85a0c8..0bbfb65154 100644 --- a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java @@ -379,4 +379,3 @@ private static ExprValue exprReverse(ExprValue str) { return new ExprStringValue(new StringBuilder(str.stringValue()).reverse().toString()); } } - diff --git a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java index dacd84e12b..6426ebb63f 100644 --- 
a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java @@ -33,4 +33,4 @@ public void logicalRelationWithDataSourceHasNoInput() { assertEquals(0, relation.getChild().size()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGeneratorTest.java b/core/src/test/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGeneratorTest.java index 1d18a16f2a..9522c600b8 100644 --- a/core/src/test/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGeneratorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGeneratorTest.java @@ -58,4 +58,4 @@ public AssertionHelper shouldGenerateWatermark(long expected) { return this; } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/WindowTest.java b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/WindowTest.java index 9b9aafa933..c8214f5771 100644 --- a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/WindowTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/WindowTest.java @@ -18,4 +18,4 @@ void test() { assertEquals(2000, window.getEndTime()); assertEquals(1999, window.maxTimestamp()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java index fd69065742..886537e088 100644 --- a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java @@ -49,4 
+49,4 @@ void testConstructWithIllegalArguments() { () -> new SlidingWindowAssigner(1000, 0)); assertEquals("Slide size [0] must be positive number", error2.getMessage()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java index 4c98c40f7a..55a8750c11 100644 --- a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java @@ -36,4 +36,4 @@ void testConstructWithIllegalWindowSize() { () -> new TumblingWindowAssigner(-1)); assertEquals("Window size [-1] must be positive number", error.getMessage()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTriggerTest.java b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTriggerTest.java index 3ef6907c38..d2854549ae 100644 --- a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTriggerTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTriggerTest.java @@ -31,4 +31,4 @@ void shouldFireWindowBelowWatermark() { assertEquals(TriggerResult.FIRE, trigger.trigger(new Window(500, 800))); assertEquals(TriggerResult.FIRE, trigger.trigger(new Window(500, 1000))); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java b/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java index 8780b08276..112192bde0 100644 --- a/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java +++ 
b/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java @@ -67,4 +67,4 @@ public Boolean visitTableWrite(TableWriteOperator node, Object context) { void testGetChild() { assertEquals(Collections.singletonList(child), tableWrite.getChild()); } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java index 55dc1ef18f..578b66d0ba 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java @@ -25,4 +25,4 @@ public interface Encryptor { */ String decrypt(String encryptedText); -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java index 18e3e2f257..98f693eca1 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java @@ -67,4 +67,4 @@ private void validate(String masterKey) { } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java index 2926f95220..0cbb2355ca 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java @@ -46,4 +46,4 @@ public ActionRequestValidationException validate() { return null; } } -} \ No newline at end of file +} diff --git 
a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java index 3b997a7d21..377a249a44 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java @@ -30,4 +30,4 @@ public CreateDataSourceActionResponse(StreamInput in) throws IOException { public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java index bc9d4491d5..1eb2d17bff 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java @@ -48,4 +48,4 @@ public ActionRequestValidationException validate() { } } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java index 11d4a72274..4bb6e290c5 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java @@ -30,4 +30,4 @@ public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } -} \ No newline at end of file +} diff --git 
a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java index 1e617f1866..23f4898543 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java @@ -46,4 +46,4 @@ public ActionRequestValidationException validate() { } } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java index 15437e6a3f..964e5989f8 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java @@ -30,4 +30,4 @@ public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java index cd5c761d85..11bc2d1e20 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java @@ -44,4 +44,4 @@ public ActionRequestValidationException validate() { return null; } } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java 
b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java index 8407b9675d..5f5f6f496a 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java @@ -30,4 +30,4 @@ public UpdateDataSourceActionResponse(StreamInput in) throws IOException { public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java index aa34488675..a56512f838 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java @@ -251,4 +251,4 @@ private static boolean isClientError(Exception e) { || e instanceof IllegalStateException; } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java index b54af3195e..e6483900c6 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java @@ -60,4 +60,4 @@ public interface DataSourceMetadataStorage { */ void deleteDataSourceMetadata(String datasourceName); -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java 
b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java index 4d8c51fac7..ce1c1bb157 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java @@ -57,4 +57,4 @@ protected void doExecute(Task task, CreateDataSourceActionRequest request, } } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java index 1d109ca7fc..fe2df1ee51 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java @@ -56,4 +56,4 @@ protected void doExecute(Task task, DeleteDataSourceActionRequest request, } } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java index 33d08f7cd2..7a36114755 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java @@ -92,4 +92,4 @@ protected Object buildJsonObject(DataSourceMetadata response) { }.format(dataSourceMetadata); return responseContent; } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java index 4aece69e5b..13f80733e6 
100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java @@ -56,4 +56,4 @@ protected void doExecute(Task task, UpdateDataSourceActionRequest request, } } -} \ No newline at end of file +} diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/utils/Scheduler.java b/datasources/src/main/java/org/opensearch/sql/datasources/utils/Scheduler.java index 0bc597ed4f..eaac8a7fa3 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/utils/Scheduler.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/utils/Scheduler.java @@ -30,4 +30,4 @@ private static Runnable withCurrentContext(final Runnable task) { task.run(); }; } -} \ No newline at end of file +} diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java index 8c9b140f09..d62a5a957a 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java @@ -142,4 +142,4 @@ public void testEncryptionAndDecryptionWithEmptyMasterKey() { illegalStateException.getMessage()); } -} \ No newline at end of file +} diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java index 3dd5c21214..2ece0eb5cd 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java @@ -83,4 +83,4 @@ public void testDoExecuteWithException() 
{ Assertions.assertEquals("Error", exception.getMessage()); } -} \ No newline at end of file +} diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java index 9beeb1a9a9..61b197a805 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java @@ -75,4 +75,4 @@ public void testDoExecuteWithException() { Assertions.assertEquals("Error", exception.getMessage()); } -} \ No newline at end of file +} diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java index d5506c0a45..0546df643d 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java @@ -134,4 +134,4 @@ public void testDoExecuteWithException() { Assertions.assertEquals("Error", exception.getMessage()); } -} \ No newline at end of file +} diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java index fecab012d2..2b9305c459 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java @@ -84,4 +84,4 @@ public void testDoExecuteWithException() { Assertions.assertEquals("Error", exception.getMessage()); 
} -} \ No newline at end of file +} diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java index d091e77044..e3dac306cd 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java @@ -42,4 +42,4 @@ public void testSchedule() { Assert.assertTrue(isRun.get()); } -} \ No newline at end of file +} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java index 8a6d3b265d..cd600d856e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java @@ -188,4 +188,3 @@ public boolean isSelectAll() { return selectAll; } } - diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java index c3b5d2e84d..d9eb463572 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java @@ -58,4 +58,3 @@ public static DataRows buildDataRows(List columnNodes, List hasScript(String expectedCode) { return hasFieldWithValue("script", "has script", is(new Script(expectedCode))); } } - diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java index 48e21246b2..de0c23c4e9 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java +++ 
b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java @@ -122,4 +122,3 @@ protected MLAlgoParams convertArgumentToMLParameter(Map argumen } } - diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java index 938ff60157..36834bc23a 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java @@ -109,4 +109,3 @@ protected Map processArgs(Map arguments) { return res; } } - diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java index fb849f6de7..9f635fdd81 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java @@ -1,74 +1,74 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - - -package org.opensearch.sql.ppl.antlr; - -import static org.junit.Assert.assertNotEquals; - -import java.util.List; -import org.antlr.v4.runtime.tree.ParseTree; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -public class NowLikeFunctionParserTest { - - private final PPLSyntaxParser parser = new PPLSyntaxParser(); - - /** - * Set parameterized values used in test. - * @param name Function name - * @param hasFsp Whether function has fsp argument - * @param hasShortcut Whether function has shortcut (call without `()`) - */ - public NowLikeFunctionParserTest(String name, Boolean hasFsp, Boolean hasShortcut) { - this.name = name; - this.hasFsp = hasFsp; - this.hasShortcut = hasShortcut; - } - - /** - * Returns function data to test. 
- * @return An iterable. - */ - @Parameterized.Parameters(name = "{0}") - public static Iterable functionNames() { - return List.of(new Object[][]{ - {"now", true, false}, - {"current_timestamp", true, true}, - {"localtimestamp", true, true}, - {"localtime", true, true}, - {"sysdate", true, false}, - {"curtime", true, false}, - {"current_time", true, true}, - {"curdate", false, false}, - {"current_date", false, true}, - {"utc_date", false, false}, - {"utc_time", false, false}, - {"utc_timestamp", false, false} - }); - } - - private final String name; - private final Boolean hasFsp; - private final Boolean hasShortcut; - - @Test - public void test_now_like_functions() { - for (var call : hasShortcut ? List.of(name, name + "()") : List.of(name + "()")) { - ParseTree tree = parser.parse("source=t | eval r=" + call); - assertNotEquals(null, tree); - - tree = parser.parse("search source=t | where a=" + call); - assertNotEquals(null, tree); - } - if (hasFsp) { - ParseTree tree = parser.parse("search source=t | where a=" + name + "(0)"); - assertNotEquals(null, tree); - } - } -} +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + + +package org.opensearch.sql.ppl.antlr; + +import static org.junit.Assert.assertNotEquals; + +import java.util.List; +import org.antlr.v4.runtime.tree.ParseTree; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class NowLikeFunctionParserTest { + + private final PPLSyntaxParser parser = new PPLSyntaxParser(); + + /** + * Set parameterized values used in test. 
+ * @param name Function name + * @param hasFsp Whether function has fsp argument + * @param hasShortcut Whether function has shortcut (call without `()`) + */ + public NowLikeFunctionParserTest(String name, Boolean hasFsp, Boolean hasShortcut) { + this.name = name; + this.hasFsp = hasFsp; + this.hasShortcut = hasShortcut; + } + + /** + * Returns function data to test. + * @return An iterable. + */ + @Parameterized.Parameters(name = "{0}") + public static Iterable functionNames() { + return List.of(new Object[][]{ + {"now", true, false}, + {"current_timestamp", true, true}, + {"localtimestamp", true, true}, + {"localtime", true, true}, + {"sysdate", true, false}, + {"curtime", true, false}, + {"current_time", true, true}, + {"curdate", false, false}, + {"current_date", false, true}, + {"utc_date", false, false}, + {"utc_time", false, false}, + {"utc_timestamp", false, false} + }); + } + + private final String name; + private final Boolean hasFsp; + private final Boolean hasShortcut; + + @Test + public void test_now_like_functions() { + for (var call : hasShortcut ? 
List.of(name, name + "()") : List.of(name + "()")) { + ParseTree tree = parser.parse("source=t | eval r=" + call); + assertNotEquals(null, tree); + + tree = parser.parse("search source=t | where a=" + call); + assertNotEquals(null, tree); + } + if (hasFsp) { + ParseTree tree = parser.parse("search source=t | where a=" + name + "(0)"); + assertNotEquals(null, tree); + } + } +} diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java index 8ca6c5c84e..57cee7fa1d 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java @@ -324,4 +324,3 @@ public void testCanParseTimestampdiffFunction() { "SOURCE=test | eval k = TIMESTAMPDIFF(WEEK,'2003-01-02','2003-01-02')")); } } - diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java index e16dc4329f..9472be7487 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java @@ -126,4 +126,4 @@ private JSONObject readResponse(Response response) throws IOException { } -} \ No newline at end of file +} diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java index b3b63327d0..4844e1f6db 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java @@ -115,4 +115,4 @@ public TableScanBuilder createScanBuilder() { return null; } } -} \ No newline at end of file +} diff --git 
a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java index 221a70e9d6..6d426d13c8 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java @@ -135,4 +135,4 @@ private Optional getSpanExpression(List namedE } -} \ No newline at end of file +} diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java index 54315bb792..2078950a5d 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java @@ -60,4 +60,4 @@ private static Optional getSpanExpression( -} \ No newline at end of file +} diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java index b5a52b0e8a..025e3bde06 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java @@ -89,4 +89,3 @@ void testApplyArgumentsException() { } - diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java 
b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java index 1e296a80c3..3e26b46c8f 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java @@ -72,4 +72,3 @@ void testResolve() { } } - diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java index bd77eb8c2b..d4e31d4d1e 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java @@ -189,4 +189,4 @@ void testClose() { prometheusQueryExemplarsRequest); queryExemplarsFunctionTableScanOperator.close(); } -} \ No newline at end of file +} diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java index d6a934a015..41ac7ff144 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java @@ -282,4 +282,3 @@ void createDataSourceSuccessWithHostnameRestrictions() { } } - diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java index 81af30769e..19876d398d 100644 --- 
a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java @@ -79,4 +79,3 @@ void testCreateScanBuilderWithQueryRangeTableFunction() { } } - diff --git a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java index 0630a85096..b480e6d9d9 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java +++ b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java @@ -23,4 +23,3 @@ public static String getJson(String filename) throws IOException { } } - From 605f3df5a322f51ade829ab6bd54965ee834d1f5 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Thu, 3 Aug 2023 10:07:03 -0700 Subject: [PATCH 04/42] [Spotless] Remove unused imports (#1927) * Adding spotless plugin to project build.gradle. Signed-off-by: Mitchell Gale Remove commented out spotless changes. Signed-off-by: Mitchell Gale Adding Spotless to DEVELOPER_GUIDE.rst Signed-off-by: Mitchell Gale Added Google Java format to spotless. Signed-off-by: Mitchell Gale Update DEVELOPER_GUIDE.rst Co-authored-by: Yury-Fridlyand * Added apply false for spotless Signed-off-by: Mitchell Gale * Adding ratchetFrom to build.gradle Signed-off-by: Mitchell Gale * Adding license header to build.gradle for spotless. Signed-off-by: Mitchell Gale * Uncommenting all changes to build.gradle for spotless. Signed-off-by: Mitchell Gale * Commented out spotless checks. Signed-off-by: Mitchell Gale * Removed trailing whitespace Signed-off-by: Mitchell Gale * Remove unused imports. 
Signed-off-by: Mitchell Gale * Fixed extra tab in import static statement in ConvertTZFunctionIT.java Signed-off-by: Mitchell Gale * Removed space in LocalClusterState.java Signed-off-by: Mitchell Gale * spotless apply Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand --- build.gradle | 4 ++-- .../org/opensearch/sql/common/utils/QueryContext.java | 2 -- .../authinterceptors/AwsSigningInterceptorTest.java | 3 --- .../main/java/org/opensearch/sql/analysis/Analyzer.java | 1 - .../analysis/DataSourceSchemaIdentifierNameResolver.java | 5 ----- .../org/opensearch/sql/analysis/ExpressionAnalyzer.java | 1 - .../opensearch/sql/analysis/NamedExpressionAnalyzer.java | 2 -- .../sql/ast/expression/NestedAllTupleFields.java | 1 - .../java/org/opensearch/sql/ast/tree/TableFunction.java | 2 -- .../executor/streaming/MicroBatchStreamingExecution.java | 2 +- core/src/main/java/org/opensearch/sql/expression/DSL.java | 2 +- .../sql/expression/datetime/DateTimeFunction.java | 2 +- .../sql/planner/logical/LogicalFetchCursor.java | 2 -- .../sql/planner/physical/AggregationOperator.java | 1 - .../sql/planner/physical/PhysicalPlanNodeVisitor.java | 2 +- .../model/DataSourceSchemaIdentifierNameResolverTest.java | 2 -- .../opensearch/sql/executor/pagination/CursorTest.java | 1 - .../streaming/MicroBatchStreamingExecutionTest.java | 2 +- .../sql/expression/datetime/DateTimeFunctionTest.java | 4 ++-- .../opensearch/sql/expression/datetime/StrToDateTest.java | 2 -- .../sql/expression/function/FunctionDSLTestBase.java | 1 - .../sql/expression/function/FunctionDSLimplNoArgTest.java | 1 - .../sql/expression/function/FunctionPropertiesTest.java | 2 -- .../function/RelevanceFunctionResolverTest.java | 2 -- .../operator/arthmetic/MathematicalFunctionTest.java | 2 +- .../operator/predicate/BinaryPredicateOperatorTest.java | 5 ----- .../org/opensearch/sql/planner/SerializablePlanTest.java | 1 - .../sql/planner/logical/LogicalRelationTest.java | 1 - 
.../sql/planner/optimizer/pattern/PatternsTest.java | 2 -- .../sql/planner/physical/FilterOperatorTest.java | 5 ----- .../sql/planner/physical/NestedOperatorTest.java | 1 - .../opensearch/sql/planner/physical/PhysicalPlanTest.java | 8 -------- .../sql/correctness/runner/resultset/DBResult.java | 1 - .../opensearch/sql/legacy/AggregationExpressionIT.java | 2 -- .../org/opensearch/sql/legacy/CsvFormatResponseIT.java | 2 -- .../test/java/org/opensearch/sql/legacy/HashJoinIT.java | 2 -- .../java/org/opensearch/sql/legacy/TypeInformationIT.java | 1 - .../java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java | 2 -- .../java/org/opensearch/sql/ppl/DateTimeComparisonIT.java | 1 - .../org/opensearch/sql/ppl/DateTimeImplementationIT.java | 1 - .../opensearch/sql/ppl/InformationSchemaCommandIT.java | 5 ----- .../test/java/org/opensearch/sql/ppl/MatchPhraseIT.java | 2 -- .../test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java | 1 - .../java/org/opensearch/sql/ppl/RelevanceFunctionIT.java | 6 ------ .../org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java | 2 -- .../java/org/opensearch/sql/ppl/SimpleQueryStringIT.java | 5 ----- .../test/java/org/opensearch/sql/ppl/SortCommandIT.java | 1 - .../test/java/org/opensearch/sql/ppl/StatsCommandIT.java | 4 ---- .../src/test/java/org/opensearch/sql/sql/AdminIT.java | 1 - .../java/org/opensearch/sql/sql/DateTimeFunctionIT.java | 2 +- .../test/java/org/opensearch/sql/sql/IdentifierIT.java | 2 -- .../org/opensearch/sql/sql/LegacyAPICompatibilityIT.java | 1 - .../org/opensearch/sql/sql/MathematicalFunctionIT.java | 2 +- .../java/org/opensearch/sql/sql/PaginationFallbackIT.java | 1 - .../test/java/org/opensearch/sql/sql/PaginationIT.java | 1 - .../src/test/java/org/opensearch/sql/sql/RawFormatIT.java | 1 - .../java/org/opensearch/sql/sql/SimpleQueryStringIT.java | 2 -- .../antlr/semantic/visitor/OpenSearchMappingLoader.java | 1 - .../opensearch/sql/legacy/esdomain/LocalClusterState.java | 3 --- 
.../sql/legacy/query/multi/MultiQueryRequestBuilder.java | 1 - .../sql/legacy/rewriter/matchtoterm/TermFieldScope.java | 1 - .../plugin/RestSQLQueryActionCursorFallbackTest.java | 3 --- .../opensearch/sql/legacy/unittest/JSONRequestTest.java | 3 --- .../sql/legacy/unittest/LocalClusterStateTest.java | 2 -- .../sql/legacy/unittest/SqlRequestFactoryTest.java | 1 - .../sql/legacy/unittest/executor/DeleteResultSetTest.java | 2 +- .../legacy/unittest/planner/QueryPlannerBatchTest.java | 1 - .../sql/legacy/unittest/planner/QueryPlannerTest.java | 1 - .../opensearch/sql/legacy/util/CheckScriptContents.java | 1 - .../java/org/opensearch/sql/legacy/util/MatcherUtils.java | 4 ++-- .../opensearch/data/value/OpenSearchExprValueFactory.java | 2 -- .../opensearch/sql/opensearch/mapping/IndexMapping.java | 4 ---- .../sql/opensearch/storage/scan/OpenSearchIndexScan.java | 1 - .../scan/OpenSearchIndexScanAggregationBuilder.java | 5 ----- .../script/aggregation/ExpressionAggregationScript.java | 2 -- .../script/filter/lucene/relevance/NoFieldQuery.java | 1 - .../opensearch/storage/script/sort/SortQueryBuilder.java | 1 - .../sql/opensearch/client/OpenSearchNodeClientTest.java | 1 - .../sql/opensearch/response/OpenSearchResponseTest.java | 2 -- .../sql/opensearch/setting/OpenSearchSettingsTest.java | 4 ---- .../storage/OpenSearchDefaultImplementorTest.java | 1 - .../opensearch/storage/scan/OpenSearchIndexScanTest.java | 1 - .../script/aggregation/AggregationQueryBuilderTest.java | 1 - .../org/opensearch/sql/ppl/utils/ArgumentFactory.java | 1 - .../opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java | 1 - .../sql/ppl/parser/AstExpressionBuilderTest.java | 2 -- .../response/QueryExemplarsFunctionResponseHandle.java | 1 - .../sql/prometheus/request/PrometheusQueryRequest.java | 2 -- .../sql/prometheus/response/PrometheusResponse.java | 1 - .../sql/prometheus/storage/PrometheusStorageEngine.java | 1 - .../storage/querybuilder/AggregationQueryBuilder.java | 2 -- 
.../QueryExemplarsFunctionImplementationTest.java | 1 - .../QueryRangeFunctionImplementationTest.java | 3 --- .../resolver/QueryRangeTableFunctionResolverTest.java | 1 - .../sql/prometheus/storage/PrometheusMetricScanTest.java | 1 - .../prometheus/storage/PrometheusStorageFactoryTest.java | 1 - .../response/format/CommandResponseFormatter.java | 2 -- .../org/opensearch/sql/spark/response/SparkResponse.java | 4 ---- .../opensearch/sql/sql/parser/AstExpressionBuilder.java | 1 - .../org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java | 4 ++-- .../sql/sql/parser/AstAggregationBuilderTest.java | 1 - 101 files changed, 17 insertions(+), 188 deletions(-) diff --git a/build.gradle b/build.gradle index bb0cd162f0..a52322c21e 100644 --- a/build.gradle +++ b/build.gradle @@ -92,8 +92,8 @@ spotless { // " * Copyright OpenSearch Contributors\n" + // " * SPDX-License-Identifier: Apache-2.0\n" + // " */\n\n") -// removeUnusedImports() -// trimTrailingWhitespace() + removeUnusedImports() + trimTrailingWhitespace() endWithNewline() // googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') } diff --git a/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java b/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java index ab11029d73..19c3d9588c 100644 --- a/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java +++ b/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java @@ -6,9 +6,7 @@ package org.opensearch.sql.common.utils; -import java.time.LocalDateTime; import java.util.Map; -import java.util.Optional; import java.util.UUID; import org.apache.logging.log4j.ThreadContext; diff --git a/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java b/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java index 894f3974ce..21a8bbf6e7 100644 --- 
a/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java +++ b/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java @@ -7,8 +7,6 @@ package org.opensearch.sql.common.authinterceptors; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSSessionCredentials; @@ -26,7 +24,6 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.sql.common.authinterceptors.AwsSigningInterceptor; @ExtendWith(MockitoExtension.class) public class AwsSigningInterceptorTest { diff --git a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java index 2c4647004c..370dd1a3f1 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java @@ -29,7 +29,6 @@ import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.math3.analysis.function.Exp; import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.analysis.symbol.Namespace; import org.opensearch.sql.analysis.symbol.Symbol; diff --git a/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java b/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java index 1bb8316907..a90fc3d2c1 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java +++ b/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java @@ -7,13 +7,8 @@ package org.opensearch.sql.analysis; -import com.google.common.collect.ImmutableSet; import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; 
-import org.opensearch.sql.ast.expression.QualifiedName; import org.opensearch.sql.datasource.DataSourceService; -import org.opensearch.sql.datasource.model.DataSourceMetadata; public class DataSourceSchemaIdentifierNameResolver { diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java index 601e3e00cc..60e5b40a82 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java @@ -49,7 +49,6 @@ import org.opensearch.sql.ast.expression.When; import org.opensearch.sql.ast.expression.WindowFunction; import org.opensearch.sql.ast.expression.Xor; -import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; diff --git a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java index 1d318c5588..d0e6968b07 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java @@ -10,11 +10,9 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.Alias; import org.opensearch.sql.ast.expression.QualifiedName; -import org.opensearch.sql.ast.expression.Span; import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.NamedExpression; -import org.opensearch.sql.expression.span.SpanExpression; /** * Analyze the Alias node in the {@link AnalysisContext} to construct the list of diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java 
b/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java index adf2025e6c..7d94cf8f31 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java @@ -11,7 +11,6 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor; -import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java b/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java index 064cbf24fe..70ab852571 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java @@ -11,10 +11,8 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor; -import lombok.Setter; import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -import org.opensearch.sql.ast.expression.Let; import org.opensearch.sql.ast.expression.QualifiedName; import org.opensearch.sql.ast.expression.UnresolvedExpression; diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java b/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java index c31ed18c57..d85c17fe66 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java @@ -21,7 +21,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; /** - * Micro batch streaming execution. + * Micro batch streaming execution. 
*/ public class MicroBatchStreamingExecution { diff --git a/core/src/main/java/org/opensearch/sql/expression/DSL.java b/core/src/main/java/org/opensearch/sql/expression/DSL.java index 3f1897e483..f1a8110a29 100644 --- a/core/src/main/java/org/opensearch/sql/expression/DSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/DSL.java @@ -534,7 +534,7 @@ public static FunctionExpression str_to_date(FunctionProperties functionProperti Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.STR_TO_DATE, expressions); } - + public static FunctionExpression sec_to_time(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.SEC_TO_TIME, expressions); } diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java index cd5ef23d1c..0ea1d02eaf 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java @@ -1165,7 +1165,7 @@ private ExprValue exprDateApplyInterval(FunctionProperties functionProperties, var dt = extractDateTime(datetime, functionProperties); return new ExprDatetimeValue(isAdd ? dt.plus(interval) : dt.minus(interval)); } - + /** * Formats date according to format specifier. First argument is time, second is format. 
* Detailed supported signatures: diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java index e4a0482aac..4fc96f3ec1 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java @@ -9,8 +9,6 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; -import org.opensearch.sql.planner.logical.LogicalPlan; -import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; import org.opensearch.sql.storage.StorageEngine; /** diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java index 1d9523464b..2c643c986d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java @@ -17,7 +17,6 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.Aggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; -import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.planner.physical.collector.Collector; import org.opensearch.sql.storage.bindingtuple.BindingTuple; diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java index 1e8f08d39f..14a839db27 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java @@ -72,7 +72,7 @@ public R visitValues(ValuesOperator node, C context) { public R visitSort(SortOperator node, C context) { return visitNode(node, 
context); } - + public R visitRareTopN(RareTopNOperator node, C context) { return visitNode(node, context); } diff --git a/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java b/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java index ac429e89a0..c00bd7705d 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java @@ -16,9 +16,7 @@ import static org.opensearch.sql.analysis.model.DataSourceSchemaIdentifierNameResolverTest.Identifier.identifierOf; import java.util.Arrays; -import java.util.Collections; import java.util.List; -import java.util.Set; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; diff --git a/core/src/test/java/org/opensearch/sql/executor/pagination/CursorTest.java b/core/src/test/java/org/opensearch/sql/executor/pagination/CursorTest.java index e3e2c8cf33..fe9590b60b 100644 --- a/core/src/test/java/org/opensearch/sql/executor/pagination/CursorTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/pagination/CursorTest.java @@ -9,7 +9,6 @@ import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; -import org.opensearch.sql.executor.pagination.Cursor; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class CursorTest { diff --git a/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java b/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java index f0974db13e..589b093c2f 100644 --- a/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java +++ 
b/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java @@ -230,7 +230,7 @@ Helper latestOffsetLogShouldBe(Long offsetId) { * StreamingSource impl only for testing. * *

initially, offset is -1, getLatestOffset() will return Optional.emtpy(). - * + * *

call addData() add offset by one. */ static class TestStreamingSource implements StreamingSource { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java index 2f650eeda3..f8abfe7580 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java @@ -559,7 +559,7 @@ public void invalidDayOfYearArgument() { () -> invalidDayOfYearQuery("asdfasdfasdf")) ); } - + @Test public void from_days() { FunctionExpression expression = DSL.from_days(DSL.literal(new ExprLongValue(730669))); @@ -1712,7 +1712,7 @@ private static Stream getInvalidTestDataForTimeFormat() { DSL.literal("%h")) ); } - + @ParameterizedTest(name = "{0}{1}") @MethodSource("getInvalidTestDataForTimeFormat") public void testInvalidTimeFormat(LiteralExpression arg, LiteralExpression format) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java index b758331a71..a35627247a 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java @@ -15,11 +15,9 @@ import java.time.LocalTime; import java.util.stream.Stream; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; diff --git 
a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java index f5f4128451..93d0c32f57 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java @@ -11,7 +11,6 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.sql.data.model.ExprMissingValue; import org.opensearch.sql.data.model.ExprNullValue; -import org.opensearch.sql.data.model.ExprTupleValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplNoArgTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplNoArgTest.java index 5d970803ed..907cd49f26 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplNoArgTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplNoArgTest.java @@ -9,7 +9,6 @@ import java.util.List; import org.apache.commons.lang3.tuple.Pair; -import org.junit.jupiter.api.BeforeEach; import org.opensearch.sql.expression.Expression; class FunctionDSLimplNoArgTest extends FunctionDSLimplTestBase { diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java index 64ec21e7e1..ed4a8a514d 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java @@ -17,9 +17,7 @@ import java.time.Clock; import java.time.Instant; import java.time.ZoneId; -import java.util.concurrent.Callable; import java.util.function.Consumer; -import 
java.util.function.Function; import java.util.stream.Stream; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DynamicTest; diff --git a/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java b/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java index deba721481..85fc3a0276 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java @@ -6,9 +6,7 @@ package org.opensearch.sql.expression.function; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java index d9f95bc67a..4267805c74 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java @@ -1485,7 +1485,7 @@ public void signum_double_value(Double value) { signum.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("signum(%s)", value), signum.toString()); - } + } /** * Test sinh with byte value. 
diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java index ad07dbc975..52b1e8d8fc 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java @@ -11,10 +11,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.config.TestConfig.BOOL_TYPE_MISSING_VALUE_FIELD; import static org.opensearch.sql.config.TestConfig.BOOL_TYPE_NULL_VALUE_FIELD; -import static org.opensearch.sql.config.TestConfig.INT_TYPE_MISSING_VALUE_FIELD; -import static org.opensearch.sql.config.TestConfig.INT_TYPE_NULL_VALUE_FIELD; -import static org.opensearch.sql.config.TestConfig.STRING_TYPE_MISSING_VALUE_FIELD; -import static org.opensearch.sql.config.TestConfig.STRING_TYPE_NULL_VALUE_FIELD; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; @@ -25,7 +21,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; -import static org.opensearch.sql.data.type.ExprCoreType.STRING; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import static org.opensearch.sql.utils.ComparisonUtil.compare; diff --git a/core/src/test/java/org/opensearch/sql/planner/SerializablePlanTest.java b/core/src/test/java/org/opensearch/sql/planner/SerializablePlanTest.java index 8073445dc0..f00ef61ee1 100644 --- 
a/core/src/test/java/org/opensearch/sql/planner/SerializablePlanTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/SerializablePlanTest.java @@ -6,7 +6,6 @@ package org.opensearch.sql.planner; import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Answers.CALLS_REAL_METHODS; import org.junit.jupiter.api.DisplayNameGeneration; diff --git a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java index 6426ebb63f..9cf40d3c4d 100644 --- a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java @@ -12,7 +12,6 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.sql.expression.Expression; import org.opensearch.sql.storage.Table; @ExtendWith(MockitoExtension.class) diff --git a/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java b/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java index ef310e3b0e..2cab54d272 100644 --- a/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java @@ -8,7 +8,6 @@ import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -17,7 +16,6 @@ import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.opensearch.sql.planner.logical.LogicalFilter; -import org.opensearch.sql.planner.logical.LogicalPaginate; import 
org.opensearch.sql.planner.logical.LogicalPlan; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java index 6a8bcad203..66ca2de157 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java @@ -17,17 +17,12 @@ import com.google.common.collect.ImmutableMap; import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.Stream; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.AdditionalAnswers; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprTupleValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.model.ExprValueUtils; diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java index 5f8bf99b0d..5b3744c401 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java @@ -8,7 +8,6 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.when; import static org.opensearch.sql.data.model.ExprValueUtils.collectionValue; import static 
org.opensearch.sql.data.model.ExprValueUtils.tupleValue; diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java index ab3f0ef36d..1e42857205 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java @@ -5,15 +5,7 @@ package org.opensearch.sql.planner.physical; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.CALLS_REAL_METHODS; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; import java.util.List; import org.junit.jupiter.api.DisplayNameGeneration; diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java index eb522b008d..52b7d26cc4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java @@ -12,7 +12,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java index af6e2ad492..e064300e4f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java @@ -12,8 +12,6 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import org.json.JSONObject; -import org.junit.Assume; -import org.junit.Ignore; import org.junit.Test; public class AggregationExpressionIT extends SQLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java index aa3bf67f58..9952b0c68a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java @@ -33,10 +33,8 @@ import org.hamcrest.Matcher; import org.hamcrest.core.AnyOf; import org.junit.Assert; -import org.junit.Assume; import org.junit.Ignore; import org.junit.Test; -import org.junit.jupiter.api.Disabled; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java index 9cd497e675..f796010bbe 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java @@ -10,9 +10,7 @@ import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_GAME_OF_THRONES; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT; -import static org.opensearch.sql.util.MatcherUtils.columnName; import static org.opensearch.sql.util.MatcherUtils.rows; -import static org.opensearch.sql.util.MatcherUtils.verifyColumn; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import java.io.IOException; diff --git 
a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java index 2bd3835a3a..e827059f1b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java @@ -11,7 +11,6 @@ import org.json.JSONObject; -import org.junit.Ignore; import org.junit.Test; public class TypeInformationIT extends SQLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java index 48cdb9684f..1a244bed85 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java @@ -7,14 +7,12 @@ import org.json.JSONObject; import org.junit.Test; - import org.opensearch.sql.legacy.SQLIntegTestCase; import java.io.IOException; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; - import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.MatcherUtils.verifySome; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java index b795977e7d..272e0a34d3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java @@ -20,7 +20,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.json.JSONObject; import org.junit.After; import org.junit.Before; import 
org.junit.jupiter.api.Test; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index 158f25aadf..5e51a60856 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -13,7 +13,6 @@ import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; -import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.MatcherUtils.verifySome; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java index 448bebd377..37909e4726 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java @@ -7,7 +7,6 @@ package org.opensearch.sql.ppl; -import static org.opensearch.sql.legacy.TestUtils.getResponseBody; import static org.opensearch.sql.util.MatcherUtils.columnName; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyColumn; @@ -18,12 +17,8 @@ import java.io.IOException; import org.json.JSONObject; import org.junit.After; -import org.junit.AfterClass; import org.junit.Assert; -import org.junit.Before; import org.junit.BeforeClass; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.opensearch.client.Request; import org.opensearch.client.Response; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java 
b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java index 5b9fd07e31..780113de52 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java @@ -12,8 +12,6 @@ import java.io.IOException; import org.json.JSONObject; -import org.junit.Ignore; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; public class MatchPhraseIT extends PPLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java index dd2fcb84c8..422cc92cd2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java @@ -9,7 +9,6 @@ import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; import java.io.IOException; -import org.junit.Ignore; import org.junit.Test; import org.opensearch.client.ResponseException; import org.opensearch.sql.common.antlr.SyntaxCheckException; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java index b72dc5230f..9e05a2a919 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java @@ -6,15 +6,9 @@ package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; -import static org.opensearch.sql.util.MatcherUtils.rows; -import static org.opensearch.sql.util.MatcherUtils.schema; -import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; -import static org.opensearch.sql.util.MatcherUtils.verifySchema; -import static org.opensearch.sql.util.MatcherUtils.verifySome; import java.io.IOException; -import org.json.JSONObject; import org.junit.Test; public class 
RelevanceFunctionIT extends PPLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java index 4190e4274b..4845d30033 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java @@ -7,7 +7,6 @@ package org.opensearch.sql.ppl; -import static org.opensearch.sql.legacy.TestUtils.getResponseBody; import static org.opensearch.sql.util.MatcherUtils.columnName; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyColumn; @@ -18,7 +17,6 @@ import java.io.IOException; import org.json.JSONObject; import org.junit.After; -import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.jupiter.api.Test; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java index ab1edb92a7..abc0ad6f59 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java @@ -6,11 +6,6 @@ package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; -import static org.opensearch.sql.util.MatcherUtils.rows; -import static org.opensearch.sql.util.MatcherUtils.schema; -import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; -import static org.opensearch.sql.util.MatcherUtils.verifySchema; -import static org.opensearch.sql.util.MatcherUtils.verifySome; import java.io.IOException; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java index 0fd4e9ec86..01befa0541 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java @@ -14,7 +14,6 @@ import java.io.IOException; import org.json.JSONObject; -import org.junit.Ignore; import org.junit.Test; public class SortCommandIT extends PPLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java index cf560c129c..5389f245a4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java @@ -15,10 +15,6 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import java.io.IOException; -import java.time.LocalDate; -import java.time.ZoneId; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; import org.json.JSONObject; import org.junit.jupiter.api.Test; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java index 243432790d..8429ab3897 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java @@ -8,7 +8,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; -import static org.opensearch.sql.util.MatcherUtils.assertJsonEquals; import static org.opensearch.sql.util.TestUtils.getResponseBody; import com.google.common.io.Resources; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java index 2696a9a0d6..ab5aa46853 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java @@ -1358,7 +1358,7 @@ public void testBracketedEquivalent() 
throws IOException { compareBrackets("time", "time", "17:30:00"); compareBrackets("time", "t", "17:30:00"); } - + @Test public void testBracketFails() { assertThrows(ResponseException.class, ()->executeQuery("select {time '2020-09-16'}")); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java index 22632cc4de..8d3f9e1509 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java @@ -14,8 +14,6 @@ import static org.opensearch.sql.util.TestUtils.performRequest; import java.io.IOException; -import java.util.ArrayList; -import org.json.JSONArray; import org.json.JSONObject; import org.junit.jupiter.api.Test; import org.opensearch.client.Request; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java index adc40a24ec..1f85b2857f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java @@ -17,7 +17,6 @@ import java.io.IOException; import org.json.JSONObject; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java index 2e05706269..cbb39ead40 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java @@ -39,7 +39,7 @@ public void testPI() throws IOException { schema("PI()", null, "double")); verifyDataRows(result, rows(3.141592653589793)); } - + @Test public void testCeil() throws 
IOException { JSONObject result = executeQuery("select ceil(0)"); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java index 1f97ddefd1..213c9322e1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java @@ -11,7 +11,6 @@ import static org.opensearch.sql.util.TestUtils.verifyIsV2Cursor; import java.io.IOException; -import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.util.TestUtils; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java index 69a3607d56..bd8949203c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java @@ -9,7 +9,6 @@ import static org.junit.Assert.assertTrue; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_CALCS; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ONLINE; -import static org.opensearch.sql.legacy.plugin.RestSqlAction.EXPLAIN_API_ENDPOINT; import java.io.IOException; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java index b040b97136..eb693a4718 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java @@ -6,7 +6,6 @@ package org.opensearch.sql.sql; -import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE; import static org.opensearch.sql.protocol.response.format.FlatResponseFormatter.CONTENT_TYPE; diff --git 
a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java index efd23dfdd4..afacc5fefd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java @@ -5,12 +5,10 @@ package org.opensearch.sql.sql; -import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; import static org.opensearch.sql.protocol.response.format.JsonResponseFormatter.CONTENT_TYPE; import java.io.IOException; -import java.util.Locale; import org.json.JSONObject; import org.junit.Test; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java index 7bfca0a015..dca201f25b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java @@ -12,7 +12,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; import org.opensearch.sql.legacy.antlr.semantic.scope.Environment; import org.opensearch.sql.legacy.antlr.semantic.scope.Namespace; import org.opensearch.sql.legacy.antlr.semantic.scope.SemanticContext; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java index 37d9322b46..fc69ecff30 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java @@ -6,8 +6,6 @@ package org.opensearch.sql.legacy.esdomain; -import static 
org.opensearch.common.settings.Settings.EMPTY; - import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.io.IOException; @@ -25,7 +23,6 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Setting; import org.opensearch.index.IndexNotFoundException; import org.opensearch.sql.common.setting.Settings; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java index b356377d5d..af2a1e90cf 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java @@ -17,7 +17,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.domain.Field; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java index fd6380d9d1..f8b6e9b05e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java @@ -7,7 +7,6 @@ package org.opensearch.sql.legacy.rewriter.matchtoterm; import java.util.*; -import java.util.stream.Collectors; import org.json.JSONObject; import org.opensearch.sql.legacy.esdomain.mapping.FieldMappings; import 
org.opensearch.sql.legacy.esdomain.mapping.IndexMappings; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java index a11f4c47d7..2afcdc93f4 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java @@ -6,8 +6,6 @@ package org.opensearch.sql.legacy.plugin; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; import java.io.IOException; @@ -30,7 +28,6 @@ import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; -import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.executor.QueryManager; import org.opensearch.sql.executor.execution.QueryPlanFactory; import org.opensearch.sql.sql.SQLService; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java index f546f3571a..b70779110a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java @@ -9,7 +9,6 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyInt; import static org.mockito.Mockito.mock; @@ -24,9 +23,7 @@ import org.json.JSONObject; import org.junit.Before; import org.junit.Ignore; -import org.junit.Rule; import 
org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockedStatic; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java index 4149fd8328..cb8568925d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java @@ -10,9 +10,7 @@ import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java index f93461724d..ec13789d28 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java @@ -8,7 +8,6 @@ import static java.util.Collections.emptyList; import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; import org.junit.Assert; import org.junit.Before; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java index 31388e79e3..a668e9248a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java @@ -31,7 +31,7 @@ public class DeleteResultSetTest 
{ @Mock Delete deleteQuery; - + @Test public void testDeleteResponseForJdbcFormat() throws IOException { diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java index 52f8e2bc6e..545710e343 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java @@ -17,7 +17,6 @@ import java.util.Collection; import java.util.List; import org.hamcrest.Matcher; -import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java index 775417d669..13344eb204 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java @@ -19,7 +19,6 @@ import com.alibaba.druid.sql.parser.ParserException; import com.alibaba.druid.sql.parser.SQLExprParser; import com.alibaba.druid.sql.parser.Token; -import com.google.common.collect.ImmutableList; import java.util.Arrays; import java.util.List; import org.apache.lucene.search.TotalHits; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java index 595b6987a7..7b03140ea4 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java @@ -13,7 +13,6 @@ import static org.mockito.Mockito.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static 
org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import static org.opensearch.search.builder.SearchSourceBuilder.ScriptField; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java index 84f19de58b..0b57d460f3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java @@ -227,14 +227,14 @@ public static TypeSafeMatcher columnPattern(String regex) { protected boolean matchesSafely(JSONObject jsonObject) { return ((String)jsonObject.query("/name")).matches(regex); } - + @Override public void describeTo(Description description) { description.appendText(String.format("(column_pattern=%s)", regex)); } }; } - + public static TypeSafeMatcher columnName(String name) { return new TypeSafeMatcher() { @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java index 95815d5c38..4e3e1ec5c0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java @@ -25,13 +25,11 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; -import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java 
b/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java index 0185ca95b6..2fefd0316f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java @@ -6,13 +6,9 @@ package org.opensearch.sql.opensearch.mapping; -import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.Map; -import java.util.stream.Collectors; import lombok.Getter; import lombok.ToString; -import org.apache.commons.lang3.EnumUtils; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java index 2ee863b6db..0ca9cde3d2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java @@ -6,7 +6,6 @@ package org.opensearch.sql.opensearch.storage.scan; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java index 84883b5209..d5f89d2579 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java @@ -22,11 +22,6 @@ import org.opensearch.sql.opensearch.storage.serialization.DefaultExpressionSerializer; import org.opensearch.sql.planner.logical.LogicalAggregation; import 
org.opensearch.sql.planner.logical.LogicalFilter; -import org.opensearch.sql.planner.logical.LogicalHighlight; -import org.opensearch.sql.planner.logical.LogicalLimit; -import org.opensearch.sql.planner.logical.LogicalNested; -import org.opensearch.sql.planner.logical.LogicalPaginate; -import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalSort; /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java index 2a371afaa3..2871bd4a97 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java @@ -9,8 +9,6 @@ import static java.time.temporal.ChronoUnit.MILLIS; import java.time.LocalTime; -import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.Map; import lombok.EqualsAndHashCode; import org.apache.lucene.index.LeafReaderContext; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java index 1467cf8e4b..528b24af6c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java @@ -7,7 +7,6 @@ import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.stream.Collectors; import org.opensearch.index.query.QueryBuilder; import org.opensearch.sql.common.antlr.SyntaxCheckException; diff --git 
a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java index 62c923832c..9002df7c8f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java @@ -20,7 +20,6 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.FunctionExpression; import org.opensearch.sql.expression.ReferenceExpression; -import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; /** diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java index 9417a1de1d..67c635dc42 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java @@ -60,7 +60,6 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java index e77819a453..b26847b095 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java @@ -15,8 +15,6 @@ import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableMap; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java index b4c8cc8c69..835798f162 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java @@ -12,18 +12,14 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.AdditionalMatchers.not; import static org.mockito.AdditionalMatchers.or; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; import static org.opensearch.common.unit.TimeValue.timeValueMinutes; -import static org.opensearch.sql.opensearch.setting.LegacyOpenDistroSettings.PPL_ENABLED_SETTING; import static org.opensearch.sql.opensearch.setting.LegacyOpenDistroSettings.legacySettings; import java.util.List; -import java.util.Set; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.AdditionalMatchers; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.cluster.ClusterName; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java index f2b6a70a46..1e44345576 100644 
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java @@ -19,7 +19,6 @@ import org.opensearch.sql.planner.logical.LogicalML; import org.opensearch.sql.planner.logical.LogicalMLCommons; import org.opensearch.sql.planner.logical.LogicalPlan; -import org.opensearch.sql.storage.Table; @ExtendWith(MockitoExtension.class) public class OpenSearchDefaultImplementorTest { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java index 9e96e3cb6a..a34e93dd70 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java @@ -12,7 +12,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java index 03f5cc8b52..c76567c1e9 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java @@ -52,7 +52,6 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.AvgAggregator; import 
org.opensearch.sql.expression.aggregation.CountAggregator; -import org.opensearch.sql.expression.aggregation.MaxAggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.data.type.OpenSearchDateType; diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java index 09afd2075f..941bfe680e 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java @@ -7,7 +7,6 @@ package org.opensearch.sql.ppl.utils; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BooleanLiteralContext; -import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.DecimalLiteralContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.DedupCommandContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.FieldsCommandContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.IntegerLiteralContext; diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java index 2f520b55c6..1f0e6f0d52 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java @@ -11,7 +11,6 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.stream.Collectors; -import lombok.Generated; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.ast.AbstractNodeVisitor; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java 
b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java index a6e130eed3..8472e61361 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java @@ -16,7 +16,6 @@ import static org.opensearch.sql.ast.dsl.AstDSL.booleanLiteral; import static org.opensearch.sql.ast.dsl.AstDSL.cast; import static org.opensearch.sql.ast.dsl.AstDSL.compare; -import static org.opensearch.sql.ast.dsl.AstDSL.dateLiteral; import static org.opensearch.sql.ast.dsl.AstDSL.defaultFieldsArgs; import static org.opensearch.sql.ast.dsl.AstDSL.defaultSortFieldArgs; import static org.opensearch.sql.ast.dsl.AstDSL.defaultStatsArgs; @@ -41,7 +40,6 @@ import static org.opensearch.sql.ast.dsl.AstDSL.relation; import static org.opensearch.sql.ast.dsl.AstDSL.sort; import static org.opensearch.sql.ast.dsl.AstDSL.stringLiteral; -import static org.opensearch.sql.ast.dsl.AstDSL.timestampLiteral; import static org.opensearch.sql.ast.dsl.AstDSL.unresolvedArg; import static org.opensearch.sql.ast.dsl.AstDSL.xor; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java index f734159720..f030ce8f7a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.SERIES_LABELS_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TIMESTAMP_KEY; -import static 
org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TRACE_ID_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.VALUE_KEY; import java.time.Instant; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java index 176a52a1d9..d287ea4d65 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java @@ -9,9 +9,7 @@ import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.NoArgsConstructor; -import lombok.Setter; import lombok.ToString; /** diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java index 2c75588e4c..ca250125e6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java @@ -7,7 +7,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.LONG; -import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.MATRIX_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.METRIC_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.RESULT_KEY; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java index 
e19b369a97..738eb023b6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java @@ -12,7 +12,6 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import lombok.RequiredArgsConstructor; import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.exception.SemanticCheckException; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java index 76c8c6872e..a141707077 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java @@ -7,12 +7,10 @@ package org.opensearch.sql.prometheus.storage.querybuilder; -import java.sql.Ref; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import lombok.NoArgsConstructor; -import org.apache.commons.lang3.StringUtils; import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java index 025e3bde06..d6e4a5cef4 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java @@ -22,7 +22,6 @@ import 
org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.FunctionName; import org.opensearch.sql.prometheus.client.PrometheusClient; -import org.opensearch.sql.prometheus.functions.implementation.QueryExemplarFunctionImplementation; import org.opensearch.sql.prometheus.request.PrometheusQueryExemplarsRequest; import org.opensearch.sql.prometheus.storage.QueryExemplarsTable; diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java index 9732999a92..48337e3f02 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java @@ -8,11 +8,9 @@ package org.opensearch.sql.prometheus.functions.implementation; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.List; import org.junit.jupiter.api.Test; @@ -25,7 +23,6 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.FunctionName; import org.opensearch.sql.prometheus.client.PrometheusClient; -import org.opensearch.sql.prometheus.functions.implementation.QueryRangeFunctionImplementation; import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; diff --git 
a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java index 0f7aa91abc..2a36600379 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java @@ -31,7 +31,6 @@ import org.opensearch.sql.expression.function.TableFunctionImplementation; import org.opensearch.sql.prometheus.client.PrometheusClient; import org.opensearch.sql.prometheus.functions.implementation.QueryRangeFunctionImplementation; -import org.opensearch.sql.prometheus.functions.resolver.QueryRangeTableFunctionResolver; import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java index 68e03c758c..9c0207853c 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java @@ -16,7 +16,6 @@ import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.STEP; -import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TIMESTAMP; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.VALUE; import static 
org.opensearch.sql.prometheus.utils.TestUtils.getJson; diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java index 41ac7ff144..c566ccdeb4 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java @@ -17,7 +17,6 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.cluster.ClusterName; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.datasource.model.DataSource; import org.opensearch.sql.datasource.model.DataSourceMetadata; diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java index 68d9be558b..dfd0f91931 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java @@ -7,8 +7,6 @@ import lombok.Getter; import org.opensearch.sql.executor.execution.CommandPlan; -import org.opensearch.sql.opensearch.response.error.ErrorMessage; -import org.opensearch.sql.opensearch.response.error.ErrorMessageFactory; import org.opensearch.sql.protocol.response.QueryResult; /** diff --git a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java index 3e348381f2..504d4c96ca 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java +++ b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java @@ -9,8 +9,6 @@ import 
com.google.common.annotations.VisibleForTesting; import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.Setter; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONObject; @@ -22,12 +20,10 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; -import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.sql.datasources.exceptions.DataSourceNotFoundException; @Data public class SparkResponse { diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java index 7279553106..192514250b 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java @@ -103,7 +103,6 @@ import org.opensearch.sql.ast.tree.Sort.SortOption; import org.opensearch.sql.common.utils.StringUtils; import org.opensearch.sql.expression.function.BuiltinFunctionName; -import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser; import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.AlternateMultiMatchQueryContext; import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.AndExpressionContext; import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.ColumnNameContext; diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java index 6d43daa60f..ade4983f58 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java @@ 
-595,7 +595,7 @@ public void can_parse_sec_to_time_function() { assertNotNull(parser.parse("SELECT sec_to_time(6897)")); assertNotNull(parser.parse("SELECT sec_to_time(6897.123)")); } - + @Test public void can_parse_last_day_function() { assertNotNull(parser.parse("SELECT last_day(\"2017-06-20\")")); @@ -607,7 +607,7 @@ public void can_parse_timestampadd_function() { assertNotNull(parser.parse("SELECT TIMESTAMPADD(MINUTE, 1, '2003-01-02')")); assertNotNull(parser.parse("SELECT TIMESTAMPADD(WEEK,1,'2003-01-02')")); } - + @Test public void can_parse_timestampdiff_function() { assertNotNull(parser.parse("SELECT TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')")); diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java index 79896d9400..fff789de44 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java @@ -30,7 +30,6 @@ import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; -import org.opensearch.sql.ast.expression.AllFields; import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.ast.tree.Aggregation; import org.opensearch.sql.ast.tree.UnresolvedPlan; From d14470632812d4ea5264c7f7c4a02c734385a097 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Fri, 4 Aug 2023 10:06:02 -0700 Subject: [PATCH 05/42] fix import order and add import order to build.gradle. 
(#1929) Signed-off-by: Mitchell Gale --- build.gradle | 2 +- .../sql/bwc/SQLBackwardsCompatibilityIT.java | 33 ++++++----- .../sql/correctness/CorrectnessIT.java | 1 - .../sql/correctness/tests/TestConfigTest.java | 2 +- .../correctness/tests/TestDataSetTest.java | 2 +- .../correctness/tests/TestQuerySetTest.java | 2 +- .../org/opensearch/sql/jdbc/CursorIT.java | 4 +- .../sql/legacy/NestedFieldQueryIT.java | 2 +- .../sql/legacy/OpenSearchSQLRestTestCase.java | 4 +- .../sql/legacy/RestIntegTestCase.java | 2 +- .../sql/legacy/SQLIntegTestCase.java | 57 +++++++++---------- .../sql/legacy/TypeInformationIT.java | 1 - .../sql/ppl/DateTimeComparisonIT.java | 5 +- .../sql/ppl/DateTimeImplementationIT.java | 9 ++- .../org/opensearch/sql/ppl/MultiMatchIT.java | 1 - .../sql/ppl/PositionFunctionIT.java | 7 +-- .../sql/ppl/RelevanceFunctionIT.java | 1 - .../sql/ppl/SimpleQueryStringIT.java | 1 - .../org/opensearch/sql/ppl/StandaloneIT.java | 22 +++---- .../java/org/opensearch/sql/sql/AdminIT.java | 1 - .../sql/sql/ArithmeticFunctionIT.java | 1 - .../sql/sql/ConvertTZFunctionIT.java | 7 ++- .../org/opensearch/sql/sql/CsvFormatIT.java | 1 - .../sql/sql/DateTimeComparisonIT.java | 5 +- .../sql/sql/DateTimeImplementationIT.java | 9 ++- .../sql/sql/HighlightFunctionIT.java | 2 +- .../org/opensearch/sql/sql/LikeQueryIT.java | 11 ++-- .../sql/sql/PaginationBlackboxIT.java | 5 +- .../org/opensearch/sql/sql/PaginationIT.java | 1 - .../sql/sql/PositionFunctionIT.java | 10 ++-- .../org/opensearch/sql/sql/ScoreQueryIT.java | 17 +++--- .../sql/sql/SimpleQueryStringIT.java | 1 - .../sql/sql/StandalonePaginationIT.java | 4 +- .../opensearch/sql/sql/StringLiteralIT.java | 11 ++-- .../opensearch/sql/sql/WildcardQueryIT.java | 5 +- .../opensearch/sql/sql/WindowFunctionIT.java | 1 - .../org/opensearch/sql/util/MatcherUtils.java | 2 +- .../opensearch/sql/util/StandaloneModule.java | 2 +- .../legacy/executor/AsyncRestExecutor.java | 2 +- .../executor/ElasticDefaultRestExecutor.java | 2 +- 
.../executor/GetIndexRequestRestListener.java | 3 +- .../executor/csv/CSVResultRestExecutor.java | 2 +- .../executor/csv/CSVResultsExtractor.java | 2 +- .../cursor/CursorAsyncRestExecutor.java | 2 +- .../format/PrettyFormatRestExecutor.java | 2 +- .../executor/join/ElasticJoinExecutor.java | 2 +- .../legacy/executor/join/ElasticUtils.java | 2 +- .../sql/legacy/plugin/RestSQLQueryAction.java | 4 +- .../sql/legacy/plugin/RestSqlAction.java | 2 +- .../sql/legacy/plugin/RestSqlStatsAction.java | 2 +- .../legacy/query/join/JoinRequestBuilder.java | 2 +- .../NestedLoopsElasticRequestBuilder.java | 2 +- .../sql/legacy/query/maker/AggMaker.java | 6 +- .../query/multi/MultiQueryRequestBuilder.java | 2 +- .../planner/physical/node/scroll/Scroll.java | 1 - .../semantic/SemanticAnalyzerBasicTest.java | 2 +- .../antlr/semantic/scope/EnvironmentTest.java | 2 +- .../antlr/semantic/scope/SymbolTableTest.java | 2 +- .../esdomain/mapping/FieldMappingTest.java | 2 +- .../unittest/cursor/DefaultCursorTest.java | 2 +- .../expression/model/ExprValueUtilsTest.java | 2 +- .../unittest/planner/QueryPlannerTest.java | 2 +- .../sql/legacy/util/MatcherUtils.java | 2 +- 63 files changed, 143 insertions(+), 167 deletions(-) diff --git a/build.gradle b/build.gradle index a52322c21e..6d30e54354 100644 --- a/build.gradle +++ b/build.gradle @@ -87,7 +87,7 @@ spotless { include '**/*.java' exclude '**/build/**', '**/build-*/**' } -// importOrder() + importOrder() // licenseHeader("/*\n" + // " * Copyright OpenSearch Contributors\n" + // " * SPDX-License-Identifier: Apache-2.0\n" + diff --git a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java index c32a3336c0..799dadcd2d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java @@ -7,23 +7,6 @@ package 
org.opensearch.sql.bwc; -import org.json.JSONObject; -import org.junit.Assert; -import org.opensearch.client.Request; -import org.opensearch.client.RequestOptions; -import org.opensearch.client.Response; -import org.opensearch.common.settings.Settings; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import org.opensearch.sql.legacy.TestsConstants; -import org.opensearch.test.rest.OpenSearchRestTestCase; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - import static org.opensearch.sql.legacy.TestUtils.createIndexByRestClient; import static org.opensearch.sql.legacy.TestUtils.isIndexExist; import static org.opensearch.sql.legacy.TestUtils.loadDataByRestClient; @@ -36,6 +19,22 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.TestUtils.getResponseBody; +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.json.JSONObject; +import org.junit.Assert; +import org.opensearch.client.Request; +import org.opensearch.client.RequestOptions; +import org.opensearch.client.Response; +import org.opensearch.common.settings.Settings; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.legacy.TestsConstants; +import org.opensearch.test.rest.OpenSearchRestTestCase; + public class SQLBackwardsCompatibilityIT extends SQLIntegTestCase { private static final ClusterType CLUSTER_TYPE = ClusterType.parse(System.getProperty("tests.rest.bwcsuite")); diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java index 889f60f4ad..9ec80c55a7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import com.google.common.collect.Maps; - import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java index c75bca15b1..1abe6ea109 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java @@ -7,11 +7,11 @@ package org.opensearch.sql.correctness.tests; import static java.util.Collections.emptyMap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.collect.ImmutableMap; import java.util.Map; diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java index 284e167d6b..3967d96658 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java @@ -6,9 +6,9 @@ package org.opensearch.sql.correctness.tests; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import org.junit.Test; import org.opensearch.sql.correctness.testset.TestDataSet; diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java 
b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java index 8ad9e6b921..1c97f743f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java @@ -6,8 +6,8 @@ package org.opensearch.sql.correctness.tests; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import org.junit.Test; import org.opensearch.sql.correctness.testset.TestQuerySet; diff --git a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java index 7691c00ea5..959621dbad 100644 --- a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java @@ -28,12 +28,12 @@ import org.junit.AfterClass; import org.junit.Assume; import org.junit.BeforeClass; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java index e568be0ae6..378fbda937 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java @@ -34,9 +34,9 @@ import org.opensearch.client.ResponseException; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import 
org.opensearch.common.xcontent.json.JsonXContentParser; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.core.rest.RestStatus; import org.opensearch.search.SearchHit; /** diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java index e057c58969..7b5961de35 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java @@ -6,6 +6,8 @@ package org.opensearch.sql.legacy; +import static java.util.Collections.unmodifiableList; + import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -40,8 +42,6 @@ import org.opensearch.common.util.io.IOUtils; import org.opensearch.test.rest.OpenSearchRestTestCase; -import static java.util.Collections.unmodifiableList; - /** * OpenSearch SQL integration test base class to support both security disabled and enabled OpenSearch cluster. * Allows interaction with multiple external test clusters using OpenSearch's {@link RestClient}. 
diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java index e6f567e2db..50440facb6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java @@ -45,9 +45,9 @@ import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.common.Strings; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.XContentBuilder; /** * SQL plugin integration test base class (migrated from SQLIntegTestCase) diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java index 7216c03d08..58e55c4101 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java @@ -6,35 +6,6 @@ package org.opensearch.sql.legacy; -import com.google.common.base.Strings; -import com.google.gson.Gson; -import org.apache.commons.lang3.StringUtils; -import org.json.JSONArray; -import org.json.JSONObject; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.opensearch.client.Request; -import org.opensearch.client.RequestOptions; -import org.opensearch.client.Response; -import org.opensearch.client.RestClient; -import org.opensearch.sql.common.setting.Settings; - -import javax.management.MBeanServerInvocationHandler; -import javax.management.ObjectName; -import javax.management.remote.JMXConnector; -import javax.management.remote.JMXConnectorFactory; -import javax.management.remote.JMXServiceURL; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import 
java.net.URLEncoder; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Locale; -import org.opensearch.sql.datasource.model.DataSourceMetadata; - import static com.google.common.base.Strings.isNullOrEmpty; import static org.opensearch.sql.legacy.TestUtils.createIndexByRestClient; import static org.opensearch.sql.legacy.TestUtils.getAccountIndexMapping; @@ -69,6 +40,34 @@ import static org.opensearch.sql.legacy.plugin.RestSqlAction.EXPLAIN_API_ENDPOINT; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; +import com.google.common.base.Strings; +import com.google.gson.Gson; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Locale; +import javax.management.MBeanServerInvocationHandler; +import javax.management.ObjectName; +import javax.management.remote.JMXConnector; +import javax.management.remote.JMXConnectorFactory; +import javax.management.remote.JMXServiceURL; +import org.apache.commons.lang3.StringUtils; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.opensearch.client.Request; +import org.opensearch.client.RequestOptions; +import org.opensearch.client.Response; +import org.opensearch.client.RestClient; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.datasource.model.DataSourceMetadata; + /** * OpenSearch Rest integration test base for SQL testing */ diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java index e827059f1b..646a38b011 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import org.json.JSONObject; - import org.junit.Test; public class TypeInformationIT extends SQLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java index 272e0a34d3..4fb61ae2e9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java @@ -13,13 +13,12 @@ import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.io.IOException; import java.time.LocalDate; import java.util.Arrays; import java.util.TimeZone; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.junit.After; import org.junit.Before; import org.junit.jupiter.api.Test; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index 5e51a60856..3f24b619f5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -5,17 +5,16 @@ package org.opensearch.sql.ppl; -import org.json.JSONObject; -import org.junit.Test; - -import java.io.IOException; - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; import static org.opensearch.sql.util.MatcherUtils.rows; import static 
org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.MatcherUtils.verifySome; +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Test; + public class DateTimeImplementationIT extends PPLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java index 0113d77cf2..6562c551da 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java @@ -9,7 +9,6 @@ import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; import java.io.IOException; - import org.json.JSONObject; import org.junit.Test; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java index 24319a0cb8..59aade8bbd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java @@ -5,14 +5,13 @@ package org.opensearch.sql.ppl; -import org.junit.Test; - -import java.io.IOException; - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_CALCS; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; +import java.io.IOException; +import org.junit.Test; + public class PositionFunctionIT extends PPLIntegTestCase { @Override public void init() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java index 9e05a2a919..7c57bd5481 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java @@ -8,7 
+8,6 @@ import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; import java.io.IOException; - import org.junit.Test; public class RelevanceFunctionIT extends PPLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java index abc0ad6f59..46111b902e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java @@ -8,7 +8,6 @@ import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; import java.io.IOException; - import org.json.JSONObject; import org.junit.Test; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java index b1fcbf7d1b..8ef8787597 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java @@ -31,11 +31,11 @@ import org.opensearch.sql.analysis.ExpressionAnalyzer; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.common.setting.Settings; -import org.opensearch.sql.datasources.service.DataSourceMetadataStorage; import org.opensearch.sql.datasource.DataSourceService; -import org.opensearch.sql.datasources.service.DataSourceServiceImpl; -import org.opensearch.sql.datasources.auth.DataSourceUserAuthorizationHelper; import org.opensearch.sql.datasource.model.DataSourceMetadata; +import org.opensearch.sql.datasources.auth.DataSourceUserAuthorizationHelper; +import org.opensearch.sql.datasources.service.DataSourceMetadataStorage; +import org.opensearch.sql.datasources.service.DataSourceServiceImpl; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.ExecutionEngine.QueryResponse; import org.opensearch.sql.executor.QueryManager; @@ -45,25 +45,25 @@ import 
org.opensearch.sql.expression.function.BuiltinFunctionRepository; import org.opensearch.sql.monitor.AlwaysHealthyMonitor; import org.opensearch.sql.monitor.ResourceMonitor; +import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.client.OpenSearchRestClient; import org.opensearch.sql.opensearch.executor.OpenSearchExecutionEngine; import org.opensearch.sql.opensearch.executor.protector.ExecutionProtector; import org.opensearch.sql.opensearch.executor.protector.OpenSearchExecutionProtector; +import org.opensearch.sql.opensearch.security.SecurityAccess; +import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.opensearch.storage.OpenSearchStorageEngine; import org.opensearch.sql.planner.Planner; import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; import org.opensearch.sql.ppl.antlr.PPLSyntaxParser; -import org.opensearch.sql.sql.SQLService; -import org.opensearch.sql.sql.antlr.SQLSyntaxParser; -import org.opensearch.sql.storage.StorageEngine; -import org.opensearch.sql.util.ExecuteOnCallerThreadQueryManager; -import org.opensearch.sql.opensearch.client.OpenSearchClient; -import org.opensearch.sql.opensearch.client.OpenSearchRestClient; -import org.opensearch.sql.opensearch.security.SecurityAccess; -import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.ppl.domain.PPLQueryRequest; import org.opensearch.sql.protocol.response.QueryResult; import org.opensearch.sql.protocol.response.format.SimpleJsonResponseFormatter; +import org.opensearch.sql.sql.SQLService; +import org.opensearch.sql.sql.antlr.SQLSyntaxParser; import org.opensearch.sql.storage.DataSourceFactory; +import org.opensearch.sql.storage.StorageEngine; +import org.opensearch.sql.util.ExecuteOnCallerThreadQueryManager; /** * Run PPL with query engine outside OpenSearch cluster. 
This IT doesn't require our plugin diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java index 8429ab3897..1ca21041a3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java @@ -16,7 +16,6 @@ import java.nio.file.Files; import java.nio.file.Paths; import java.util.Locale; - import org.json.JSONArray; import org.json.JSONObject; import org.junit.Test; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java index 5b6c742e28..dd99cf2e75 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java @@ -15,7 +15,6 @@ import java.io.IOException; import java.util.Locale; - import org.json.JSONObject; import org.junit.jupiter.api.Test; import org.opensearch.client.Request; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java index 308fe7cdcd..b719edd5b0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java @@ -5,14 +5,15 @@ package org.opensearch.sql.sql; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import java.io.IOException; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import java.io.IOException; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; + public class ConvertTZFunctionIT extends SQLIntegTestCase { diff --git 
a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java index aa2737cbac..3af4db89de 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java @@ -11,7 +11,6 @@ import java.io.IOException; import java.util.Locale; - import org.junit.Test; import org.opensearch.client.Request; import org.opensearch.client.Response; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java index 108687da27..e935b269f1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java @@ -14,14 +14,13 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.TestUtils.getResponseBody; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.io.IOException; import java.time.LocalDate; import java.util.Arrays; import java.util.Locale; import java.util.TimeZone; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.json.JSONObject; import org.junit.After; import org.junit.Before; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java index ff2c4c07a6..94a5b4fb16 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java @@ -5,16 +5,15 @@ package org.opensearch.sql.sql; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; - -import 
java.io.IOException; - import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import java.io.IOException; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; + public class DateTimeImplementationIT extends SQLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java index 0ab6d5c70f..d55972691c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java @@ -11,12 +11,12 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import com.google.common.collect.ImmutableMap; +import java.util.List; import org.json.JSONArray; import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.legacy.TestsConstants; -import java.util.List; public class HighlightFunctionIT extends SQLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java index f0e82adb6f..0dbb0404f9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java @@ -6,16 +6,15 @@ package org.opensearch.sql.sql; -import org.json.JSONObject; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; - -import java.io.IOException; - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; +import java.io.IOException; +import org.json.JSONObject; +import 
org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; + public class LikeQueryIT extends SQLIntegTestCase { @Override protected void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java index 2a34dabd79..e6f4e18468 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java @@ -6,12 +6,11 @@ package org.opensearch.sql.sql; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.io.IOException; import java.util.ArrayList; import java.util.List; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import lombok.SneakyThrows; import org.json.JSONArray; import org.json.JSONObject; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java index bd8949203c..224a1e95e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java @@ -11,7 +11,6 @@ import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ONLINE; import java.io.IOException; - import lombok.SneakyThrows; import org.json.JSONArray; import org.json.JSONObject; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java index f51a3a0977..d0587eab7f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java @@ -5,16 +5,16 @@ package org.opensearch.sql.sql; -import org.json.JSONObject; -import 
org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import org.opensearch.sql.legacy.TestsConstants; - import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import org.json.JSONObject; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.legacy.TestsConstants; + public class PositionFunctionIT extends SQLIntegTestCase { @Override diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java index 03df7d0e29..e824b1ab2b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java @@ -5,21 +5,20 @@ package org.opensearch.sql.sql; -import org.json.JSONObject; -import org.junit.Assert; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import org.opensearch.sql.legacy.TestsConstants; - -import java.io.IOException; -import java.util.Locale; - import static org.hamcrest.Matchers.containsString; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import java.io.IOException; +import java.util.Locale; +import org.json.JSONObject; +import org.junit.Assert; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.legacy.TestsConstants; + public class ScoreQueryIT extends SQLIntegTestCase { @Override protected void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java 
b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java index afacc5fefd..44f4e5ca9c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java @@ -9,7 +9,6 @@ import static org.opensearch.sql.protocol.response.format.JsonResponseFormatter.CONTENT_TYPE; import java.io.IOException; - import org.json.JSONObject; import org.junit.Test; import org.opensearch.client.Request; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java index aad39c4074..4738d233bf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java @@ -33,13 +33,13 @@ import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasources.service.DataSourceServiceImpl; import org.opensearch.sql.executor.ExecutionEngine; -import org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.pagination.Cursor; +import org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.opensearch.client.OpenSearchClient; import org.opensearch.sql.opensearch.client.OpenSearchRestClient; -import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.opensearch.storage.OpenSearchIndex; import org.opensearch.sql.planner.PlanContext; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java index ba77ea1c2f..e54000f80d 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java @@ -6,17 +6,16 @@ package org.opensearch.sql.sql; -import org.json.JSONObject; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; - -import java.io.IOException; - import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; + public class StringLiteralIT extends SQLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java index 3f6a3afb72..030c07c5fa 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java @@ -7,15 +7,14 @@ package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; +import static org.opensearch.sql.util.MatcherUtils.rows; +import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import java.io.IOException; import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; -import static org.opensearch.sql.util.MatcherUtils.rows; -import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; - public class WildcardQueryIT extends SQLIntegTestCase { @Override protected void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java index ac042b4a47..b586125af3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java 
+++ b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifyDataRowsInOrder; - import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; diff --git a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java index f5fbcf9666..4cb2aa299d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java @@ -6,6 +6,7 @@ package org.opensearch.sql.util; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; @@ -17,7 +18,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItems; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.base.Strings; import com.google.gson.JsonParser; diff --git a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java index c347ea5244..ad8afc47ca 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java @@ -15,10 +15,10 @@ import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.executor.ExecutionEngine; -import org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.executor.QueryManager; import org.opensearch.sql.executor.QueryService; import org.opensearch.sql.executor.execution.QueryPlanFactory; +import 
org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.expression.function.BuiltinFunctionRepository; import org.opensearch.sql.monitor.AlwaysHealthyMonitor; import org.opensearch.sql.monitor.ResourceMonitor; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java index 1df0036bab..d251585f89 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java @@ -15,9 +15,9 @@ import org.opensearch.OpenSearchException; import org.opensearch.client.Client; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.legacy.esdomain.LocalClusterState; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java index 35ab9facb3..220903d49d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java @@ -18,11 +18,11 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchScrollRequest; import org.opensearch.client.Client; +import org.opensearch.core.rest.RestStatus; import org.opensearch.index.reindex.BulkIndexByScrollResponseContentListener; import org.opensearch.index.reindex.DeleteByQueryRequest; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import 
org.opensearch.rest.action.RestStatusToXContentListener; import org.opensearch.search.SearchHits; import org.opensearch.sql.legacy.exception.SqlParseException; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java index 591319c74c..be6677a405 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java @@ -8,18 +8,17 @@ import java.io.IOException; import java.util.List; - import org.opensearch.action.admin.indices.get.GetIndexRequest; import org.opensearch.action.admin.indices.get.GetIndexResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestResponse; -import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.action.RestBuilderListener; import org.opensearch.sql.legacy.antlr.semantic.SemanticAnalysisException; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java index ae7623e3a2..da99652e13 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java @@ -10,9 +10,9 @@ import java.util.List; import java.util.Map; import org.opensearch.client.Client; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import 
org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.legacy.executor.QueryActionElasticExecutor; import org.opensearch.sql.legacy.executor.RestExecutor; import org.opensearch.sql.legacy.query.QueryAction; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java index 70cdd91452..a22d96c133 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java @@ -14,6 +14,7 @@ import java.util.Set; import java.util.stream.Collectors; import org.opensearch.common.document.DocumentField; +import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.aggregations.Aggregation; @@ -21,7 +22,6 @@ import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.SingleBucketAggregation; import org.opensearch.search.aggregations.metrics.ExtendedStats; -import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation; import org.opensearch.search.aggregations.metrics.Percentile; import org.opensearch.search.aggregations.metrics.Percentiles; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java index 92703dde2a..9b8e70c168 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java @@ -13,9 +13,9 @@ import org.apache.logging.log4j.Logger; import 
org.opensearch.client.Client; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.legacy.esdomain.LocalClusterState; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java index 65fd6b7022..411fb90a24 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java @@ -13,9 +13,9 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; import org.opensearch.core.common.Strings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.legacy.cursor.Cursor; import org.opensearch.sql.legacy.cursor.DefaultCursor; import org.opensearch.sql.legacy.exception.SqlParseException; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java index f7d1fbf641..e8536567dd 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java @@ -22,11 +22,11 @@ import org.opensearch.client.Client; import org.opensearch.common.document.DocumentField; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.rest.RestStatus; import 
org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.sort.FieldSortBuilder; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java index aa6ea05389..7269e271f4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java @@ -17,10 +17,10 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent.Params; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.SearchHit; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java index 9ee9a8a683..cd8056aed1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java @@ -16,11 +16,11 @@ import org.apache.logging.log4j.Logger; import org.opensearch.client.node.NodeClient; import org.opensearch.common.inject.Injector; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import 
org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.common.utils.QueryContext; @@ -30,11 +30,11 @@ import org.opensearch.sql.legacy.metrics.Metrics; import org.opensearch.sql.opensearch.security.SecurityAccess; import org.opensearch.sql.protocol.response.QueryResult; +import org.opensearch.sql.protocol.response.format.CommandResponseFormatter; import org.opensearch.sql.protocol.response.format.CsvResponseFormatter; import org.opensearch.sql.protocol.response.format.Format; import org.opensearch.sql.protocol.response.format.JdbcResponseFormatter; import org.opensearch.sql.protocol.response.format.JsonResponseFormatter; -import org.opensearch.sql.protocol.response.format.CommandResponseFormatter; import org.opensearch.sql.protocol.response.format.RawResponseFormatter; import org.opensearch.sql.protocol.response.format.ResponseFormatter; import org.opensearch.sql.sql.SQLService; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java index 9a15cc9e21..69ed469fed 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java @@ -28,12 +28,12 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.common.inject.Injector; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.index.IndexNotFoundException; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.antlr.SyntaxCheckException; import 
org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.exception.ExpressionEvaluationException; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java index d300ea7177..cf3a3e3f96 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java @@ -17,11 +17,11 @@ import org.apache.logging.log4j.Logger; import org.opensearch.client.node.NodeClient; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestController; import org.opensearch.rest.RestRequest; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.legacy.executor.format.ErrorMessageFactory; import org.opensearch.sql.legacy.metrics.Metrics; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java index c7b28d8a0a..195abcadec 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java @@ -12,8 +12,8 @@ import org.opensearch.action.ActionRequestBuilder; import org.opensearch.action.ActionResponse; import org.opensearch.action.search.MultiSearchRequest; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java index 899e0f5e1d..c14d8f3012 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java @@ -10,8 +10,8 @@ import java.io.IOException; import org.json.JSONObject; import org.json.JSONStringer; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.domain.Condition; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java index 0c9caab03d..9c3f1104a7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java @@ -18,14 +18,16 @@ import java.util.Map; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; -import org.opensearch.core.common.ParsingException; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.json.JsonXContentParser; +import org.opensearch.core.common.ParsingException; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder; +import 
org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder; import org.opensearch.join.aggregations.JoinAggregationBuilders; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -35,7 +37,6 @@ import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.InternalOrder; import org.opensearch.search.aggregations.bucket.filter.FilterAggregationBuilder; -import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -45,7 +46,6 @@ import org.opensearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder; import org.opensearch.search.aggregations.metrics.PercentilesAggregationBuilder; import org.opensearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; import org.opensearch.search.aggregations.metrics.TopHitsAggregationBuilder; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java index af2a1e90cf..2a5e356073 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java @@ -15,8 +15,8 @@ import org.opensearch.action.ActionRequestBuilder; import org.opensearch.action.ActionResponse; import org.opensearch.action.search.SearchRequestBuilder; -import 
org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.domain.Field; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java index 6dd8ff6e89..840acb1b17 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java @@ -9,7 +9,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Objects; - import org.opensearch.action.search.ClearScrollResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java index 1d594db2bc..6f6b09b737 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java @@ -6,10 +6,10 @@ package org.opensearch.sql.legacy.antlr.semantic; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DATE; import static 
org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DOUBLE; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java index d9e9271728..05d8b048e2 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java @@ -6,10 +6,10 @@ package org.opensearch.sql.legacy.antlr.semantic.scope; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DATE; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.KEYWORD; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java index 3ccc75da62..fcbc9bf7b6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java @@ -6,10 +6,10 @@ package org.opensearch.sql.legacy.antlr.semantic.scope; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; import static 
org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DATE; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.KEYWORD; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java index fe8b25ed1c..cca69d8af9 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java @@ -7,8 +7,8 @@ package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; -import static org.hamcrest.Matchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; import static org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import com.google.common.collect.ImmutableMap; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java index cfb70dc83c..d1e032ba1c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java @@ -6,10 +6,10 @@ package org.opensearch.sql.legacy.unittest.cursor; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import java.util.ArrayList; import java.util.Collections; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java index 
150afcacd3..2555df4f13 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java @@ -6,8 +6,8 @@ package org.opensearch.sql.legacy.unittest.expression.model; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; import org.junit.Rule; import org.junit.Test; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java index 13344eb204..456889fe53 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java @@ -38,9 +38,9 @@ import org.opensearch.action.search.SearchScrollRequestBuilder; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterName; -import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.sql.legacy.domain.JoinSelect; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java index 0b57d460f3..95eed26670 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java @@ -6,6 +6,7 @@ package org.opensearch.sql.legacy.util; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static 
org.hamcrest.Matchers.arrayContainingInAnyOrder; @@ -17,7 +18,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItems; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.base.Strings; import java.util.ArrayList; From 88f36bd4ee38a8a6c233eb0677d664a1ebe92c88 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 9 Aug 2023 08:59:24 -0700 Subject: [PATCH 06/42] Fixing string format change (#334) (#1943) * Fixing strings change from main OS repo. * Fixing issues. * Fixing changed syntax to toStirng. * Fixing changed syntax to toStirng. * add tosttring fix to RestSQLQueryActionCursorFallbackTest * added unused function --------- Signed-off-by: Mitchell Gale --- .../test/java/org/opensearch/sql/legacy/RestIntegTestCase.java | 3 +-- .../legacy/query/planner/physical/node/join/BlockHashJoin.java | 2 +- .../sql/legacy/query/planner/physical/node/scroll/Scroll.java | 2 +- .../org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java | 3 +-- .../legacy/plugin/RestSQLQueryActionCursorFallbackTest.java | 3 +-- .../org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java | 2 +- .../opensearch/sql/plugin/rest/RestQuerySettingsAction.java | 3 +-- 7 files changed, 7 insertions(+), 11 deletions(-) diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java index 50440facb6..dd48d82114 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java @@ -44,7 +44,6 @@ import org.junit.Before; import org.opensearch.client.Request; import org.opensearch.client.Response; -import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.XContentBuilder; @@ -164,7 
+163,7 @@ protected static void updateClusterSetting(String settingKey, Object value, bool .endObject() .endObject(); Request request = new Request("PUT", "_cluster/settings"); - request.setJsonEntity(Strings.toString(builder)); + request.setJsonEntity(builder.toString()); Response response = client().performRequest(request); Assert .assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java index cfb6265e52..19c0ae41d2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java @@ -16,8 +16,8 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.sql.legacy.query.planner.core.ExecuteParams; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java index 840acb1b17..2d781d7c3d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java @@ -12,9 +12,9 @@ import org.opensearch.action.search.ClearScrollResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; -import org.opensearch.common.Strings; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.search.SearchHit; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java index 827f968bde..ecc86877ee 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java @@ -8,7 +8,6 @@ import com.fasterxml.jackson.core.JsonFactory; import java.io.IOException; -import org.opensearch.common.Strings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.json.JsonXContentParser; @@ -37,7 +36,7 @@ public static String format(String jsonString) throws IOException { ){ contentBuilder.copyCurrentStructure(contentParser); } - return Strings.toString(contentBuilder); + return contentBuilder.toString(); } private JsonPrettyFormatter() { diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java index 2afcdc93f4..64e5d161b7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java @@ -20,7 +20,6 @@ import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.Strings; import org.opensearch.common.inject.Injector; import org.opensearch.common.inject.ModulesBuilder; import org.opensearch.common.util.concurrent.ThreadContext; @@ -93,7 +92,7 @@ private static 
SQLQueryRequest createSqlQueryRequest(String query, Optional source) { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); Settings.Builder settingsBuilder = Settings.builder() - .loadFromSource(Strings.toString(builder), builder.contentType()); + .loadFromSource(builder.toString(), builder.contentType()); settingsBuilder.keys().removeIf(key -> { for (String prefix : SETTINGS_PREFIX) { if (key.startsWith(prefix)) { From aa88b4143db0a7eb0509d6cc15d82de8507ebbe0 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 9 Aug 2023 10:55:43 -0700 Subject: [PATCH 07/42] [Spotless] Applying Google Code Format for core/src/main files #1 (#1930) * # This is a combination of 9 commits. # This is the 1st commit message: Fix create_index/create_index_with_IOException issue caused by OpenSearch PR change (#1899) * Added setDefaultMediaType for create_index and create_index_with_IOException Signed-off-by: Mitchell Gale (cherry picked from commit 7b932a7c882cbabbc9fcafcd864a39bb161bd122) Signed-off-by: Mitchell Gale Resolving merge conflicts for pre tag in java docs. Signed-off-by: Mitchell Gale running spotless check on newly pre tagged javadocs. Signed-off-by: Mitchell Gale Converts java doc table to proper java doc table. Signed-off-by: Mitchell Gale Resolving merge conflicts for pre tag in java docs 2 Signed-off-by: Mitchell Gale running spotless check on newly pre tagged javadocs. Signed-off-by: Mitchell Gale cherry pick 60c0018373b25e674d15c430f9754bcf2d02fb2a Signed-off-by: Mitchell Gale Addressed PR comment for table format in AbstractExprValue. Signed-off-by: Mitchell Gale Removed pre tag from ExpressionReferenceOptimizer. 
Signed-off-by: Mitchell Gale Removed pre tag from AstDSL.java Signed-off-by: Mitchell Gale Removed pre tag from AstDSL.java Signed-off-by: Mitchell Gale Removed pre tag from SelectExpressionAnalyzer.java Signed-off-by: Mitchell Gale fixed java doc in QualifiedName.java Signed-off-by: Mitchell Gale Removing checkstyle test for core and added spotless for relevant directories. Signed-off-by: Mitchell Gale # This is the commit message #2: Fixing spacing around headers in ExpressionReferenceOptimizer.java SelectExpressionAnalyzer.java Signed-off-by: Mitchell Gale # This is the commit message #3: Fix breaking changes. Disable some flaky tests in legacy. Signed-off-by: Yury-Fridlyand (cherry picked from commit 809e6567e77e5f47a93f3475e4badb7e3053b88c) Signed-off-by: Mitchell Gale # This is the commit message #4: Resolving merge conflicts for pre tag in java docs. Signed-off-by: Mitchell Gale # This is the commit message #5: running spotless check on newly pre tagged javadocs. Signed-off-by: Mitchell Gale # This is the commit message #6: Fixed java doc spelling and improving string concatination. Signed-off-by: Mitchell Gale # This is the commit message #7: improving string concatination. Signed-off-by: Mitchell Gale # This is the commit message #8: Improving failure format on some functions. Signed-off-by: Mitchell Gale # This is the commit message #9: spotless apply and fix of build.gradle Signed-off-by: Mitchell Gale * parent 496fe8e5df5cfa3d15f8c6312bd89d54a51d6a83 author Mitchell Gale 1691080710 -0700 committer Mitchell Gale 1691080766 -0700 Fix create_index/create_index_with_IOException issue caused by OpenSearch PR change (#1899) * Added setDefaultMediaType for create_index and create_index_with_IOException Signed-off-by: Mitchell Gale (cherry picked from commit 7b932a7c882cbabbc9fcafcd864a39bb161bd122) Signed-off-by: Mitchell Gale Resolving merge conflicts for pre tag in java docs. Signed-off-by: Mitchell Gale running spotless check on newly pre tagged javadocs. 
Signed-off-by: Mitchell Gale Converts java doc table to proper java doc table. Signed-off-by: Mitchell Gale Resolving merge conflicts for pre tag in java docs 2 Signed-off-by: Mitchell Gale running spotless check on newly pre tagged javadocs. Signed-off-by: Mitchell Gale cherry pick 60c0018373b25e674d15c430f9754bcf2d02fb2a Signed-off-by: Mitchell Gale Addressed PR comment for table format in AbstractExprValue. Signed-off-by: Mitchell Gale Removed pre tag from ExpressionReferenceOptimizer. Signed-off-by: Mitchell Gale Removed pre tag from AstDSL.java Signed-off-by: Mitchell Gale Removed pre tag from AstDSL.java Signed-off-by: Mitchell Gale Removed pre tag from SelectExpressionAnalyzer.java Signed-off-by: Mitchell Gale fixed java doc in QualifiedName.java Signed-off-by: Mitchell Gale Removing checkstyle test for core and added spotless for relevant directories. Signed-off-by: Mitchell Gale Fixing spacing around headers in ExpressionReferenceOptimizer.java SelectExpressionAnalyzer.java Signed-off-by: Mitchell Gale Fix breaking changes. Disable some flaky tests in legacy. Signed-off-by: Yury-Fridlyand (cherry picked from commit 809e6567e77e5f47a93f3475e4badb7e3053b88c) Signed-off-by: Mitchell Gale Resolving merge conflicts for pre tag in java docs. Signed-off-by: Mitchell Gale running spotless check on newly pre tagged javadocs. Signed-off-by: Mitchell Gale Fixed java doc spelling and improving string concatination. Signed-off-by: Mitchell Gale improving string concatination. Signed-off-by: Mitchell Gale Improving failure format on some functions. Signed-off-by: Mitchell Gale spotless apply and fix of build.gradle Signed-off-by: Mitchell Gale Resolving merge conflicts for pre tag in java docs. Signed-off-by: Mitchell Gale running spotless check on newly pre tagged javadocs. Signed-off-by: Mitchell Gale Converts java doc table to proper java doc table. 
Signed-off-by: Mitchell Gale Removing unused import in OpenSearchRestClientTest.java Signed-off-by: Mitchell Gale * running spotless apply. Signed-off-by: Mitchell Gale * Addressed comments in PR 5. Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale --- build.gradle | 7 +- core/build.gradle | 3 + .../sql/analysis/AnalysisContext.java | 21 +- .../org/opensearch/sql/analysis/Analyzer.java | 241 +++++++++-------- ...ataSourceSchemaIdentifierNameResolver.java | 20 +- .../sql/analysis/ExpressionAnalyzer.java | 142 +++++----- .../ExpressionReferenceOptimizer.java | 83 +++--- .../sql/analysis/HighlightAnalyzer.java | 5 +- .../sql/analysis/NamedExpressionAnalyzer.java | 15 +- .../sql/analysis/NestedAnalyzer.java | 68 ++--- .../sql/analysis/QualifierAnalyzer.java | 22 +- .../analysis/SelectExpressionAnalyzer.java | 97 ++++--- .../sql/analysis/TypeEnvironment.java | 36 +-- .../analysis/WindowExpressionAnalyzer.java | 57 ++-- .../sql/analysis/symbol/Namespace.java | 7 +- .../sql/analysis/symbol/Symbol.java | 5 +- .../sql/analysis/symbol/SymbolTable.java | 78 +++--- .../sql/ast/AbstractNodeVisitor.java | 6 +- .../java/org/opensearch/sql/ast/Node.java | 5 +- .../org/opensearch/sql/ast/dsl/AstDSL.java | 10 +- .../sql/ast/expression/AggregateFunction.java | 9 +- .../opensearch/sql/ast/expression/Alias.java | 21 +- .../sql/ast/expression/AllFields.java | 8 +- .../opensearch/sql/ast/expression/And.java | 5 +- .../sql/ast/expression/Argument.java | 5 +- .../sql/ast/expression/AttributeList.java | 8 +- .../sql/ast/expression/Between.java | 4 +- .../opensearch/sql/ast/expression/Case.java | 18 +- .../opensearch/sql/ast/expression/Cast.java | 19 +- .../sql/ast/expression/Compare.java | 1 - .../sql/ast/expression/DataType.java | 9 +- .../sql/ast/expression/EqualTo.java | 11 +- .../opensearch/sql/ast/expression/Field.java | 9 +- .../sql/ast/expression/Function.java | 13 +- .../sql/ast/expression/HighlightFunction.java | 4 +- .../org/opensearch/sql/ast/expression/In.java 
| 8 +- .../sql/ast/expression/Interval.java | 1 - .../sql/ast/expression/IntervalUnit.java | 7 +- .../opensearch/sql/ast/expression/Let.java | 5 +- .../sql/ast/expression/Literal.java | 6 +- .../opensearch/sql/ast/expression/Map.java | 5 +- .../ast/expression/NestedAllTupleFields.java | 8 +- .../opensearch/sql/ast/expression/Not.java | 5 +- .../org/opensearch/sql/ast/expression/Or.java | 5 +- .../sql/ast/expression/ParseMethod.java | 4 +- .../sql/ast/expression/QualifiedName.java | 36 ++- .../ast/expression/RelevanceFieldList.java | 12 +- .../sql/ast/expression/ScoreFunction.java | 4 +- .../opensearch/sql/ast/expression/Span.java | 6 +- .../sql/ast/expression/SpanUnit.java | 5 +- .../ast/expression/UnresolvedArgument.java | 5 +- .../ast/expression/UnresolvedAttribute.java | 6 +- .../ast/expression/UnresolvedExpression.java | 1 - .../opensearch/sql/ast/expression/When.java | 14 +- .../sql/ast/expression/WindowFunction.java | 2 - .../opensearch/sql/ast/expression/Xor.java | 5 +- .../opensearch/sql/ast/statement/Explain.java | 4 +- .../opensearch/sql/ast/statement/Query.java | 4 +- .../sql/ast/statement/Statement.java | 4 +- .../java/org/opensearch/sql/ast/tree/AD.java | 1 - .../opensearch/sql/ast/tree/Aggregation.java | 31 +-- .../opensearch/sql/ast/tree/CloseCursor.java | 9 +- .../org/opensearch/sql/ast/tree/Dedupe.java | 5 +- .../org/opensearch/sql/ast/tree/Eval.java | 5 +- .../opensearch/sql/ast/tree/FetchCursor.java | 8 +- .../org/opensearch/sql/ast/tree/Filter.java | 5 +- .../org/opensearch/sql/ast/tree/Head.java | 5 +- .../org/opensearch/sql/ast/tree/Kmeans.java | 1 - .../org/opensearch/sql/ast/tree/Limit.java | 2 - .../java/org/opensearch/sql/ast/tree/ML.java | 9 +- .../org/opensearch/sql/ast/tree/Paginate.java | 8 +- .../org/opensearch/sql/ast/tree/Parse.java | 25 +- .../org/opensearch/sql/ast/tree/Project.java | 12 +- .../org/opensearch/sql/ast/tree/RareTopN.java | 5 +- .../org/opensearch/sql/ast/tree/Relation.java | 22 +- 
.../sql/ast/tree/RelationSubquery.java | 9 +- .../org/opensearch/sql/ast/tree/Rename.java | 1 - .../org/opensearch/sql/ast/tree/Sort.java | 18 +- .../sql/ast/tree/TableFunction.java | 8 +- .../sql/ast/tree/UnresolvedPlan.java | 5 +- .../org/opensearch/sql/ast/tree/Values.java | 6 +- .../data/model/AbstractExprNumberValue.java | 5 +- .../sql/data/model/AbstractExprValue.java | 51 ++-- .../sql/data/model/ExprBooleanValue.java | 5 +- .../sql/data/model/ExprByteValue.java | 5 +- .../sql/data/model/ExprCollectionValue.java | 13 +- .../sql/data/model/ExprDateValue.java | 13 +- .../sql/data/model/ExprDatetimeValue.java | 20 +- .../sql/data/model/ExprDoubleValue.java | 5 +- .../sql/data/model/ExprFloatValue.java | 5 +- .../sql/data/model/ExprIntegerValue.java | 5 +- .../sql/data/model/ExprIntervalValue.java | 11 +- .../sql/data/model/ExprLongValue.java | 5 +- .../sql/data/model/ExprMissingValue.java | 16 +- .../sql/data/model/ExprNullValue.java | 12 +- .../sql/data/model/ExprShortValue.java | 5 +- .../sql/data/model/ExprStringValue.java | 13 +- .../sql/data/model/ExprTimeValue.java | 17 +- .../sql/data/model/ExprTimestampValue.java | 29 +- .../sql/data/model/ExprTupleValue.java | 13 +- .../opensearch/sql/data/model/ExprValue.java | 81 ++---- .../sql/data/model/ExprValueUtils.java | 32 +-- .../sql/data/type/ExprCoreType.java | 56 ++-- .../opensearch/sql/data/type/ExprType.java | 28 +- .../sql/data/type/WideningTypeRule.java | 27 +- .../sql/data/utils/ExprValueOrdering.java | 1 - .../data/utils/NaturalExprValueOrdering.java | 1 - .../utils/NullsFirstExprValueOrdering.java | 1 - .../utils/NullsLastExprValueOrdering.java | 1 - .../data/utils/ReverseExprValueOrdering.java | 1 - .../sql/datasource/DataSourceService.java | 23 +- .../sql/datasource/model/DataSource.java | 8 +- .../datasource/model/DataSourceMetadata.java | 17 +- .../sql/ast/expression/CastTest.java | 2 - .../sql/ast/expression/QualifiedNameTest.java | 2 - .../sql/data/model/DateTimeValueTest.java | 92 ++++--- 
.../sql/data/model/ExprBooleanValueTest.java | 9 +- .../data/model/ExprCollectionValueTest.java | 6 +- .../sql/data/model/ExprIntervalValueTest.java | 9 +- .../sql/data/model/ExprMissingValueTest.java | 7 +- .../sql/data/model/ExprNullValueTest.java | 6 +- .../sql/data/model/ExprNumberValueTest.java | 6 +- .../sql/data/model/ExprStringValueTest.java | 8 +- .../sql/data/model/ExprTupleValueTest.java | 5 +- .../sql/data/model/ExprValueCompareTest.java | 248 +++++++++--------- .../sql/data/model/ExprValueUtilsTest.java | 218 +++++++-------- .../sql/data/type/ExprTypeTest.java | 1 - .../sql/data/utils/ExprValueOrderingTest.java | 7 +- .../NullsFirstExprValueOrderingTest.java | 1 - .../utils/NullsLastExprValueOrderingTest.java | 1 - .../utils/ReverseExprValueOrderingTest.java | 1 - .../expression/datetime/TimestampTest.java | 2 +- 132 files changed, 1104 insertions(+), 1486 deletions(-) diff --git a/build.gradle b/build.gradle index 6d30e54354..2f1ce25212 100644 --- a/build.gradle +++ b/build.gradle @@ -84,7 +84,10 @@ repositories { spotless { java { target fileTree('.') { - include '**/*.java' + include 'core/src/main/java/org/opensearch/sql/analysis/**/*.java', + 'core/src/test/java/org/opensearch/sql/data/**/*.java', + 'core/src/test/java/org/opensearch/sql/datasource/**/*.java', + 'core/src/test/java/org/opensearch/sql/ast/**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() @@ -95,7 +98,7 @@ spotless { removeUnusedImports() trimTrailingWhitespace() endWithNewline() -// googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') + googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') } } diff --git a/core/build.gradle b/core/build.gradle index 8205638138..89fac623f2 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -34,6 +34,9 @@ repositories { mavenCentral() } +checkstyleMain.ignoreFailures = true +checkstyleTest.ignoreFailures = true + 
pitest { targetClasses = ['org.opensearch.sql.*'] pitestVersion = '1.9.0' diff --git a/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java b/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java index 4704d0566b..f1f29e9b38 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java +++ b/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import java.util.ArrayList; @@ -13,19 +12,14 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.function.FunctionProperties; -/** - * The context used for Analyzer. - */ +/** The context used for Analyzer. */ public class AnalysisContext { - /** - * Environment stack for symbol scope management. - */ + /** Environment stack for symbol scope management. */ private TypeEnvironment environment; - @Getter - private final List namedParseExpressions; - @Getter - private final FunctionProperties functionProperties; + @Getter private final List namedParseExpressions; + + @Getter private final FunctionProperties functionProperties; public AnalysisContext() { this(new TypeEnvironment(null)); @@ -33,6 +27,7 @@ public AnalysisContext() { /** * Class CTOR. + * * @param environment Env to set to a new instance. */ public AnalysisContext(TypeEnvironment environment) { @@ -41,9 +36,7 @@ public AnalysisContext(TypeEnvironment environment) { this.functionProperties = new FunctionProperties(); } - /** - * Push a new environment. - */ + /** Push a new environment. 
*/ public void push() { environment = new TypeEnvironment(environment); } diff --git a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java index 370dd1a3f1..ad3713ec9a 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; @@ -121,9 +120,7 @@ public class Analyzer extends AbstractNodeVisitor private final BuiltinFunctionRepository repository; - /** - * Constructor. - */ + /** Constructor. */ public Analyzer( ExpressionAnalyzer expressionAnalyzer, DataSourceService dataSourceService, @@ -142,8 +139,8 @@ public LogicalPlan analyze(UnresolvedPlan unresolved, AnalysisContext context) { @Override public LogicalPlan visitRelation(Relation node, AnalysisContext context) { QualifiedName qualifiedName = node.getTableQualifiedName(); - DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver - = new DataSourceSchemaIdentifierNameResolver(dataSourceService, qualifiedName.getParts()); + DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver = + new DataSourceSchemaIdentifierNameResolver(dataSourceService, qualifiedName.getParts()); String tableName = dataSourceSchemaIdentifierNameResolver.getIdentifierName(); context.push(); TypeEnvironment curEnv = context.peek(); @@ -151,28 +148,30 @@ public LogicalPlan visitRelation(Relation node, AnalysisContext context) { if (DATASOURCES_TABLE_NAME.equals(tableName)) { table = new DataSourceTable(dataSourceService); } else { - table = dataSourceService - .getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName()) - .getStorageEngine() - .getTable(new DataSourceSchemaName( - 
dataSourceSchemaIdentifierNameResolver.getDataSourceName(), - dataSourceSchemaIdentifierNameResolver.getSchemaName()), - dataSourceSchemaIdentifierNameResolver.getIdentifierName()); + table = + dataSourceService + .getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName()) + .getStorageEngine() + .getTable( + new DataSourceSchemaName( + dataSourceSchemaIdentifierNameResolver.getDataSourceName(), + dataSourceSchemaIdentifierNameResolver.getSchemaName()), + dataSourceSchemaIdentifierNameResolver.getIdentifierName()); } table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v)); - table.getReservedFieldTypes().forEach( - (k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v) - ); + table + .getReservedFieldTypes() + .forEach((k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)); // Put index name or its alias in index namespace on type environment so qualifier // can be removed when analyzing qualified name. The value (expr type) here doesn't matter. - curEnv.define(new Symbol(Namespace.INDEX_NAME, - (node.getAlias() == null) ? tableName : node.getAlias()), STRUCT); + curEnv.define( + new Symbol(Namespace.INDEX_NAME, (node.getAlias() == null) ? 
tableName : node.getAlias()), + STRUCT); return new LogicalRelation(tableName, table); } - @Override public LogicalPlan visitRelationSubquery(RelationSubquery node, AnalysisContext context) { LogicalPlan subquery = analyze(node.getChild().get(0), context); @@ -188,30 +187,41 @@ public LogicalPlan visitRelationSubquery(RelationSubquery node, AnalysisContext @Override public LogicalPlan visitTableFunction(TableFunction node, AnalysisContext context) { QualifiedName qualifiedName = node.getFunctionName(); - DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver - = new DataSourceSchemaIdentifierNameResolver(this.dataSourceService, - qualifiedName.getParts()); - - FunctionName functionName - = FunctionName.of(dataSourceSchemaIdentifierNameResolver.getIdentifierName()); - List arguments = node.getArguments().stream() - .map(unresolvedExpression -> this.expressionAnalyzer.analyze(unresolvedExpression, context)) - .collect(Collectors.toList()); - TableFunctionImplementation tableFunctionImplementation - = (TableFunctionImplementation) repository.compile(context.getFunctionProperties(), - dataSourceService - .getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName()) - .getStorageEngine().getFunctions(), functionName, arguments); + DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver = + new DataSourceSchemaIdentifierNameResolver( + this.dataSourceService, qualifiedName.getParts()); + + FunctionName functionName = + FunctionName.of(dataSourceSchemaIdentifierNameResolver.getIdentifierName()); + List arguments = + node.getArguments().stream() + .map( + unresolvedExpression -> + this.expressionAnalyzer.analyze(unresolvedExpression, context)) + .collect(Collectors.toList()); + TableFunctionImplementation tableFunctionImplementation = + (TableFunctionImplementation) + repository.compile( + context.getFunctionProperties(), + dataSourceService + 
.getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName()) + .getStorageEngine() + .getFunctions(), + functionName, + arguments); context.push(); TypeEnvironment curEnv = context.peek(); Table table = tableFunctionImplementation.applyArguments(); table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v)); - table.getReservedFieldTypes().forEach( - (k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v) - ); - curEnv.define(new Symbol(Namespace.INDEX_NAME, - dataSourceSchemaIdentifierNameResolver.getIdentifierName()), STRUCT); - return new LogicalRelation(dataSourceSchemaIdentifierNameResolver.getIdentifierName(), + table + .getReservedFieldTypes() + .forEach((k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)); + curEnv.define( + new Symbol( + Namespace.INDEX_NAME, dataSourceSchemaIdentifierNameResolver.getIdentifierName()), + STRUCT); + return new LogicalRelation( + dataSourceSchemaIdentifierNameResolver.getIdentifierName(), tableFunctionImplementation.applyArguments()); } @@ -233,30 +243,28 @@ public LogicalPlan visitFilter(Filter node, AnalysisContext context) { } /** - * Ensure NESTED function is not used in GROUP BY, and HAVING clauses. - * Fallback to legacy engine. Can remove when support is added for NESTED function in WHERE, - * GROUP BY, ORDER BY, and HAVING clauses. + * Ensure NESTED function is not used in GROUP BY, and HAVING clauses. Fallback to legacy engine. + * Can remove when support is added for NESTED function in WHERE, GROUP BY, ORDER BY, and HAVING + * clauses. 
+ * * @param condition : Filter condition */ private void verifySupportsCondition(Expression condition) { if (condition instanceof FunctionExpression) { - if (((FunctionExpression) condition).getFunctionName().getFunctionName().equalsIgnoreCase( - BuiltinFunctionName.NESTED.name() - )) { + if (((FunctionExpression) condition) + .getFunctionName() + .getFunctionName() + .equalsIgnoreCase(BuiltinFunctionName.NESTED.name())) { throw new SyntaxCheckException( "Falling back to legacy engine. Nested function is not supported in WHERE," - + " GROUP BY, and HAVING clauses." - ); + + " GROUP BY, and HAVING clauses."); } - ((FunctionExpression)condition).getArguments().stream() - .forEach(e -> verifySupportsCondition(e) - ); + ((FunctionExpression) condition) + .getArguments().stream().forEach(e -> verifySupportsCondition(e)); } } - /** - * Build {@link LogicalRename}. - */ + /** Build {@link LogicalRename}. */ @Override public LogicalPlan visitRename(Rename node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -267,8 +275,8 @@ public LogicalPlan visitRename(Rename node, AnalysisContext context) { // We should define the new target field in the context instead of analyze it. if (renameMap.getTarget() instanceof Field) { ReferenceExpression target = - new ReferenceExpression(((Field) renameMap.getTarget()).getField().toString(), - origin.type()); + new ReferenceExpression( + ((Field) renameMap.getTarget()).getField().toString(), origin.type()); ReferenceExpression originExpr = DSL.ref(origin.toString(), origin.type()); TypeEnvironment curEnv = context.peek(); curEnv.remove(originExpr); @@ -283,17 +291,15 @@ public LogicalPlan visitRename(Rename node, AnalysisContext context) { return new LogicalRename(child, renameMapBuilder.build()); } - /** - * Build {@link LogicalAggregation}. - */ + /** Build {@link LogicalAggregation}. 
*/ @Override public LogicalPlan visitAggregation(Aggregation node, AnalysisContext context) { final LogicalPlan child = node.getChild().get(0).accept(this, context); ImmutableList.Builder aggregatorBuilder = new ImmutableList.Builder<>(); for (UnresolvedExpression expr : node.getAggExprList()) { NamedExpression aggExpr = namedExpressionAnalyzer.analyze(expr, context); - aggregatorBuilder - .add(new NamedAggregator(aggExpr.getNameOrAlias(), (Aggregator) aggExpr.getDelegated())); + aggregatorBuilder.add( + new NamedAggregator(aggExpr.getNameOrAlias(), (Aggregator) aggExpr.getDelegated())); } ImmutableList.Builder groupbyBuilder = new ImmutableList.Builder<>(); @@ -313,16 +319,17 @@ public LogicalPlan visitAggregation(Aggregation node, AnalysisContext context) { // new context context.push(); TypeEnvironment newEnv = context.peek(); - aggregators.forEach(aggregator -> newEnv.define(new Symbol(Namespace.FIELD_NAME, - aggregator.getName()), aggregator.type())); - groupBys.forEach(group -> newEnv.define(new Symbol(Namespace.FIELD_NAME, - group.getNameOrAlias()), group.type())); + aggregators.forEach( + aggregator -> + newEnv.define( + new Symbol(Namespace.FIELD_NAME, aggregator.getName()), aggregator.type())); + groupBys.forEach( + group -> + newEnv.define(new Symbol(Namespace.FIELD_NAME, group.getNameOrAlias()), group.type())); return new LogicalAggregation(child, aggregators, groupBys); } - /** - * Build {@link LogicalRareTopN}. - */ + /** Build {@link LogicalRareTopN}. 
*/ @Override public LogicalPlan visitRareTopN(RareTopN node, AnalysisContext context) { final LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -342,10 +349,10 @@ public LogicalPlan visitRareTopN(RareTopN node, AnalysisContext context) { // new context context.push(); TypeEnvironment newEnv = context.peek(); - groupBys.forEach(group -> newEnv.define(new Symbol(Namespace.FIELD_NAME, - group.toString()), group.type())); - fields.forEach(field -> newEnv.define(new Symbol(Namespace.FIELD_NAME, - field.toString()), field.type())); + groupBys.forEach( + group -> newEnv.define(new Symbol(Namespace.FIELD_NAME, group.toString()), group.type())); + fields.forEach( + field -> newEnv.define(new Symbol(Namespace.FIELD_NAME, field.toString()), field.type())); List options = node.getNoOfResults(); Integer noOfResults = (Integer) options.get(0).getValue().getValue(); @@ -396,28 +403,28 @@ public LogicalPlan visitProject(Project node, AnalysisContext context) { } List namedExpressions = - selectExpressionAnalyzer.analyze(node.getProjectList(), context, + selectExpressionAnalyzer.analyze( + node.getProjectList(), + context, new ExpressionReferenceOptimizer(expressionAnalyzer.getRepository(), child)); for (UnresolvedExpression expr : node.getProjectList()) { - NestedAnalyzer nestedAnalyzer = new NestedAnalyzer( - namedExpressions, expressionAnalyzer, child - ); + NestedAnalyzer nestedAnalyzer = + new NestedAnalyzer(namedExpressions, expressionAnalyzer, child); child = nestedAnalyzer.analyze(expr, context); } // new context context.push(); TypeEnvironment newEnv = context.peek(); - namedExpressions.forEach(expr -> newEnv.define(new Symbol(Namespace.FIELD_NAME, - expr.getNameOrAlias()), expr.type())); + namedExpressions.forEach( + expr -> + newEnv.define(new Symbol(Namespace.FIELD_NAME, expr.getNameOrAlias()), expr.type())); List namedParseExpressions = context.getNamedParseExpressions(); return new LogicalProject(child, namedExpressions, namedParseExpressions); } - 
/** - * Build {@link LogicalEval}. - */ + /** Build {@link LogicalEval}. */ @Override public LogicalPlan visitEval(Eval node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -434,9 +441,7 @@ public LogicalPlan visitEval(Eval node, AnalysisContext context) { return new LogicalEval(child, expressionsBuilder.build()); } - /** - * Build {@link ParseExpression} to context and skip to child nodes. - */ + /** Build {@link ParseExpression} to context and skip to child nodes. */ @Override public LogicalPlan visitParse(Parse node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -447,18 +452,19 @@ public LogicalPlan visitParse(Parse node, AnalysisContext context) { Expression patternExpression = DSL.literal(pattern); TypeEnvironment curEnv = context.peek(); - ParseUtils.getNamedGroupCandidates(parseMethod, pattern, arguments).forEach(group -> { - ParseExpression expr = ParseUtils.createParseExpression(parseMethod, sourceField, - patternExpression, DSL.literal(group)); - curEnv.define(new Symbol(Namespace.FIELD_NAME, group), expr.type()); - context.getNamedParseExpressions().add(new NamedExpression(group, expr)); - }); + ParseUtils.getNamedGroupCandidates(parseMethod, pattern, arguments) + .forEach( + group -> { + ParseExpression expr = + ParseUtils.createParseExpression( + parseMethod, sourceField, patternExpression, DSL.literal(group)); + curEnv.define(new Symbol(Namespace.FIELD_NAME, group), expr.type()); + context.getNamedParseExpressions().add(new NamedExpression(group, expr)); + }); return child; } - /** - * Build {@link LogicalSort}. - */ + /** Build {@link LogicalSort}. 
*/ @Override public LogicalPlan visitSort(Sort node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -472,8 +478,7 @@ public LogicalPlan visitSort(Sort node, AnalysisContext context) { var analyzed = expressionAnalyzer.analyze(sortField.getField(), context); if (analyzed == null) { throw new UnsupportedOperationException( - String.format("Invalid use of expression %s", sortField.getField()) - ); + String.format("Invalid use of expression %s", sortField.getField())); } Expression expression = optimizer.optimize(analyzed, context); return ImmutablePair.of(analyzeSortOption(sortField.getFieldArgs()), expression); @@ -482,9 +487,7 @@ public LogicalPlan visitSort(Sort node, AnalysisContext context) { return new LogicalSort(child, sortList); } - /** - * Build {@link LogicalDedupe}. - */ + /** Build {@link LogicalDedupe}. */ @Override public LogicalPlan visitDedupe(Dedupe node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -504,9 +507,7 @@ public LogicalPlan visitDedupe(Dedupe node, AnalysisContext context) { consecutive); } - /** - * Logical head is identical to {@link LogicalLimit}. - */ + /** Logical head is identical to {@link LogicalLimit}. 
*/ public LogicalPlan visitHead(Head node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); return new LogicalLimit(child, node.getSize(), node.getFrom()); @@ -517,16 +518,15 @@ public LogicalPlan visitValues(Values node, AnalysisContext context) { List> values = node.getValues(); List> valueExprs = new ArrayList<>(); for (List value : values) { - valueExprs.add(value.stream() - .map(val -> (LiteralExpression) expressionAnalyzer.analyze(val, context)) - .collect(Collectors.toList())); + valueExprs.add( + value.stream() + .map(val -> (LiteralExpression) expressionAnalyzer.analyze(val, context)) + .collect(Collectors.toList())); } return new LogicalValues(valueExprs); } - /** - * Build {@link LogicalMLCommons} for Kmeans command. - */ + /** Build {@link LogicalMLCommons} for Kmeans command. */ @Override public LogicalPlan visitKmeans(Kmeans node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -538,9 +538,7 @@ public LogicalPlan visitKmeans(Kmeans node, AnalysisContext context) { return new LogicalMLCommons(child, "kmeans", options); } - /** - * Build {@link LogicalAD} for AD command. - */ + /** Build {@link LogicalAD} for AD command. 
*/ @Override public LogicalPlan visitAD(AD node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); @@ -553,21 +551,21 @@ public LogicalPlan visitAD(AD node, AnalysisContext context) { currentEnv.define(new Symbol(Namespace.FIELD_NAME, RCF_ANOMALOUS), ExprCoreType.BOOLEAN); } else { currentEnv.define(new Symbol(Namespace.FIELD_NAME, RCF_ANOMALY_GRADE), ExprCoreType.DOUBLE); - currentEnv.define(new Symbol(Namespace.FIELD_NAME, - (String) node.getArguments().get(TIME_FIELD).getValue()), ExprCoreType.TIMESTAMP); + currentEnv.define( + new Symbol(Namespace.FIELD_NAME, (String) node.getArguments().get(TIME_FIELD).getValue()), + ExprCoreType.TIMESTAMP); } return new LogicalAD(child, options); } - /** - * Build {@link LogicalML} for ml command. - */ + /** Build {@link LogicalML} for ml command. */ @Override public LogicalPlan visitML(ML node, AnalysisContext context) { LogicalPlan child = node.getChild().get(0).accept(this, context); TypeEnvironment currentEnv = context.peek(); node.getOutputSchema(currentEnv).entrySet().stream() - .forEach(v -> currentEnv.define(new Symbol(Namespace.FIELD_NAME, v.getKey()), v.getValue())); + .forEach( + v -> currentEnv.define(new Symbol(Namespace.FIELD_NAME, v.getKey()), v.getValue())); return new LogicalML(child, node.getArguments()); } @@ -580,8 +578,9 @@ public LogicalPlan visitPaginate(Paginate paginate, AnalysisContext context) { @Override public LogicalPlan visitFetchCursor(FetchCursor cursor, AnalysisContext context) { - return new LogicalFetchCursor(cursor.getCursor(), - dataSourceService.getDataSource(DEFAULT_DATASOURCE_NAME).getStorageEngine()); + return new LogicalFetchCursor( + cursor.getCursor(), + dataSourceService.getDataSource(DEFAULT_DATASOURCE_NAME).getStorageEngine()); } @Override @@ -590,13 +589,13 @@ public LogicalPlan visitCloseCursor(CloseCursor closeCursor, AnalysisContext con } /** - * The first argument is always "asc", others are optional. 
- * Given nullFirst argument, use its value. Otherwise just use DEFAULT_ASC/DESC. + * The first argument is always "asc", others are optional. Given nullFirst argument, use its + * value. Otherwise just use DEFAULT_ASC/DESC. */ private SortOption analyzeSortOption(List fieldArgs) { Boolean asc = (Boolean) fieldArgs.get(0).getValue().getValue(); - Optional nullFirst = fieldArgs.stream() - .filter(option -> "nullFirst".equals(option.getArgName())).findFirst(); + Optional nullFirst = + fieldArgs.stream().filter(option -> "nullFirst".equals(option.getArgName())).findFirst(); if (nullFirst.isPresent()) { Boolean isNullFirst = (Boolean) nullFirst.get().getValue().getValue(); diff --git a/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java b/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java index a90fc3d2c1..01145dc7df 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java +++ b/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java @@ -24,19 +24,17 @@ public class DataSourceSchemaIdentifierNameResolver { private static final String DOT = "."; /** - * Data model for capturing dataSourceName, schema and identifier from - * fully qualifiedName. In the current state, it is used to capture - * DataSourceSchemaTable name and DataSourceSchemaFunction in case of table - * functions. + * Data model for capturing dataSourceName, schema and identifier from fully qualifiedName. In the + * current state, it is used to capture DataSourceSchemaTable name and DataSourceSchemaFunction in + * case of table functions. * * @param dataSourceService {@link DataSourceService}. - * @param parts parts of qualifiedName. + * @param parts parts of qualifiedName. 
*/ - public DataSourceSchemaIdentifierNameResolver(DataSourceService dataSourceService, - List parts) { + public DataSourceSchemaIdentifierNameResolver( + DataSourceService dataSourceService, List parts) { this.dataSourceService = dataSourceService; - List remainingParts - = captureSchemaName(captureDataSourceName(parts)); + List remainingParts = captureSchemaName(captureDataSourceName(parts)); identifierName = String.join(DOT, remainingParts); } @@ -52,7 +50,6 @@ public String getSchemaName() { return schemaName; } - // Capture datasource name and return remaining parts(schema name and table name) // from the fully qualified name. private List captureDataSourceName(List parts) { @@ -69,12 +66,11 @@ private List captureDataSourceName(List parts) { private List captureSchemaName(List parts) { if (parts.size() > 1 && (DEFAULT_SCHEMA_NAME.equals(parts.get(0)) - || INFORMATION_SCHEMA_NAME.contains(parts.get(0)))) { + || INFORMATION_SCHEMA_NAME.contains(parts.get(0)))) { schemaName = parts.get(0); return parts.subList(1, parts.size()); } else { return parts; } } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java index 60e5b40a82..8e586f68ff 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.ast.dsl.AstDSL.and; @@ -77,19 +76,19 @@ * Expression}. 
*/ public class ExpressionAnalyzer extends AbstractNodeVisitor { - @Getter - private final BuiltinFunctionRepository repository; + @Getter private final BuiltinFunctionRepository repository; @Override public Expression visitCast(Cast node, AnalysisContext context) { final Expression expression = node.getExpression().accept(this, context); - return (Expression) repository - .compile(context.getFunctionProperties(), node.convertFunctionName(), + return (Expression) + repository.compile( + context.getFunctionProperties(), + node.convertFunctionName(), Collections.singletonList(expression)); } - public ExpressionAnalyzer( - BuiltinFunctionRepository repository) { + public ExpressionAnalyzer(BuiltinFunctionRepository repository) { this.repository = repository; } @@ -112,8 +111,8 @@ public Expression visitEqualTo(EqualTo node, AnalysisContext context) { @Override public Expression visitLiteral(Literal node, AnalysisContext context) { - return DSL - .literal(ExprValueUtils.fromObjectValue(node.getValue(), node.getType().getCoreType())); + return DSL.literal( + ExprValueUtils.fromObjectValue(node.getValue(), node.getType().getCoreType())); } @Override @@ -162,8 +161,12 @@ public Expression visitAggregateFunction(AggregateFunction node, AnalysisContext for (UnresolvedExpression arg : node.getArgList()) { builder.add(arg.accept(this, context)); } - Aggregator aggregator = (Aggregator) repository.compile( - context.getFunctionProperties(), builtinFunctionName.get().getName(), builder.build()); + Aggregator aggregator = + (Aggregator) + repository.compile( + context.getFunctionProperties(), + builtinFunctionName.get().getName(), + builder.build()); aggregator.distinct(node.getDistinct()); if (node.condition() != null) { aggregator.condition(analyze(node.condition(), context)); @@ -176,8 +179,8 @@ public Expression visitAggregateFunction(AggregateFunction node, AnalysisContext @Override public Expression visitRelevanceFieldList(RelevanceFieldList node, AnalysisContext context) 
{ - return new LiteralExpression(ExprValueUtils.tupleValue( - ImmutableMap.copyOf(node.getFieldList()))); + return new LiteralExpression( + ExprValueUtils.tupleValue(ImmutableMap.copyOf(node.getFieldList()))); } @Override @@ -185,19 +188,19 @@ public Expression visitFunction(Function node, AnalysisContext context) { FunctionName functionName = FunctionName.of(node.getFuncName()); List arguments = node.getFuncArgs().stream() - .map(unresolvedExpression -> { - var ret = analyze(unresolvedExpression, context); - if (ret == null) { - throw new UnsupportedOperationException( - String.format("Invalid use of expression %s", unresolvedExpression) - ); - } else { - return ret; - } - }) + .map( + unresolvedExpression -> { + var ret = analyze(unresolvedExpression, context); + if (ret == null) { + throw new UnsupportedOperationException( + String.format("Invalid use of expression %s", unresolvedExpression)); + } else { + return ret; + } + }) .collect(Collectors.toList()); - return (Expression) repository.compile(context.getFunctionProperties(), - functionName, arguments); + return (Expression) + repository.compile(context.getFunctionProperties(), functionName, arguments); } @SuppressWarnings("unchecked") @@ -219,18 +222,20 @@ public Expression visitHighlightFunction(HighlightFunction node, AnalysisContext /** * visitScoreFunction removes the score function from the AST and replaces it with the child - * relevance function node. If the optional boost variable is provided, the boost argument - * of the relevance function is combined. + * relevance function node. If the optional boost variable is provided, the boost argument of the + * relevance function is combined. 
* - * @param node score function node + * @param node score function node * @param context analysis context for the query * @return resolved relevance function */ public Expression visitScoreFunction(ScoreFunction node, AnalysisContext context) { Literal boostArg = node.getRelevanceFieldWeight(); if (!boostArg.getType().equals(DataType.DOUBLE)) { - throw new SemanticCheckException(String.format("Expected boost type '%s' but got '%s'", - DataType.DOUBLE.name(), boostArg.getType().name())); + throw new SemanticCheckException( + String.format( + "Expected boost type '%s' but got '%s'", + DataType.DOUBLE.name(), boostArg.getType().name())); } Double thisBoostValue = ((Double) boostArg.getValue()); @@ -248,10 +253,9 @@ public Expression visitScoreFunction(ScoreFunction node, AnalysisContext context Literal boostArgLiteral = (Literal) ((UnresolvedArgument) expr).getValue(); Double boostValue = Double.parseDouble((String) boostArgLiteral.getValue()) * thisBoostValue; - UnresolvedArgument newBoostArg = new UnresolvedArgument( - argumentName, - new Literal(boostValue.toString(), DataType.STRING) - ); + UnresolvedArgument newBoostArg = + new UnresolvedArgument( + argumentName, new Literal(boostValue.toString(), DataType.STRING)); updatedFuncArgs.add(newBoostArg); } else { updatedFuncArgs.add(expr); @@ -260,18 +264,18 @@ public Expression visitScoreFunction(ScoreFunction node, AnalysisContext context // since nothing was found, add an argument if (!doesFunctionContainBoostArgument) { - UnresolvedArgument newBoostArg = new UnresolvedArgument( + UnresolvedArgument newBoostArg = + new UnresolvedArgument( "boost", new Literal(Double.toString(thisBoostValue), DataType.STRING)); updatedFuncArgs.add(newBoostArg); } // create a new function expression with boost argument and resolve it - Function updatedRelevanceQueryUnresolvedExpr = new Function( - relevanceQueryUnresolvedExpr.getFuncName(), - updatedFuncArgs); + Function updatedRelevanceQueryUnresolvedExpr = + new 
Function(relevanceQueryUnresolvedExpr.getFuncName(), updatedFuncArgs); OpenSearchFunctions.OpenSearchFunction relevanceQueryExpr = - (OpenSearchFunctions.OpenSearchFunction) updatedRelevanceQueryUnresolvedExpr - .accept(this, context); + (OpenSearchFunctions.OpenSearchFunction) + updatedRelevanceQueryUnresolvedExpr.accept(this, context); relevanceQueryExpr.setScoreTracked(true); return relevanceQueryExpr; } @@ -300,16 +304,16 @@ public Expression visitCompare(Compare node, AnalysisContext context) { Expression left = analyze(node.getLeft(), context); Expression right = analyze(node.getRight(), context); return (Expression) - repository.compile(context.getFunctionProperties(), - functionName, Arrays.asList(left, right)); + repository.compile( + context.getFunctionProperties(), functionName, Arrays.asList(left, right)); } @Override public Expression visitBetween(Between node, AnalysisContext context) { return and( - compare(">=", node.getValue(), node.getLowerBound()), - compare("<=", node.getValue(), node.getUpperBound()) - ).accept(this, context); + compare(">=", node.getValue(), node.getLowerBound()), + compare("<=", node.getValue(), node.getUpperBound())) + .accept(this, context); } @Override @@ -320,16 +324,18 @@ public Expression visitCase(Case node, AnalysisContext context) { whens.add((WhenClause) analyze(when, context)); } else { // Merge case value and condition (compare value) into a single equal condition - whens.add((WhenClause) analyze( - new When( - new Function("=", Arrays.asList(node.getCaseValue(), when.getCondition())), - when.getResult() - ), context)); + whens.add( + (WhenClause) + analyze( + new When( + new Function("=", Arrays.asList(node.getCaseValue(), when.getCondition())), + when.getResult()), + context)); } } - Expression defaultResult = (node.getElseClause() == null) - ? null : analyze(node.getElseClause(), context); + Expression defaultResult = + (node.getElseClause() == null) ? 
null : analyze(node.getElseClause(), context); CaseClause caseClause = new CaseClause(whens, defaultResult); // To make this simple, require all result type same regardless of implicit convert @@ -345,8 +351,7 @@ public Expression visitCase(Case node, AnalysisContext context) { @Override public Expression visitWhen(When node, AnalysisContext context) { return new WhenClause( - analyze(node.getCondition(), context), - analyze(node.getResult(), context)); + analyze(node.getCondition(), context), analyze(node.getResult(), context)); } @Override @@ -370,16 +375,13 @@ public Expression visitQualifiedName(QualifiedName node, AnalysisContext context // check for reserved words in the identifier for (String part : node.getParts()) { for (TypeEnvironment typeEnv = context.peek(); - typeEnv != null; - typeEnv = typeEnv.getParent()) { - Optional exprType = typeEnv.getReservedSymbolTable().lookup( - new Symbol(Namespace.FIELD_NAME, part)); + typeEnv != null; + typeEnv = typeEnv.getParent()) { + Optional exprType = + typeEnv.getReservedSymbolTable().lookup(new Symbol(Namespace.FIELD_NAME, part)); if (exprType.isPresent()) { return visitMetadata( - qualifierAnalyzer.unqualified(node), - (ExprCoreType) exprType.get(), - context - ); + qualifierAnalyzer.unqualified(node), (ExprCoreType) exprType.get(), context); } } } @@ -400,15 +402,15 @@ public Expression visitUnresolvedArgument(UnresolvedArgument node, AnalysisConte } /** - * If QualifiedName is actually a reserved metadata field, return the expr type associated - * with the metadata field. - * @param ident metadata field name + * If QualifiedName is actually a reserved metadata field, return the expr type associated with + * the metadata field. 
+ * + * @param ident metadata field name * @param context analysis context * @return DSL reference */ - private Expression visitMetadata(String ident, - ExprCoreType exprCoreType, - AnalysisContext context) { + private Expression visitMetadata( + String ident, ExprCoreType exprCoreType, AnalysisContext context) { return DSL.ref(ident, exprCoreType); } @@ -421,8 +423,8 @@ private Expression visitIdentifier(String ident, AnalysisContext context) { } TypeEnvironment typeEnv = context.peek(); - ReferenceExpression ref = DSL.ref(ident, - typeEnv.resolve(new Symbol(Namespace.FIELD_NAME, ident))); + ReferenceExpression ref = + DSL.ref(ident, typeEnv.resolve(new Symbol(Namespace.FIELD_NAME, ident))); return ref; } diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java index eaf5c4abca..398f848f16 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import java.util.HashMap; @@ -26,25 +25,25 @@ import org.opensearch.sql.planner.logical.LogicalWindow; /** - * The optimizer used to replace the expression referred in the SelectClause - * e.g. The query SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name). - * will be translated the AST - * Project[abs(age), sub(sum(age), avg(age)) - * Agg(agg=[sum(age), avg(age)], group=[abs(age)]] - * Relation - * The sum(age) and avg(age) in the Project could be replace by the analyzed reference, the - * LogicalPlan should be - * LogicalProject[Ref("abs(age)"), sub(Ref("sum(age)"), Ref("avg(age)")) - * LogicalAgg(agg=[sum(age), avg(age)], group=[abs(age)]] - * LogicalRelation + * The optimizer used to replace the expression referred in the SelectClause
e.g. The query + * SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name).
+ * will be translated the AST
+ * Project[abs(age), sub(sum(age), avg(age))
+ * &ensp Agg(agg=[sum(age), avg(age)], group=[abs(age)]]
+ * &emsp Relation
+ * The sum(age) and avg(age) in the Project could be replaced by the analyzed reference, the + * LogicalPlan should be
+ * LogicalProject[Ref("abs(age)"), sub(Ref("sum(age)"), Ref("avg(age)"))
+ * &ensp LogicalAgg(agg=[sum(age), avg(age)], group=[abs(age)]]
+ * &emsp LogicalRelation */ public class ExpressionReferenceOptimizer extends ExpressionNodeVisitor { private final BuiltinFunctionRepository repository; /** - * The map of expression and it's reference. - * For example, The NamedAggregator should produce the map of Aggregator to Ref(name) + * The map of expression and it's reference. For example, The NamedAggregator should produce the + * map of Aggregator to Ref(name) */ private final Map expressionMap = new HashMap<>(); @@ -69,17 +68,16 @@ public Expression visitFunction(FunctionExpression node, AnalysisContext context return expressionMap.get(node); } else { final List args = - node.getArguments().stream().map(expr -> expr.accept(this, context)) + node.getArguments().stream() + .map(expr -> expr.accept(this, context)) .collect(Collectors.toList()); - Expression optimizedFunctionExpression = (Expression) repository.compile( - context.getFunctionProperties(), - node.getFunctionName(), - args - ); + Expression optimizedFunctionExpression = + (Expression) + repository.compile(context.getFunctionProperties(), node.getFunctionName(), args); // Propagate scoreTracked for OpenSearch functions if (optimizedFunctionExpression instanceof OpenSearchFunctions.OpenSearchFunction) { - ((OpenSearchFunctions.OpenSearchFunction) optimizedFunctionExpression).setScoreTracked( - ((OpenSearchFunctions.OpenSearchFunction)node).isScoreTracked()); + ((OpenSearchFunctions.OpenSearchFunction) optimizedFunctionExpression) + .setScoreTracked(((OpenSearchFunctions.OpenSearchFunction) node).isScoreTracked()); } return optimizedFunctionExpression; } @@ -98,19 +96,17 @@ public Expression visitNamed(NamedExpression node, AnalysisContext context) { return node.getDelegated().accept(this, context); } - /** - * Implement this because Case/When is not registered in function repository. - */ + /** Implement this because Case/When is not registered in function repository. 
*/ @Override public Expression visitCase(CaseClause node, AnalysisContext context) { if (expressionMap.containsKey(node)) { return expressionMap.get(node); } - List whenClauses = node.getWhenClauses() - .stream() - .map(expr -> (WhenClause) expr.accept(this, context)) - .collect(Collectors.toList()); + List whenClauses = + node.getWhenClauses().stream() + .map(expr -> (WhenClause) expr.accept(this, context)) + .collect(Collectors.toList()); Expression defaultResult = null; if (node.getDefaultResult() != null) { defaultResult = node.getDefaultResult().accept(this, context); @@ -121,14 +117,10 @@ public Expression visitCase(CaseClause node, AnalysisContext context) { @Override public Expression visitWhen(WhenClause node, AnalysisContext context) { return new WhenClause( - node.getCondition().accept(this, context), - node.getResult().accept(this, context)); + node.getCondition().accept(this, context), node.getResult().accept(this, context)); } - - /** - * Expression Map Builder. - */ + /** Expression Map Builder. */ class ExpressionMapBuilder extends LogicalPlanNodeVisitor { @Override @@ -140,20 +132,27 @@ public Void visitNode(LogicalPlan plan, Void context) { @Override public Void visitAggregation(LogicalAggregation plan, Void context) { // Create the mapping for all the aggregator. - plan.getAggregatorList().forEach(namedAggregator -> expressionMap - .put(namedAggregator.getDelegated(), - new ReferenceExpression(namedAggregator.getName(), namedAggregator.type()))); + plan.getAggregatorList() + .forEach( + namedAggregator -> + expressionMap.put( + namedAggregator.getDelegated(), + new ReferenceExpression(namedAggregator.getName(), namedAggregator.type()))); // Create the mapping for all the group by. 
- plan.getGroupByList().forEach(groupBy -> expressionMap - .put(groupBy.getDelegated(), - new ReferenceExpression(groupBy.getNameOrAlias(), groupBy.type()))); + plan.getGroupByList() + .forEach( + groupBy -> + expressionMap.put( + groupBy.getDelegated(), + new ReferenceExpression(groupBy.getNameOrAlias(), groupBy.type()))); return null; } @Override public Void visitWindow(LogicalWindow plan, Void context) { Expression windowFunc = plan.getWindowFunction(); - expressionMap.put(windowFunc, + expressionMap.put( + windowFunc, new ReferenceExpression(((NamedExpression) windowFunc).getName(), windowFunc.type())); return visitNode(plan, context); } diff --git a/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java index 0a15c6bac8..386c6e9d9f 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java @@ -14,10 +14,7 @@ import org.opensearch.sql.planner.logical.LogicalHighlight; import org.opensearch.sql.planner.logical.LogicalPlan; -/** - * Analyze the highlight in the {@link AnalysisContext} to construct the {@link - * LogicalPlan}. - */ +/** Analyze the highlight in the {@link AnalysisContext} to construct the {@link LogicalPlan}. 
*/ @RequiredArgsConstructor public class HighlightAnalyzer extends AbstractNodeVisitor { private final ExpressionAnalyzer expressionAnalyzer; diff --git a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java index d0e6968b07..43bd411b42 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import lombok.RequiredArgsConstructor; @@ -15,19 +14,15 @@ import org.opensearch.sql.expression.NamedExpression; /** - * Analyze the Alias node in the {@link AnalysisContext} to construct the list of - * {@link NamedExpression}. + * Analyze the Alias node in the {@link AnalysisContext} to construct the list of {@link + * NamedExpression}. */ @RequiredArgsConstructor -public class NamedExpressionAnalyzer extends - AbstractNodeVisitor { +public class NamedExpressionAnalyzer extends AbstractNodeVisitor { private final ExpressionAnalyzer expressionAnalyzer; - /** - * Analyze Select fields. - */ - public NamedExpression analyze(UnresolvedExpression expression, - AnalysisContext analysisContext) { + /** Analyze Select fields. 
*/ + public NamedExpression analyze(UnresolvedExpression expression, AnalysisContext analysisContext) { return expression.accept(this, analysisContext); } diff --git a/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java index f050824557..ef8f142801 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java @@ -27,8 +27,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; /** - * Analyze the Nested Function in the {@link AnalysisContext} to construct the {@link - * LogicalPlan}. + * Analyze the Nested Function in the {@link AnalysisContext} to construct the {@link LogicalPlan}. */ @RequiredArgsConstructor public class NestedAnalyzer extends AbstractNodeVisitor { @@ -52,15 +51,15 @@ public LogicalPlan visitNestedAllTupleFields(NestedAllTupleFields node, Analysis for (NamedExpression namedExpr : namedExpressions) { if (isNestedFunction(namedExpr.getDelegated())) { ReferenceExpression field = - (ReferenceExpression) ((FunctionExpression) namedExpr.getDelegated()) - .getArguments().get(0); + (ReferenceExpression) + ((FunctionExpression) namedExpr.getDelegated()).getArguments().get(0); // If path is same as NestedAllTupleFields path - if (field.getAttr().substring(0, field.getAttr().lastIndexOf(".")) + if (field + .getAttr() + .substring(0, field.getAttr().lastIndexOf(".")) .equalsIgnoreCase(node.getPath())) { - args.add(Map.of( - "field", field, - "path", new ReferenceExpression(node.getPath(), STRING))); + args.add(Map.of("field", field, "path", new ReferenceExpression(node.getPath(), STRING))); } } } @@ -75,20 +74,24 @@ public LogicalPlan visitFunction(Function node, AnalysisContext context) { List expressions = node.getFuncArgs(); validateArgs(expressions); ReferenceExpression nestedField = - (ReferenceExpression)expressionAnalyzer.analyze(expressions.get(0), context); + 
(ReferenceExpression) expressionAnalyzer.analyze(expressions.get(0), context); Map args; // Path parameter is supplied if (expressions.size() == 2) { - args = Map.of( - "field", nestedField, - "path", (ReferenceExpression)expressionAnalyzer.analyze(expressions.get(1), context) - ); + args = + Map.of( + "field", + nestedField, + "path", + (ReferenceExpression) expressionAnalyzer.analyze(expressions.get(1), context)); } else { - args = Map.of( - "field", (ReferenceExpression)expressionAnalyzer.analyze(expressions.get(0), context), - "path", generatePath(nestedField.toString()) - ); + args = + Map.of( + "field", + (ReferenceExpression) expressionAnalyzer.analyze(expressions.get(0), context), + "path", + generatePath(nestedField.toString())); } return mergeChildIfLogicalNested(new ArrayList<>(Arrays.asList(args))); @@ -97,8 +100,9 @@ public LogicalPlan visitFunction(Function node, AnalysisContext context) { } /** - * NestedAnalyzer visits all functions in SELECT clause, creates logical plans for each and - * merges them. This is to avoid another merge rule in LogicalPlanOptimizer:create(). + * NestedAnalyzer visits all functions in SELECT clause, creates logical plans for each and merges + * them. This is to avoid another merge rule in LogicalPlanOptimizer:create(). + * * @param args field and path params to add to logical plan. * @return child of logical nested with added args, or new LogicalNested. 
*/ @@ -113,34 +117,33 @@ private LogicalPlan mergeChildIfLogicalNested(List args) { if (args.size() < 1 || args.size() > 2) { throw new IllegalArgumentException( - "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)" - ); + "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)"); } for (int i = 0; i < args.size(); i++) { if (!(args.get(i) instanceof QualifiedName)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", args.get(i).toString()) - ); + String.format("Illegal nested field name: %s", args.get(i).toString())); } - if (i == 0 && ((QualifiedName)args.get(i)).getParts().size() < 2) { + if (i == 0 && ((QualifiedName) args.get(i)).getParts().size() < 2) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", args.get(i).toString()) - ); + String.format("Illegal nested field name: %s", args.get(i).toString())); } } } /** * Generate nested path dynamically. Assumes at least one level of nesting in supplied string. + * * @param field : Nested field to generate path of. * @return : Path of field derived from last level of nesting. */ @@ -150,12 +153,15 @@ public static ReferenceExpression generatePath(String field) { /** * Check if supplied expression is a nested function. + * * @param expr Expression checking if is nested function. * @return True if expression is a nested function. 
*/ public static Boolean isNestedFunction(Expression expr) { return (expr instanceof FunctionExpression - && ((FunctionExpression) expr).getFunctionName().getFunctionName() - .equalsIgnoreCase(BuiltinFunctionName.NESTED.name())); + && ((FunctionExpression) expr) + .getFunctionName() + .getFunctionName() + .equalsIgnoreCase(BuiltinFunctionName.NESTED.name())); } } diff --git a/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java index d1e31d0079..27dd6a2243 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import java.util.Arrays; @@ -15,9 +14,7 @@ import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Analyzer that analyzes qualifier(s) in a full field name. - */ +/** Analyzer that analyzes qualifier(s) in a full field name. */ @RequiredArgsConstructor public class QualifierAnalyzer { @@ -28,11 +25,11 @@ public String unqualified(String... parts) { } /** - * Get unqualified name if its qualifier symbol found is in index namespace - * on type environment. Unqualified name means name with qualifier removed. - * For example, unqualified name of "accounts.age" or "acc.age" is "age". + * Get unqualified name if its qualifier symbol found is in index namespace on type environment. + * Unqualified name means name with qualifier removed. For example, unqualified name of + * "accounts.age" or "acc.age" is "age". * - * @return unqualified name if criteria met above, otherwise original name + * @return unqualified name if criteria met above, otherwise original name */ public String unqualified(QualifiedName fullName) { return isQualifierIndexOrAlias(fullName) ? 
fullName.rest().toString() : fullName.toString(); @@ -66,10 +63,11 @@ private void resolveQualifierSymbol(QualifiedName fullName, String qualifier) { } catch (SemanticCheckException e) { // Throw syntax check intentionally to indicate fall back to old engine. // Need change to semantic check exception in future. - throw new SyntaxCheckException(String.format( - "The qualifier [%s] of qualified name [%s] must be an field name, index name or its " - + "alias", qualifier, fullName)); + throw new SyntaxCheckException( + String.format( + "The qualifier [%s] of qualified name [%s] must be an field name, index name or its " + + "alias", + qualifier, fullName)); } } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java index 734f37378b..5e46cfa629 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import com.google.common.collect.ImmutableList; @@ -30,23 +29,21 @@ import org.opensearch.sql.expression.ReferenceExpression; /** - * Analyze the select list in the {@link AnalysisContext} to construct the list of - * {@link NamedExpression}. + * Analyze the select list in the {@link AnalysisContext} to construct the list of {@link + * NamedExpression}. */ @RequiredArgsConstructor public class SelectExpressionAnalyzer - extends - AbstractNodeVisitor, AnalysisContext> { + extends AbstractNodeVisitor, AnalysisContext> { private final ExpressionAnalyzer expressionAnalyzer; private ExpressionReferenceOptimizer optimizer; - /** - * Analyze Select fields. - */ - public List analyze(List selectList, - AnalysisContext analysisContext, - ExpressionReferenceOptimizer optimizer) { + /** Analyze Select fields. 
*/ + public List analyze( + List selectList, + AnalysisContext analysisContext, + ExpressionReferenceOptimizer optimizer) { this.optimizer = optimizer; ImmutableList.Builder builder = new ImmutableList.Builder<>(); for (UnresolvedExpression unresolvedExpression : selectList) { @@ -68,50 +65,49 @@ public List visitAlias(Alias node, AnalysisContext context) { } Expression expr = referenceIfSymbolDefined(node, context); - return Collections.singletonList(DSL.named( - unqualifiedNameIfFieldOnly(node, context), - expr, - node.getAlias())); + return Collections.singletonList( + DSL.named(unqualifiedNameIfFieldOnly(node, context), expr, node.getAlias())); } /** * The Alias could be - * 1. SELECT name, AVG(age) FROM s BY name -> - * Project(Alias("name", expr), Alias("AVG(age)", aggExpr)) - * Agg(Alias("AVG(age)", aggExpr)) - * 2. SELECT length(name), AVG(age) FROM s BY length(name) - * Project(Alias("name", expr), Alias("AVG(age)", aggExpr)) - * Agg(Alias("AVG(age)", aggExpr)) - * 3. SELECT length(name) as l, AVG(age) FROM s BY l - * Project(Alias("name", expr, l), Alias("AVG(age)", aggExpr)) - * Agg(Alias("AVG(age)", aggExpr), Alias("length(name)", groupExpr)) + * + *

    + *
  1. SELECT name, AVG(age) FROM s BY name -> Project(Alias("name", expr), Alias("AVG(age)", + * aggExpr)) Agg(Alias("AVG(age)", aggExpr)) + *
  2. SELECT length(name), AVG(age) FROM s BY length(name) Project(Alias("name", expr), + * Alias("AVG(age)", aggExpr)) Agg(Alias("AVG(age)", aggExpr)) + *
  3. SELECT length(name) as l, AVG(age) FROM s BY l Project(Alias("name", expr, l), + * Alias("AVG(age)", aggExpr)) Agg(Alias("AVG(age)", aggExpr), Alias("length(name)", + * groupExpr)) + *
*/ - private Expression referenceIfSymbolDefined(Alias expr, - AnalysisContext context) { + private Expression referenceIfSymbolDefined(Alias expr, AnalysisContext context) { UnresolvedExpression delegatedExpr = expr.getDelegated(); // Pass named expression because expression like window function loses full name // (OVER clause) and thus depends on name in alias to be replaced correctly return optimizer.optimize( DSL.named( - expr.getName(), - delegatedExpr.accept(expressionAnalyzer, context), - expr.getAlias()), + expr.getName(), delegatedExpr.accept(expressionAnalyzer, context), expr.getAlias()), context); } @Override - public List visitAllFields(AllFields node, - AnalysisContext context) { + public List visitAllFields(AllFields node, AnalysisContext context) { TypeEnvironment environment = context.peek(); Map lookupAllFields = environment.lookupAllFields(Namespace.FIELD_NAME); - return lookupAllFields.entrySet().stream().map(entry -> DSL.named(entry.getKey(), - new ReferenceExpression(entry.getKey(), entry.getValue()))).collect(Collectors.toList()); + return lookupAllFields.entrySet().stream() + .map( + entry -> + DSL.named( + entry.getKey(), new ReferenceExpression(entry.getKey(), entry.getValue()))) + .collect(Collectors.toList()); } @Override - public List visitNestedAllTupleFields(NestedAllTupleFields node, - AnalysisContext context) { + public List visitNestedAllTupleFields( + NestedAllTupleFields node, AnalysisContext context) { TypeEnvironment environment = context.peek(); Map lookupAllTupleFields = environment.lookupAllTupleFields(Namespace.FIELD_NAME); @@ -121,25 +117,25 @@ public List visitNestedAllTupleFields(NestedAllTupleFields node Pattern p = Pattern.compile(node.getPath() + "\\.[^\\.]+$"); return lookupAllTupleFields.entrySet().stream() .filter(field -> p.matcher(field.getKey()).find()) - .map(entry -> { - Expression nestedFunc = new Function( - "nested", - List.of( - new QualifiedName(List.of(entry.getKey().split("\\.")))) - 
).accept(expressionAnalyzer, context); - return DSL.named("nested(" + entry.getKey() + ")", nestedFunc); - }) + .map( + entry -> { + Expression nestedFunc = + new Function( + "nested", + List.of(new QualifiedName(List.of(entry.getKey().split("\\."))))) + .accept(expressionAnalyzer, context); + return DSL.named("nested(" + entry.getKey() + ")", nestedFunc); + }) .collect(Collectors.toList()); } /** - * Get unqualified name if select item is just a field. For example, suppose an index - * named "accounts", return "age" for "SELECT accounts.age". But do nothing for expression - * in "SELECT ABS(accounts.age)". - * Note that an assumption is made implicitly that original name field in Alias must be - * the same as the values in QualifiedName. This is true because AST builder does this. - * Otherwise, what unqualified() returns will override Alias's name as NamedExpression's name - * even though the QualifiedName doesn't have qualifier. + * Get unqualified name if select item is just a field. For example, suppose an index named + * "accounts", return "age" for "SELECT accounts.age". But do nothing for expression in "SELECT + * ABS(accounts.age)". Note that an assumption is made implicitly that original name field in + * Alias must be the same as the values in QualifiedName. This is true because AST builder does + * this. Otherwise, what unqualified() returns will override Alias's name as NamedExpression's + * name even though the QualifiedName doesn't have qualifier. 
*/ private String unqualifiedNameIfFieldOnly(Alias node, AnalysisContext context) { UnresolvedExpression selectItem = node.getDelegated(); @@ -149,5 +145,4 @@ private String unqualifiedNameIfFieldOnly(Alias node, AnalysisContext context) { } return node.getName(); } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java index 17d203f66f..8baab64810 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java +++ b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.analysis.symbol.Namespace.FIELD_NAME; @@ -21,16 +20,12 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.env.Environment; -/** - * The definition of Type Environment. - */ +/** The definition of Type Environment. */ public class TypeEnvironment implements Environment { - @Getter - private final TypeEnvironment parent; + @Getter private final TypeEnvironment parent; private final SymbolTable symbolTable; - @Getter - private final SymbolTable reservedSymbolTable; + @Getter private final SymbolTable reservedSymbolTable; /** * Constructor with empty symbol tables. @@ -69,15 +64,14 @@ public ExprType resolve(Symbol symbol) { return typeOptional.get(); } } - throw new SemanticCheckException( - String.format("can't resolve %s in type env", symbol)); + throw new SemanticCheckException(String.format("can't resolve %s in type env", symbol)); } /** * Resolve all fields in the current environment. 
* - * @param namespace a namespace - * @return all symbols in the namespace + * @param namespace a namespace + * @return all symbols in the namespace */ public Map lookupAllFields(Namespace namespace) { Map result = new LinkedHashMap<>(); @@ -87,8 +81,9 @@ public Map lookupAllFields(Namespace namespace) { /** * Resolve all fields in the current environment. - * @param namespace a namespace - * @return all symbols in the namespace + * + * @param namespace a namespace + * @return all symbols in the namespace */ public Map lookupAllTupleFields(Namespace namespace) { Map result = new LinkedHashMap<>(); @@ -100,7 +95,7 @@ public Map lookupAllTupleFields(Namespace namespace) { * Define symbol with the type. * * @param symbol symbol to define - * @param type type + * @param type type */ public void define(Symbol symbol, ExprType type) { symbolTable.store(symbol, type); @@ -119,19 +114,14 @@ public void remove(Symbol symbol) { symbolTable.remove(symbol); } - /** - * Remove ref. - */ + /** Remove ref. */ public void remove(ReferenceExpression ref) { remove(new Symbol(FIELD_NAME, ref.getAttr())); } - /** - * Clear all fields in the current environment. - */ + /** Clear all fields in the current environment. 
*/ public void clearAllFields() { - lookupAllFields(FIELD_NAME).keySet().forEach( - v -> remove(new Symbol(Namespace.FIELD_NAME, v))); + lookupAllFields(FIELD_NAME).keySet().forEach(v -> remove(new Symbol(Namespace.FIELD_NAME, v))); } public void addReservedWord(Symbol symbol, ExprType type) { diff --git a/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java index 3abcf9e140..c4229e4664 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.ast.tree.Sort.SortOption.DEFAULT_ASC; @@ -29,28 +28,25 @@ import org.opensearch.sql.planner.logical.LogicalWindow; /** - * Window expression analyzer that analyzes window function expression in expression list - * in project operator. + * Window expression analyzer that analyzes window function expression in expression list in project + * operator. */ @RequiredArgsConstructor public class WindowExpressionAnalyzer extends AbstractNodeVisitor { - /** - * Expression analyzer. - */ + /** Expression analyzer. */ private final ExpressionAnalyzer expressionAnalyzer; - /** - * Child node to be wrapped by a new window operator. - */ + /** Child node to be wrapped by a new window operator. */ private final LogicalPlan child; /** - * Analyze the given project item and return window operator (with child node inside) - * if the given project item is a window function. - * @param projectItem project item - * @param context analysis context - * @return window operator or original child if not windowed + * Analyze the given project item and return window operator (with child node inside) if the given + * project item is a window function. 
+ * + * @param projectItem project item + * @param context analysis context + * @return window operator or original child if not windowed */ public LogicalPlan analyze(UnresolvedExpression projectItem, AnalysisContext context) { LogicalPlan window = projectItem.accept(this, context); @@ -77,26 +73,24 @@ public LogicalPlan visitAlias(Alias node, AnalysisContext context) { return new LogicalWindow(child, namedWindowFunction, windowDefinition); } return new LogicalWindow( - new LogicalSort(child, allSortItems), - namedWindowFunction, - windowDefinition); + new LogicalSort(child, allSortItems), namedWindowFunction, windowDefinition); } private List analyzePartitionList(WindowFunction node, AnalysisContext context) { - return node.getPartitionByList() - .stream() - .map(expr -> expressionAnalyzer.analyze(expr, context)) - .collect(Collectors.toList()); + return node.getPartitionByList().stream() + .map(expr -> expressionAnalyzer.analyze(expr, context)) + .collect(Collectors.toList()); } - private List> analyzeSortList(WindowFunction node, - AnalysisContext context) { - return node.getSortList() - .stream() - .map(pair -> ImmutablePair - .of(analyzeSortOption(pair.getLeft()), - expressionAnalyzer.analyze(pair.getRight(), context))) - .collect(Collectors.toList()); + private List> analyzeSortList( + WindowFunction node, AnalysisContext context) { + return node.getSortList().stream() + .map( + pair -> + ImmutablePair.of( + analyzeSortOption(pair.getLeft()), + expressionAnalyzer.analyze(pair.getRight(), context))) + .collect(Collectors.toList()); } /** @@ -107,9 +101,6 @@ private SortOption analyzeSortOption(SortOption option) { if (option.getNullOrder() == null) { return (option.getSortOrder() == DESC) ? DEFAULT_DESC : DEFAULT_ASC; } - return new SortOption( - (option.getSortOrder() == DESC) ? DESC : ASC, - option.getNullOrder()); + return new SortOption((option.getSortOrder() == DESC) ? 
DESC : ASC, option.getNullOrder()); } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java index b5203033a8..8211207b2e 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java @@ -3,14 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; -/** - * Namespace of symbol to avoid naming conflict. - */ +/** Namespace of symbol to avoid naming conflict. */ public enum Namespace { - INDEX_NAME("Index"), FIELD_NAME("Field"), FUNCTION_NAME("Function"); @@ -20,5 +16,4 @@ public enum Namespace { Namespace(String name) { this.name = name; } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java index 8cc9505710..98fa4b3569 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; -/** - * Symbol in the scope. - */ +/** Symbol in the scope. 
*/ @ToString @Getter @RequiredArgsConstructor diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java index be7435c288..8bb6824a63 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; import static java.util.Collections.emptyMap; @@ -17,21 +16,16 @@ import java.util.TreeMap; import org.opensearch.sql.data.type.ExprType; -/** - * Symbol table for symbol definition and resolution. - */ +/** Symbol table for symbol definition and resolution. */ public class SymbolTable { - /** - * Two-dimension hash table to manage symbols with type in different namespace. - */ + /** Two-dimension hash table to manage symbols with type in different namespace. */ private Map> tableByNamespace = new EnumMap<>(Namespace.class); /** - * Two-dimension hash table to manage symbols with type in different namespace. - * Comparing with tableByNamespace, orderedTable use the LinkedHashMap to keep the order of - * symbol. + * Two-dimension hash table to manage symbols with type in different namespace. Comparing with + * tableByNamespace, orderedTable use the LinkedHashMap to keep the order of symbol. */ private Map> orderedTable = new EnumMap<>(Namespace.class); @@ -40,38 +34,32 @@ public class SymbolTable { * Store symbol with the type. Create new map for namespace for the first time. 
* * @param symbol symbol to define - * @param type symbol type + * @param type symbol type */ public void store(Symbol symbol, ExprType type) { - tableByNamespace.computeIfAbsent( - symbol.getNamespace(), - ns -> new TreeMap<>() - ).put(symbol.getName(), type); + tableByNamespace + .computeIfAbsent(symbol.getNamespace(), ns -> new TreeMap<>()) + .put(symbol.getName(), type); - orderedTable.computeIfAbsent( - symbol.getNamespace(), - ns -> new LinkedHashMap<>() - ).put(symbol.getName(), type); + orderedTable + .computeIfAbsent(symbol.getNamespace(), ns -> new LinkedHashMap<>()) + .put(symbol.getName(), type); } - /** - * Remove a symbol from SymbolTable. - */ + /** Remove a symbol from SymbolTable. */ public void remove(Symbol symbol) { tableByNamespace.computeIfPresent( symbol.getNamespace(), (k, v) -> { v.remove(symbol.getName()); return v; - } - ); + }); orderedTable.computeIfPresent( symbol.getNamespace(), (k, v) -> { v.remove(symbol.getName()); return v; - } - ); + }); } /** @@ -104,42 +92,42 @@ public Map lookupByPrefix(Symbol prefix) { } /** - * Look up all top level symbols in the namespace. - * this function is mainly used by SELECT * use case to get the top level fields - * Todo. currently, the top level fields is the field which doesn't include "." in the name or - * the prefix doesn't exist in the symbol table. - * e.g. The symbol table includes person, person.name, person/2.0. - * person, is the top level field - * person.name, isn't the top level field, because the prefix (person) in symbol table - * person/2.0, is the top level field, because the prefix (person/2) isn't in symbol table + * Look up all top level symbols in the namespace. this function is mainly used by SELECT * use + * case to get the top level fields Todo. currently, the top level fields is the field which + * doesn't include "." in the name or the prefix doesn't exist in the symbol table. e.g. The + * symbol table includes person, person.name, person/2.0. 
person, is the top level field + * person.name, isn't the top level field, because the prefix (person) in symbol table person/2.0, + * is the top level field, because the prefix (person/2) isn't in symbol table * - * @param namespace a namespace - * @return all symbols in the namespace map + * @param namespace a namespace + * @return all symbols in the namespace map */ public Map lookupAllFields(Namespace namespace) { final LinkedHashMap allSymbols = orderedTable.getOrDefault(namespace, new LinkedHashMap<>()); final LinkedHashMap results = new LinkedHashMap<>(); - allSymbols.entrySet().stream().filter(entry -> { - String symbolName = entry.getKey(); - int lastDot = symbolName.lastIndexOf("."); - return -1 == lastDot || !allSymbols.containsKey(symbolName.substring(0, lastDot)); - }).forEach(entry -> results.put(entry.getKey(), entry.getValue())); + allSymbols.entrySet().stream() + .filter( + entry -> { + String symbolName = entry.getKey(); + int lastDot = symbolName.lastIndexOf("."); + return -1 == lastDot || !allSymbols.containsKey(symbolName.substring(0, lastDot)); + }) + .forEach(entry -> results.put(entry.getKey(), entry.getValue())); return results; } /** * Look up all top level symbols in the namespace. 
* - * @param namespace a namespace - * @return all symbols in the namespace map + * @param namespace a namespace + * @return all symbols in the namespace map */ public Map lookupAllTupleFields(Namespace namespace) { final LinkedHashMap allSymbols = orderedTable.getOrDefault(namespace, new LinkedHashMap<>()); final LinkedHashMap result = new LinkedHashMap<>(); - allSymbols.entrySet().stream() - .forEach(entry -> result.put(entry.getKey(), entry.getValue())); + allSymbols.entrySet().stream().forEach(entry -> result.put(entry.getKey(), entry.getValue())); return result; } diff --git a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java index f02bc07ccc..973b10310b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast; import org.opensearch.sql.ast.expression.AggregateFunction; @@ -62,9 +61,7 @@ import org.opensearch.sql.ast.tree.TableFunction; import org.opensearch.sql.ast.tree.Values; -/** - * AST nodes visitor Defines the traverse path. - */ +/** AST nodes visitor Defines the traverse path. */ public abstract class AbstractNodeVisitor { public T visit(Node node, C context) { @@ -73,6 +70,7 @@ public T visit(Node node, C context) { /** * Visit child node. + * * @param node {@link Node} * @param context Context * @return Return Type. diff --git a/core/src/main/java/org/opensearch/sql/ast/Node.java b/core/src/main/java/org/opensearch/sql/ast/Node.java index f3147eeb43..faaf51f221 100644 --- a/core/src/main/java/org/opensearch/sql/ast/Node.java +++ b/core/src/main/java/org/opensearch/sql/ast/Node.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast; import java.util.List; import lombok.EqualsAndHashCode; import lombok.ToString; -/** - * AST node. 
- */ +/** AST node. */ @EqualsAndHashCode @ToString public abstract class Node { diff --git a/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java b/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java index d5f10fcfd4..4ceb387076 100644 --- a/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java +++ b/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java @@ -249,11 +249,13 @@ public static Function function(String funcName, UnresolvedExpression... funcArg } /** + *
    * CASE
-   *     WHEN search_condition THEN result_expr
-   *     [WHEN search_condition THEN result_expr] ...
-   *     [ELSE result_expr]
+   *    WHEN search_condition THEN result_expr
+ * [WHEN search_condition THEN result_expr] ... + * [ELSE result_expr] * END + *
*/ public UnresolvedExpression caseWhen(UnresolvedExpression elseClause, When... whenClauses) { @@ -261,11 +263,13 @@ public UnresolvedExpression caseWhen(UnresolvedExpression elseClause, } /** + *
    * CASE case_value_expr
    *     WHEN compare_expr THEN result_expr
    *     [WHEN compare_expr THEN result_expr] ...
    *     [ELSE result_expr]
    * END
+   * 
*/ public UnresolvedExpression caseWhen(UnresolvedExpression caseValueExpr, UnresolvedExpression elseClause, diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java index e8f730d7e9..5208e39623 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -17,8 +16,8 @@ import org.opensearch.sql.common.utils.StringUtils; /** - * Expression node of aggregate functions. - * Params include aggregate function name (AVG, SUM, MAX etc.) and the field to aggregate. + * Expression node of aggregate functions. Params include aggregate function name (AVG, SUM, MAX + * etc.) and the field to aggregate. */ @Getter @EqualsAndHashCode(callSuper = false) @@ -27,13 +26,16 @@ public class AggregateFunction extends UnresolvedExpression { private final String funcName; private final UnresolvedExpression field; private final List argList; + @Setter @Accessors(fluent = true) private UnresolvedExpression condition; + private Boolean distinct = false; /** * Constructor. + * * @param funcName function name. * @param field {@link UnresolvedExpression}. */ @@ -45,6 +47,7 @@ public AggregateFunction(String funcName, UnresolvedExpression field) { /** * Constructor. + * * @param funcName function name. * @param field {@link UnresolvedExpression}. * @param distinct whether distinct field is specified or not. 
diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java b/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java index 4183b19a3e..7b3078629b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.AllArgsConstructor; @@ -14,10 +13,10 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Alias abstraction that associate an unnamed expression with a name and an optional alias. - * The name and alias information preserved is useful for semantic analysis and response - * formatting eventually. This can avoid restoring the info in toString() method which is - * inaccurate because original info is already lost. + * Alias abstraction that associate an unnamed expression with a name and an optional alias. The + * name and alias information preserved is useful for semantic analysis and response formatting + * eventually. This can avoid restoring the info in toString() method which is inaccurate because + * original info is already lost. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @@ -26,19 +25,13 @@ @ToString public class Alias extends UnresolvedExpression { - /** - * Original field name. - */ + /** Original field name. */ private final String name; - /** - * Expression aliased. - */ + /** Expression aliased. */ private final UnresolvedExpression delegated; - /** - * Optional field alias. - */ + /** Optional field alias. 
*/ private String alias; @Override diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java b/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java index 1f5d919817..b9b90ea24a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -13,16 +12,13 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Represent the All fields which is been used in SELECT *. - */ +/** Represent the All fields which is been used in SELECT *. */ @ToString @EqualsAndHashCode(callSuper = false) public class AllFields extends UnresolvedExpression { public static final AllFields INSTANCE = new AllFields(); - private AllFields() { - } + private AllFields() {} public static AllFields of() { return INSTANCE; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/And.java b/core/src/main/java/org/opensearch/sql/ast/expression/And.java index 8d8c48f3b2..565f1eb4ab 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/And.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/And.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of logic AND. - */ +/** Expression node of logic AND. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java b/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java index f054710a32..4c2a485ea7 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Argument. - */ +/** Argument. */ @Getter @ToString @RequiredArgsConstructor diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java b/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java index 7e1fdb1516..d137a83957 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -14,15 +13,12 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node that includes a list of Expression nodes. - */ +/** Expression node that includes a list of Expression nodes. 
*/ @ToString @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class AttributeList extends UnresolvedExpression { - @Getter - private List attrList; + @Getter private List attrList; @Override public List getChild() { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Between.java b/core/src/main/java/org/opensearch/sql/ast/expression/Between.java index 886c9a9282..e13c3fb187 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Between.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Between.java @@ -12,9 +12,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Unresolved expression for BETWEEN. - */ +/** Unresolved expression for BETWEEN. */ @Data @EqualsAndHashCode(callSuper = false) public class Between extends UnresolvedExpression { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Case.java b/core/src/main/java/org/opensearch/sql/ast/expression/Case.java index 81c74f3ea4..583bd0de97 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Case.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Case.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -15,29 +14,23 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node that represents CASE clause similar as Switch statement in programming language. - */ +/** AST node that represents CASE clause similar as Switch statement in programming language. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @Getter @ToString public class Case extends UnresolvedExpression { - /** - * Value to be compared by WHEN statements. Null in the case of CASE WHEN conditions. - */ + /** Value to be compared by WHEN statements. Null in the case of CASE WHEN conditions. 
*/ private final UnresolvedExpression caseValue; /** - * Expression list that represents WHEN statements. Each is a mapping from condition - * to its result. + * Expression list that represents WHEN statements. Each is a mapping from condition to its + * result. */ private final List whenClauses; - /** - * Expression that represents ELSE statement result. - */ + /** Expression that represents ELSE statement result. */ private final UnresolvedExpression elseClause; @Override @@ -58,5 +51,4 @@ public List getChild() { public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitCase(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java b/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java index 9121dbd87c..2019346fb5 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import static org.opensearch.sql.expression.function.BuiltinFunctionName.CAST_TO_BOOLEAN; @@ -33,9 +32,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.function.FunctionName; -/** - * AST node that represents Cast clause. - */ +/** AST node that represents Cast clause. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @Getter @@ -59,19 +56,16 @@ public class Cast extends UnresolvedExpression { .put("datetime", CAST_TO_DATETIME.getName()) .build(); - /** - * The source expression cast from. - */ + /** The source expression cast from. */ private final UnresolvedExpression expression; - /** - * Expression that represents name of the target type. - */ + /** Expression that represents name of the target type. */ private final UnresolvedExpression convertedType; /** * Check if the given function name is a cast function or not. 
- * @param name function name + * + * @param name function name * @return true if cast function, otherwise false. */ public static boolean isCastFunction(FunctionName name) { @@ -80,7 +74,8 @@ public static boolean isCastFunction(FunctionName name) { /** * Get the cast function name for a given target data type. - * @param targetType target data type + * + * @param targetType target data type * @return cast function name corresponding */ public static FunctionName getCastFunctionName(ExprType targetType) { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java b/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java index 25cf3e0f73..8ba6ba5116 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java b/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java index 8755a15177..0b9add8f55 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java @@ -3,17 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprCoreType; -/** - * The DataType defintion in AST. - * Question, could we use {@link ExprCoreType} directly in AST? - */ +/** The DataType defintion in AST. Question, could we use {@link ExprCoreType} directly in AST? 
*/ @RequiredArgsConstructor public enum DataType { TYPE_ERROR(ExprCoreType.UNKNOWN), @@ -32,6 +28,5 @@ public enum DataType { TIMESTAMP(ExprCoreType.TIMESTAMP), INTERVAL(ExprCoreType.INTERVAL); - @Getter - private final ExprCoreType coreType; + @Getter private final ExprCoreType coreType; } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java b/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java index 806f897abf..344f3c5164 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,17 +13,13 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of binary operator or comparison relation EQUAL. - */ +/** Expression node of binary operator or comparison relation EQUAL. */ @ToString @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class EqualTo extends UnresolvedExpression { - @Getter - private UnresolvedExpression left; - @Getter - private UnresolvedExpression right; + @Getter private UnresolvedExpression left; + @Getter private UnresolvedExpression right; @Override public List getChild() { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Field.java b/core/src/main/java/org/opensearch/sql/ast/expression/Field.java index 9a8109fbe3..0a2d726ad4 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Field.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Field.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -23,16 +22,12 @@ public class Field extends UnresolvedExpression { private final List fieldArgs; - /** - * Constructor of Field. - */ + /** Constructor of Field. 
*/ public Field(UnresolvedExpression field) { this(field, Collections.emptyList()); } - /** - * Constructor of Field. - */ + /** Constructor of Field. */ public Field(UnresolvedExpression field, List fieldArgs) { this.field = field; this.fieldArgs = fieldArgs; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Function.java b/core/src/main/java/org/opensearch/sql/ast/expression/Function.java index c712d860f4..184342c5e3 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Function.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Function.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -14,10 +13,9 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.ast.AbstractNodeVisitor; - /** - * Expression node of scalar function. - * Params include function name (@funcName) and function arguments (@funcArgs) + * Expression node of scalar function. Params include function name (@funcName) and function + * arguments (@funcArgs) */ @Getter @EqualsAndHashCode(callSuper = false) @@ -38,9 +36,8 @@ public R accept(AbstractNodeVisitor nodeVisitor, C context) { @Override public String toString() { - return String.format("%s(%s)", funcName, - funcArgs.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); + return String.format( + "%s(%s)", + funcName, funcArgs.stream().map(Object::toString).collect(Collectors.joining(", "))); } } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java index 0d4e57a78c..128d9327e8 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java @@ -13,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of Highlight 
function. - */ +/** Expression node of Highlight function. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @Getter diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/In.java b/core/src/main/java/org/opensearch/sql/ast/expression/In.java index 9ce1c124cb..38c1b91b43 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/In.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/In.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -15,10 +14,9 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node of one-to-many mapping relation IN. - * Params include the field expression and/or wildcard field expression, - * nested field expression (@field). - * And the values that the field is mapped to (@valueList). + * Expression node of one-to-many mapping relation IN. Params include the field expression and/or + * wildcard field expression, nested field expression (@field). And the values that the field is + * mapped to (@valueList). 
*/ @Getter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java b/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java index 84b6ba02d1..c26f829f48 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java b/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java index 2a86c89cf6..19e1b07e39 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -13,7 +12,7 @@ @Getter @RequiredArgsConstructor -public enum IntervalUnit { +public enum IntervalUnit { UNKNOWN, MICROSECOND, @@ -44,9 +43,7 @@ public enum IntervalUnit { INTERVAL_UNITS = builder.add(IntervalUnit.values()).build(); } - /** - * Util method to get interval unit given the unit name. - */ + /** Util method to get interval unit given the unit name. 
*/ public static IntervalUnit of(String unit) { return INTERVAL_UNITS.stream() .filter(v -> unit.equalsIgnoreCase(v.name())) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Let.java b/core/src/main/java/org/opensearch/sql/ast/expression/Let.java index cea2a091e5..2f63a25f10 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Let.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Let.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Represent the assign operation. e.g. velocity = distance/speed. - */ +/** Represent the assign operation. e.g. velocity = distance/speed. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java b/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java index 3ff360dbf0..80c877dd97 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -14,9 +13,8 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node of literal type - * Params include literal value (@value) and - * literal data type (@type) which can be selected from {@link DataType}. + * Expression node of literal type Params include literal value (@value) and literal data type + * (@type) which can be selected from {@link DataType}. 
*/ @Getter @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Map.java b/core/src/main/java/org/opensearch/sql/ast/expression/Map.java index 45e98f127f..73d8d77261 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Map.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Map.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of one-to-one mapping relation. - */ +/** Expression node of one-to-one mapping relation. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java b/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java index 7d94cf8f31..be89dd9f19 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -14,14 +13,11 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Represents all tuple fields used in nested function. - */ +/** Represents all tuple fields used in nested function. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class NestedAllTupleFields extends UnresolvedExpression { - @Getter - private final String path; + @Getter private final String path; @Override public List getChild() { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Not.java b/core/src/main/java/org/opensearch/sql/ast/expression/Not.java index 2926c7e5cd..423cb088ef 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Not.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Not.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of the logic NOT. - */ +/** Expression node of the logic NOT. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Or.java b/core/src/main/java/org/opensearch/sql/ast/expression/Or.java index b0dabb6e4e..cc59170d31 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Or.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Or.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of the logic OR. - */ +/** Expression node of the logic OR. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java b/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java index 83a46323e6..7a2587c5f0 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.Getter; @@ -15,6 +14,5 @@ public enum ParseMethod { GROK("grok"), PATTERNS("patterns"); - @Getter - private final String name; + @Getter private final String name; } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java b/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java index 8b16119dc0..73c6e3782a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import static java.util.Objects.requireNonNull; @@ -29,9 +28,7 @@ public QualifiedName(String name) { this.parts = Collections.singletonList(name); } - /** - * QualifiedName Constructor. - */ + /** QualifiedName Constructor. */ public QualifiedName(Iterable parts) { List partsList = StreamSupport.stream(parts.spliterator(), false).collect(toList()); if (partsList.isEmpty()) { @@ -40,9 +37,7 @@ public QualifiedName(Iterable parts) { this.parts = partsList; } - /** - * Construct {@link QualifiedName} from list of string. - */ + /** Construct {@link QualifiedName} from list of string. */ public static QualifiedName of(String first, String... 
rest) { requireNonNull(first); ArrayList parts = new ArrayList<>(); @@ -55,9 +50,7 @@ public static QualifiedName of(Iterable parts) { return new QualifiedName(parts); } - /** - * Get Prefix of {@link QualifiedName}. - */ + /** Get Prefix of {@link QualifiedName}. */ public Optional getPrefix() { if (parts.size() == 1) { return Optional.empty(); @@ -71,7 +64,8 @@ public String getSuffix() { /** * Get first part of the qualified name. - * @return first part + * + * @return first part */ public Optional first() { if (parts.size() == 1) { @@ -81,17 +75,17 @@ public Optional first() { } /** - * Get rest parts of the qualified name. Assume that there must be remaining parts - * so caller is responsible for the check (first() or size() must be called first). - * For example: - * {@code - * QualifiedName name = ... - * Optional first = name.first(); - * if (first.isPresent()) { - * name.rest() ... - * } + * Get rest parts of the qualified name. Assume that there must be remaining parts so caller is + * responsible for the check (first() or size() must be called first).
+ * For example:
+ * {@code
+ *   QualifiedName name = ...
+ *   Optional first = name.first();
+ *   if (first.isPresent()) {
+ *   name.rest() ...
+ *   }
* } - * @return rest part(s) + * @return rest part(s) */ public QualifiedName rest() { return QualifiedName.of(parts.subList(1, parts.size())); diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java b/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java index 3166fe45c3..a9e26a611f 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.List; @@ -13,14 +12,11 @@ import lombok.Getter; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node that includes a list of RelevanceField nodes. - */ +/** Expression node that includes a list of RelevanceField nodes. */ @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class RelevanceFieldList extends UnresolvedExpression { - @Getter - private java.util.Map fieldList; + @Getter private java.util.Map fieldList; @Override public List getChild() { @@ -34,9 +30,7 @@ public R accept(AbstractNodeVisitor nodeVisitor, C context) { @Override public String toString() { - return fieldList - .entrySet() - .stream() + return fieldList.entrySet().stream() .map(e -> String.format("\"%s\" ^ %s", e.getKey(), e.getValue())) .collect(Collectors.joining(", ")); } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java index 1b73f9bd95..26d784dd8c 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java @@ -13,8 +13,8 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node of Score function. 
- * Score takes a relevance-search expression as an argument and returns it + * Expression node of Score function. Score takes a relevance-search expression as an argument and + * returns it */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Span.java b/core/src/main/java/org/opensearch/sql/ast/expression/Span.java index e57205c19c..edd309b22d 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Span.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Span.java @@ -13,10 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Span expression node. - * Params include field expression and the span value. - */ +/** Span expression node. Params include field expression and the span value. */ @Getter @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor @@ -35,5 +32,4 @@ public List getChild() { public R accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitSpan(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java b/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java index f1f3fb19d4..5252781edb 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java @@ -42,9 +42,7 @@ public enum SpanUnit { SPAN_UNITS = builder.add(SpanUnit.values()).build(); } - /** - * Util method to get span unit given the unit name. - */ + /** Util method to get span unit given the unit name. 
*/ public static SpanUnit of(String unit) { switch (unit) { case "": @@ -64,5 +62,4 @@ public static SpanUnit of(String unit) { public static String getName(SpanUnit unit) { return unit.name; } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java index e4def038ed..2c6eee46e9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -13,9 +12,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Argument. - */ +/** Argument. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java index f4bc88853f..e1754cbf76 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -15,9 +14,8 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node, representing the syntax that is not resolved to - * any other expression nodes yet but non-negligible - * This expression is often created as the index name, field name etc. + * Expression node, representing the syntax that is not resolved to any other expression nodes yet + * but non-negligible This expression is often created as the index name, field name etc. 
*/ @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java index ee3922f797..9b2d530b7f 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.EqualsAndHashCode; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/When.java b/core/src/main/java/org/opensearch/sql/ast/expression/When.java index a52870b408..db747592e5 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/When.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/When.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -15,23 +14,17 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node that represents WHEN clause. - */ +/** AST node that represents WHEN clause. */ @EqualsAndHashCode(callSuper = false) @Getter @RequiredArgsConstructor @ToString public class When extends UnresolvedExpression { - /** - * WHEN condition, either a search condition or compare value if case value present. - */ + /** WHEN condition, either a search condition or compare value if case value present. */ private final UnresolvedExpression condition; - /** - * Result to return if condition matched. - */ + /** Result to return if condition matched. 
*/ private final UnresolvedExpression result; @Override @@ -43,5 +36,4 @@ public List getChild() { public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitWhen(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java index 9a7535e1fe..47f5265765 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -42,5 +41,4 @@ public List getChild() { public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitWindowFunction(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java b/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java index 731feccd33..40db015fd7 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of the logic XOR. - */ +/** Expression node of the logic XOR. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java b/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java index 1e364d69fd..5ee260c130 100644 --- a/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java +++ b/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java @@ -12,9 +12,7 @@ import lombok.EqualsAndHashCode; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Explain Statement. - */ +/** Explain Statement. */ @Data @EqualsAndHashCode(callSuper = false) public class Explain extends Statement { diff --git a/core/src/main/java/org/opensearch/sql/ast/statement/Query.java b/core/src/main/java/org/opensearch/sql/ast/statement/Query.java index 82efdde4dd..6366451b72 100644 --- a/core/src/main/java/org/opensearch/sql/ast/statement/Query.java +++ b/core/src/main/java/org/opensearch/sql/ast/statement/Query.java @@ -16,9 +16,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.tree.UnresolvedPlan; -/** - * Query Statement. - */ +/** Query Statement. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java b/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java index e32a8dbfd8..d90071a0ca 100644 --- a/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java +++ b/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java @@ -11,9 +11,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Statement is the high interface of core engine. - */ +/** Statement is the high interface of core engine. 
*/ public abstract class Statement extends Node { @Override public R accept(AbstractNodeVisitor visitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/AD.java b/core/src/main/java/org/opensearch/sql/ast/tree/AD.java index e9aee25c23..0e1cc33db9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/AD.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/AD.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java b/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java index e9fa26e981..f098d0ec53 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -17,9 +16,7 @@ import org.opensearch.sql.ast.expression.Argument; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Aggregation, the interface for building aggregation actions in queries. - */ +/** Logical plan node of Aggregation, the interface for building aggregation actions in queries. */ @Getter @Setter @ToString @@ -32,23 +29,21 @@ public class Aggregation extends UnresolvedPlan { private List argExprList; private UnresolvedPlan child; - /** - * Aggregation Constructor without span and argument. - */ - public Aggregation(List aggExprList, - List sortExprList, - List groupExprList) { + /** Aggregation Constructor without span and argument. */ + public Aggregation( + List aggExprList, + List sortExprList, + List groupExprList) { this(aggExprList, sortExprList, groupExprList, null, Collections.emptyList()); } - /** - * Aggregation Constructor. 
- */ - public Aggregation(List aggExprList, - List sortExprList, - List groupExprList, - UnresolvedExpression span, - List argExprList) { + /** Aggregation Constructor. */ + public Aggregation( + List aggExprList, + List sortExprList, + List groupExprList, + UnresolvedExpression span, + List argExprList) { this.aggExprList = aggExprList; this.sortExprList = sortExprList; this.groupExprList = groupExprList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java b/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java index cf82c2b070..832b79d34a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java @@ -9,15 +9,10 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node to represent close cursor operation. - * Actually a wrapper to the AST. - */ +/** AST node to represent close cursor operation. Actually a wrapper to the AST. */ public class CloseCursor extends UnresolvedPlan { - /** - * An instance of {@link FetchCursor}. - */ + /** An instance of {@link FetchCursor}. */ private UnresolvedPlan cursor; @Override diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java b/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java index 6514d65a04..3173f42f9c 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -18,9 +17,7 @@ import org.opensearch.sql.ast.expression.Argument; import org.opensearch.sql.ast.expression.Field; -/** - * AST node represent Dedupe operation. - */ +/** AST node represent Dedupe operation. 
*/ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java b/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java index 184f2ac497..ecceabd757 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -16,9 +15,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.Let; -/** - * AST node represent Eval operation. - */ +/** AST node represent Eval operation. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java b/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java index aa327c295b..d6a260ad0b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java @@ -10,15 +10,11 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * An unresolved plan that represents fetching the next - * batch in paginationed plan. - */ +/** An unresolved plan that represents fetching the next batch in paginated plan. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class FetchCursor extends UnresolvedPlan { - @Getter - final String cursor; + @Getter final String cursor; @Override public T accept(AbstractNodeVisitor nodeVisitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java b/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java index 8128078930..6c57275db9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -14,9 +13,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Filter, the interface for building filters in queries. - */ +/** Logical plan node of Filter, the interface for building filters in queries. */ @ToString @EqualsAndHashCode(callSuper = false) @Getter diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Head.java b/core/src/main/java/org/opensearch/sql/ast/tree/Head.java index 8590c212ad..bf6b2caed1 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Head.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Head.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -16,9 +15,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * AST node represent Head operation. - */ +/** AST node represent Head operation. 
*/ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java b/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java index 5d2e32c28b..ed03efed8a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java b/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java index f9b8a8feab..9290776172 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -38,5 +37,4 @@ public List getChild() { public T accept(AbstractNodeVisitor visitor, C context) { return visitor.visitLimit(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/ML.java b/core/src/main/java/org/opensearch/sql/ast/tree/ML.java index 2f83a993b7..320fb2bcf7 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/ML.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/ML.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import static org.opensearch.sql.utils.MLCommonsConstants.ACTION; @@ -66,7 +65,7 @@ public List getChild() { } private String getAction() { - return (String) arguments.get(ACTION).getValue(); + return (String) arguments.get(ACTION).getValue(); } /** @@ -85,7 +84,7 @@ public Map getOutputSchema(TypeEnvironment env) { return getPredictOutputSchema(); default: throw new IllegalArgumentException( - "Action error. Please indicate train, predict or trainandpredict."); + "Action error. 
Please indicate train, predict or trainandpredict."); } } @@ -122,8 +121,8 @@ public Map getPredictOutputSchema() { * @return the schema */ public Map getTrainOutputSchema() { - boolean isAsync = arguments.containsKey(ASYNC) - ? (boolean) arguments.get(ASYNC).getValue() : false; + boolean isAsync = + arguments.containsKey(ASYNC) ? (boolean) arguments.get(ASYNC).getValue() : false; Map res = new HashMap<>(Map.of(STATUS, ExprCoreType.STRING)); if (isAsync) { res.put(TASKID, ExprCoreType.STRING); diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java b/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java index 55e0e8c7a6..69b11600c9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java @@ -13,16 +13,12 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node to represent pagination operation. - * Actually a wrapper to the AST. - */ +/** AST node to represent pagination operation. Actually a wrapper to the AST. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) @ToString public class Paginate extends UnresolvedPlan { - @Getter - private final int pageSize; + @Getter private final int pageSize; private UnresolvedPlan child; public Paginate(int pageSize, UnresolvedPlan child) { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java b/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java index 02a69c93af..e91c4a68ff 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -20,9 +19,7 @@ import org.opensearch.sql.ast.expression.ParseMethod; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * AST node represent Parse with regex operation. - */ +/** AST node represent Parse with regex operation. */ @Getter @Setter @ToString @@ -30,29 +27,19 @@ @RequiredArgsConstructor @AllArgsConstructor public class Parse extends UnresolvedPlan { - /** - * Method used to parse a field. - */ + /** Method used to parse a field. */ private final ParseMethod parseMethod; - /** - * Field. - */ + /** Field. */ private final UnresolvedExpression sourceField; - /** - * Pattern. - */ + /** Pattern. */ private final Literal pattern; - /** - * Optional arguments. - */ + /** Optional arguments. */ private final Map arguments; - /** - * Child Plan. - */ + /** Child Plan. 
*/ private UnresolvedPlan child; @Override diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Project.java b/core/src/main/java/org/opensearch/sql/ast/tree/Project.java index 33c7128855..cffb4dfdce 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Project.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Project.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -17,15 +16,12 @@ import org.opensearch.sql.ast.expression.Argument; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Project, the interface for building the list of searching fields. - */ +/** Logical plan node of Project, the interface for building the list of searching fields. */ @ToString @Getter @EqualsAndHashCode(callSuper = false) public class Project extends UnresolvedPlan { - @Setter - private List projectList; + @Setter private List projectList; private List argExprList; private UnresolvedPlan child; @@ -43,9 +39,7 @@ public boolean hasArgument() { return !argExprList.isEmpty(); } - /** - * The Project could been used to exclude fields from the source. - */ + /** The Project could been used to exclude fields from the source. */ public boolean isExcluded() { if (hasArgument()) { Argument argument = argExprList.get(0); diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java b/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java index 407d37e8e4..2cbe170541 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import java.util.Collections; @@ -19,9 +18,7 @@ import org.opensearch.sql.ast.expression.Field; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * AST node represent RareTopN operation. 
- */ +/** AST node represent RareTopN operation. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java b/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java index 8c3868329f..ec5264a86b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -18,9 +17,7 @@ import org.opensearch.sql.ast.expression.QualifiedName; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Relation, the interface for building the searching sources. - */ +/** Logical plan node of Relation, the interface for building the searching sources. */ @AllArgsConstructor @ToString @EqualsAndHashCode(callSuper = false) @@ -39,9 +36,7 @@ public Relation(UnresolvedExpression tableName, String alias) { this.alias = alias; } - /** - * Optional alias name for the relation. - */ + /** Optional alias name for the relation. */ private String alias; /** @@ -72,9 +67,9 @@ public String getAlias() { } /** - * Get Qualified name preservs parts of the user given identifiers. - * This can later be utilized to determine DataSource,Schema and Table Name during - * Analyzer stage. So Passing QualifiedName directly to Analyzer Stage. + * Get Qualified name preservs parts of the user given identifiers. This can later be utilized to + * determine DataSource,Schema and Table Name during Analyzer stage. So Passing QualifiedName + * directly to Analyzer Stage. * * @return TableQualifiedName. 
*/ @@ -82,9 +77,10 @@ public QualifiedName getTableQualifiedName() { if (tableName.size() == 1) { return (QualifiedName) tableName.get(0); } else { - return new QualifiedName(tableName.stream() - .map(UnresolvedExpression::toString) - .collect(Collectors.joining(COMMA))); + return new QualifiedName( + tableName.stream() + .map(UnresolvedExpression::toString) + .collect(Collectors.joining(COMMA))); } } diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java b/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java index 89122bea7f..5ab4d71dd9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Logical plan node of RelationSubquery. - */ +/** Logical plan node of RelationSubquery. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor @@ -25,9 +22,7 @@ public class RelationSubquery extends UnresolvedPlan { private UnresolvedPlan query; private String alias; - /** - * Take subquery alias as table name. - */ + /** Take subquery alias as table name. 
*/ public String getAliasAsTableName() { return alias; } diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java b/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java index 69700c871c..e6f760aca0 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java b/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java index 5fb4139bea..073cb7aa1b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import static org.opensearch.sql.ast.tree.Sort.NullOrder.NULL_FIRST; @@ -22,9 +21,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.Field; -/** - * AST node for Sort {@link Sort#sortList} represent a list of sort expression and sort options. - */ +/** AST node for Sort {@link Sort#sortList} represent a list of sort expression and sort options. */ @ToString @EqualsAndHashCode(callSuper = false) @Getter @@ -50,19 +47,14 @@ public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitSort(this, context); } - /** - * Sort Options. - */ + /** Sort Options. */ @Data public static class SortOption { - /** - * Default ascending sort option, null first. - */ + /** Default ascending sort option, null first. */ public static SortOption DEFAULT_ASC = new SortOption(ASC, NULL_FIRST); - /** - * Default descending sort option, null last. - */ + + /** Default descending sort option, null last. 
*/ public static SortOption DEFAULT_DESC = new SortOption(DESC, NULL_LAST); private final SortOrder sortOrder; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java b/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java index 70ab852571..a67a943dcb 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -16,9 +15,7 @@ import org.opensearch.sql.ast.expression.QualifiedName; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * ASTNode for Table Function. - */ +/** AST Node for Table Function. */ @ToString @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor @@ -26,8 +23,7 @@ public class TableFunction extends UnresolvedPlan { private final UnresolvedExpression functionName; - @Getter - private final List arguments; + @Getter private final List arguments; public QualifiedName getFunctionName() { return (QualifiedName) functionName; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java b/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java index 672a4602ed..3074303b0d 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import lombok.EqualsAndHashCode; @@ -11,9 +10,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Abstract unresolved plan. - */ +/** Abstract unresolved plan. 
*/ @EqualsAndHashCode(callSuper = false) @ToString public abstract class UnresolvedPlan extends Node { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Values.java b/core/src/main/java/org/opensearch/sql/ast/tree/Values.java index 5a662912f9..65d7e8d7cb 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Values.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Values.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -16,9 +15,7 @@ import org.opensearch.sql.ast.Node; import org.opensearch.sql.ast.expression.Literal; -/** - * AST node class for a sequence of literal values. - */ +/** AST node class for a sequence of literal values. */ @ToString @Getter @EqualsAndHashCode(callSuper = false) @@ -41,5 +38,4 @@ public T accept(AbstractNodeVisitor nodeVisitor, C context) { public List getChild() { return ImmutableList.of(); } - } diff --git a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java index 1f6363c068..48781df847 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import com.google.common.base.Objects; import lombok.RequiredArgsConstructor; -/** - * Expression Number Value. - */ +/** Expression Number Value. 
*/ @RequiredArgsConstructor public abstract class AbstractExprNumberValue extends AbstractExprValue { private final Number value; diff --git a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java index ad2c2ddb49..f5ac4d493b 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java @@ -3,23 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.exception.ExpressionEvaluationException; -/** - * Abstract ExprValue. - */ +/** Abstract ExprValue. */ public abstract class AbstractExprValue implements ExprValue { - /** - * The customize compareTo logic. - */ + /** The customize compareTo logic. */ @Override public int compareTo(ExprValue other) { if (this.isNull() || this.isMissing() || other.isNull() || other.isMissing()) { throw new IllegalStateException( - "[BUG] Unreachable, Comparing with NULL or MISSING is undefined"); + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined"); } if ((this.isNumber() && other.isNumber()) || (this.isDateTime() && other.isDateTime()) @@ -36,11 +31,33 @@ public int compareTo(ExprValue other) { /** * The customize equals logic. * The table below list the NULL and MISSING handling logic. - * A B A == B - * NULL NULL TRUE - * NULL MISSING FALSE - * MISSING NULL FALSE - * MISSING MISSING TRUE + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
ABA == B
NULLNULLTRUE
NULLMISSINGFALSE
MISSINGNULLFALSE
MISSINGMISSINGTRUE
*/ @Override public boolean equals(Object o) { @@ -59,13 +76,9 @@ public boolean equals(Object o) { } } - /** - * The expression value compare. - */ + /** The expression value compare. */ public abstract int compare(ExprValue other); - /** - * The expression value equal. - */ + /** The expression value equal. */ public abstract boolean equal(ExprValue other); } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java index d655c0dabb..ee97cf2228 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import com.google.common.base.Objects; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Boolean Value. - */ +/** Expression Boolean Value. */ public class ExprBooleanValue extends AbstractExprValue { private static final ExprBooleanValue TRUE = new ExprBooleanValue(true); private static final ExprBooleanValue FALSE = new ExprBooleanValue(false); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java index b39e6e9d7f..8e74a83cf3 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Byte Value. - */ +/** Expression Byte Value. 
*/ public class ExprByteValue extends AbstractExprNumberValue { public ExprByteValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java index 1326733263..d1f56c3d76 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import com.google.common.base.Objects; @@ -15,9 +14,7 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Collection Value. - */ +/** Expression Collection Value. */ @RequiredArgsConstructor public class ExprCollectionValue extends AbstractExprValue { private final List valueList; @@ -43,9 +40,7 @@ public List collectionValue() { @Override public String toString() { - return valueList.stream() - .map(Object::toString) - .collect(Collectors.joining(", ", "[", "]")); + return valueList.stream().map(Object::toString).collect(Collectors.joining(", ", "[", "]")); } @Override @@ -68,9 +63,7 @@ public boolean equal(ExprValue o) { } } - /** - * Only compare the size of the list. - */ + /** Only compare the size of the list. 
*/ @Override public int compare(ExprValue other) { return Integer.compare(valueList.size(), other.collectionValue().size()); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java index 57ce87df47..3f3f67a4fa 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL; @@ -22,23 +21,19 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Expression Date Value. - */ +/** Expression Date Value. */ @RequiredArgsConstructor public class ExprDateValue extends AbstractExprValue { private final LocalDate date; - /** - * Constructor of ExprDateValue. - */ + /** Constructor of ExprDateValue. 
*/ public ExprDateValue(String date) { try { this.date = LocalDate.parse(date, DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL); } catch (DateTimeParseException e) { - throw new SemanticCheckException(String.format("date:%s in unsupported format, please use " - + "yyyy-MM-dd", date)); + throw new SemanticCheckException( + String.format("date:%s in unsupported format, please use 'yyyy-MM-dd'", date)); } } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java index 8d40aaf82c..305958043f 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_WITH_TZ; @@ -23,20 +22,19 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; - @RequiredArgsConstructor public class ExprDatetimeValue extends AbstractExprValue { private final LocalDateTime datetime; - /** - * Constructor with datetime string as input. - */ + /** Constructor with datetime string as input. 
*/ public ExprDatetimeValue(String datetime) { try { this.datetime = LocalDateTime.parse(datetime, DATE_TIME_FORMATTER_WITH_TZ); } catch (DateTimeParseException e) { - throw new SemanticCheckException(String.format("datetime:%s in unsupported format, please " - + "use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", datetime)); + throw new SemanticCheckException( + String.format( + "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + datetime)); } } @@ -77,9 +75,11 @@ public boolean equal(ExprValue other) { @Override public String value() { - return String.format("%s %s", DateTimeFormatter.ISO_DATE.format(datetime), - DateTimeFormatter.ISO_TIME.format((datetime.getNano() == 0) - ? datetime.truncatedTo(ChronoUnit.SECONDS) : datetime)); + return String.format( + "%s %s", + DateTimeFormatter.ISO_DATE.format(datetime), + DateTimeFormatter.ISO_TIME.format( + (datetime.getNano() == 0) ? datetime.truncatedTo(ChronoUnit.SECONDS) : datetime)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java index 171b064e68..c192fe901c 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Double Value. - */ +/** Expression Double Value. 
*/ public class ExprDoubleValue extends AbstractExprNumberValue { public ExprDoubleValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java index dc454b4b50..6d321687fb 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Float Value. - */ +/** Expression Float Value. */ public class ExprFloatValue extends AbstractExprNumberValue { public ExprFloatValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java index 06947766fc..81321c1f12 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Integer Value. - */ +/** Expression Integer Value. 
*/ public class ExprIntegerValue extends AbstractExprNumberValue { public ExprIntegerValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java index 25a3115e8c..754520d7c8 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.time.temporal.TemporalAmount; @@ -27,7 +26,8 @@ public int compare(ExprValue other) { TemporalAmount otherInterval = other.intervalValue(); if (!interval.getClass().equals(other.intervalValue().getClass())) { throw new ExpressionEvaluationException( - String.format("invalid to compare intervals with units %s and %s", + String.format( + "invalid to compare intervals with units %s and %s", unit(), ((ExprIntervalValue) other).unit())); } return Long.compare( @@ -49,12 +49,9 @@ public ExprType type() { return ExprCoreType.INTERVAL; } - /** - * Util method to get temporal unit stored locally. - */ + /** Util method to get temporal unit stored locally. */ public TemporalUnit unit() { - return interval.getUnits() - .stream() + return interval.getUnits().stream() .filter(v -> interval.get(v) != 0) .findAny() .orElse(interval.getUnits().get(0)); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java index 1df590246c..537a164490 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Long Value. 
- */ +/** Expression Long Value. */ public class ExprLongValue extends AbstractExprNumberValue { public ExprLongValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java index 9908074773..80add24042 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java @@ -3,21 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.util.Objects; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Missing Value. - */ +/** Expression Missing Value. */ public class ExprMissingValue extends AbstractExprValue { private static final ExprMissingValue instance = new ExprMissingValue(); - private ExprMissingValue() { - } + private ExprMissingValue() {} public static ExprMissingValue of() { return instance; @@ -40,13 +36,13 @@ public boolean isMissing() { @Override public int compare(ExprValue other) { - throw new IllegalStateException(String.format("[BUG] Unreachable, Comparing with MISSING is " - + "undefined")); + throw new IllegalStateException( + String.format("[BUG] Unreachable, Comparing with MISSING is " + "undefined")); } /** - * Missing value is equal to Missing value. - * Notes, this function should only used for Java Object Compare. + * Missing value is equal to Missing value. Notes, this function should only used for Java Object + * Compare. 
*/ @Override public boolean equal(ExprValue other) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java index 54d4811d33..ac2e185e65 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java @@ -3,21 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.util.Objects; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Null Value. - */ +/** Expression Null Value. */ public class ExprNullValue extends AbstractExprValue { private static final ExprNullValue instance = new ExprNullValue(); - private ExprNullValue() { - } + private ExprNullValue() {} @Override public int hashCode() { @@ -55,8 +51,8 @@ public int compare(ExprValue other) { } /** - * NULL value is equal to NULL value. - * Notes, this function should only used for Java Object Compare. + * NULL value is equal to NULL value. Notes, this function should only used for Java Object + * Compare. */ @Override public boolean equal(ExprValue other) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java index 3e5f6858bc..418cefa166 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Short Value. - */ +/** Expression Short Value. 
*/ public class ExprShortValue extends AbstractExprNumberValue { public ExprShortValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java index c41c23d6ac..7745af62b6 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.time.LocalDate; @@ -15,9 +14,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Expression String Value. - */ +/** Expression String Value. */ @RequiredArgsConstructor public class ExprStringValue extends AbstractExprValue { private final String value; @@ -44,11 +41,13 @@ public LocalDateTime datetimeValue() { } catch (SemanticCheckException e) { try { return new ExprDatetimeValue( - LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) + LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) .datetimeValue(); } catch (SemanticCheckException exception) { - throw new SemanticCheckException(String.format("datetime:%s in unsupported format, please " - + "use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", value)); + throw new SemanticCheckException( + String.format( + "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + value)); } } } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java index db5bf7cb52..c22b423c7d 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static 
java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; @@ -23,23 +22,20 @@ import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.function.FunctionProperties; -/** - * Expression Time Value. - */ +/** Expression Time Value. */ @RequiredArgsConstructor public class ExprTimeValue extends AbstractExprValue { private final LocalTime time; - /** - * Constructor of ExprTimeValue. - */ + /** Constructor of ExprTimeValue. */ public ExprTimeValue(String time) { try { this.time = LocalTime.parse(time, DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL); } catch (DateTimeParseException e) { - throw new SemanticCheckException(String.format("time:%s in unsupported format, please use " - + "HH:mm:ss[.SSSSSSSSS]", time)); + throw new SemanticCheckException( + String.format( + "time:%s in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", time)); } } @@ -67,8 +63,7 @@ public LocalDateTime datetimeValue(FunctionProperties functionProperties) { } public Instant timestampValue(FunctionProperties functionProperties) { - return ZonedDateTime.of(dateValue(functionProperties), timeValue(), UTC_ZONE_ID) - .toInstant(); + return ZonedDateTime.of(dateValue(functionProperties), timeValue(), UTC_ZONE_ID).toInstant(); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java index d15cee5e71..455a379b03 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS; @@ -22,33 +21,33 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Expression Timestamp Value. 
- */ +/** Expression Timestamp Value. */ @RequiredArgsConstructor public class ExprTimestampValue extends AbstractExprValue { private final Instant timestamp; - /** - * Constructor. - */ + /** Constructor. */ public ExprTimestampValue(String timestamp) { try { - this.timestamp = LocalDateTime.parse(timestamp, DATE_TIME_FORMATTER_VARIABLE_NANOS) - .atZone(UTC_ZONE_ID) - .toInstant(); + this.timestamp = + LocalDateTime.parse(timestamp, DATE_TIME_FORMATTER_VARIABLE_NANOS) + .atZone(UTC_ZONE_ID) + .toInstant(); } catch (DateTimeParseException e) { - throw new SemanticCheckException(String.format("timestamp:%s in unsupported format, please " - + "use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", timestamp)); + throw new SemanticCheckException( + String.format( + "timestamp:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + timestamp)); } - } @Override public String value() { - return timestamp.getNano() == 0 ? DATE_TIME_FORMATTER_WITHOUT_NANO.withZone(UTC_ZONE_ID) - .format(timestamp.truncatedTo(ChronoUnit.SECONDS)) + return timestamp.getNano() == 0 + ? DATE_TIME_FORMATTER_WITHOUT_NANO + .withZone(UTC_ZONE_ID) + .format(timestamp.truncatedTo(ChronoUnit.SECONDS)) : DATE_TIME_FORMATTER_VARIABLE_NANOS.withZone(UTC_ZONE_ID).format(timestamp); } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java index 749de931ee..856075bed8 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.util.Iterator; @@ -18,9 +17,7 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; import org.opensearch.sql.storage.bindingtuple.LazyBindingTuple; -/** - * Expression Tuple Value. - */ +/** Expression Tuple Value. 
*/ @RequiredArgsConstructor public class ExprTupleValue extends AbstractExprValue { @@ -47,8 +44,7 @@ public ExprType type() { @Override public String toString() { - return valueMap.entrySet() - .stream() + return valueMap.entrySet().stream() .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) .collect(Collectors.joining(",", "{", "}")); } @@ -70,6 +66,7 @@ public ExprValue keyValue(String key) { /** * Override the equals method. + * * @return true for equal, otherwise false. */ public boolean equal(ExprValue o) { @@ -91,9 +88,7 @@ public boolean equal(ExprValue o) { } } - /** - * Only compare the size of the map. - */ + /** Only compare the size of the map. */ @Override public int compare(ExprValue other) { return Integer.compare(valueMap.size(), other.tupleValue().size()); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java index 1ae03de37b..86bead77b7 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.io.Serializable; @@ -19,18 +18,12 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.storage.bindingtuple.BindingTuple; -/** - * The definition of the Expression Value. - */ +/** The definition of the Expression Value. */ public interface ExprValue extends Serializable, Comparable { - /** - * Get the Object value of the Expression Value. - */ + /** Get the Object value of the Expression Value. */ Object value(); - /** - * Get the {@link ExprCoreType} of the Expression Value. - */ + /** Get the {@link ExprCoreType} of the Expression Value. */ ExprType type(); /** @@ -69,136 +62,104 @@ default boolean isDateTime() { return false; } - /** - * Get the {@link BindingTuple}. - */ + /** Get the {@link BindingTuple}. 
*/ default BindingTuple bindingTuples() { return BindingTuple.EMPTY; } - /** - * Get byte value. - */ + /** Get byte value. */ default Byte byteValue() { throw new ExpressionEvaluationException( "invalid to get byteValue from value of type " + type()); } - /** - * Get short value. - */ + /** Get short value. */ default Short shortValue() { throw new ExpressionEvaluationException( "invalid to get shortValue from value of type " + type()); } - /** - * Get integer value. - */ + /** Get integer value. */ default Integer integerValue() { throw new ExpressionEvaluationException( "invalid to get integerValue from value of type " + type()); } - /** - * Get long value. - */ + /** Get long value. */ default Long longValue() { throw new ExpressionEvaluationException( "invalid to get longValue from value of type " + type()); } - /** - * Get float value. - */ + /** Get float value. */ default Float floatValue() { throw new ExpressionEvaluationException( "invalid to get floatValue from value of type " + type()); } - /** - * Get float value. - */ + /** Get float value. */ default Double doubleValue() { throw new ExpressionEvaluationException( "invalid to get doubleValue from value of type " + type()); } - /** - * Get string value. - */ + /** Get string value. */ default String stringValue() { throw new ExpressionEvaluationException( "invalid to get stringValue from value of type " + type()); } - /** - * Get boolean value. - */ + /** Get boolean value. */ default Boolean booleanValue() { throw new ExpressionEvaluationException( "invalid to get booleanValue from value of type " + type()); } - /** - * Get timestamp value. - */ + /** Get timestamp value. */ default Instant timestampValue() { throw new ExpressionEvaluationException( "invalid to get timestampValue from value of type " + type()); } - /** - * Get time value. - */ + /** Get time value. 
*/ default LocalTime timeValue() { throw new ExpressionEvaluationException( "invalid to get timeValue from value of type " + type()); } - /** - * Get date value. - */ + /** Get date value. */ default LocalDate dateValue() { throw new ExpressionEvaluationException( "invalid to get dateValue from value of type " + type()); } - /** - * Get datetime value. - */ + /** Get datetime value. */ default LocalDateTime datetimeValue() { throw new ExpressionEvaluationException( "invalid to get datetimeValue from value of type " + type()); } - /** - * Get interval value. - */ + /** Get interval value. */ default TemporalAmount intervalValue() { throw new ExpressionEvaluationException( "invalid to get intervalValue from value of type " + type()); } - /** - * Get map value. - */ + /** Get map value. */ default Map tupleValue() { throw new ExpressionEvaluationException( "invalid to get tupleValue from value of type " + type()); } - /** - * Get collection value. - */ + /** Get collection value. */ default List collectionValue() { throw new ExpressionEvaluationException( "invalid to get collectionValue from value of type " + type()); } /** - * Get the value specified by key from {@link ExprTupleValue}. - * This method only be implemented in {@link ExprTupleValue}. + * Get the value specified by key from {@link ExprTupleValue}. This method only be implemented in + * {@link ExprTupleValue}. */ default ExprValue keyValue(String key) { return ExprMissingValue.of(); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java index 43a3140ef3..a259eb9fba 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java @@ -18,9 +18,7 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.exception.ExpressionEvaluationException; -/** - * The definition of {@link ExprValue} factory. 
- */ +/** The definition of {@link ExprValue} factory. */ @UtilityClass public class ExprValueUtils { public static final ExprValue LITERAL_TRUE = ExprBooleanValue.of(true); @@ -80,19 +78,15 @@ public static ExprValue timestampValue(Instant value) { return new ExprTimestampValue(value); } - /** - * {@link ExprTupleValue} constructor. - */ + /** {@link ExprTupleValue} constructor. */ public static ExprValue tupleValue(Map map) { LinkedHashMap valueMap = new LinkedHashMap<>(); - map.forEach((k, v) -> valueMap - .put(k, v instanceof ExprValue ? (ExprValue) v : fromObjectValue(v))); + map.forEach( + (k, v) -> valueMap.put(k, v instanceof ExprValue ? (ExprValue) v : fromObjectValue(v))); return new ExprTupleValue(valueMap); } - /** - * {@link ExprCollectionValue} constructor. - */ + /** {@link ExprCollectionValue} constructor. */ public static ExprValue collectionValue(List list) { List valueList = new ArrayList<>(); list.forEach(o -> valueList.add(fromObjectValue(o))); @@ -107,9 +101,7 @@ public static ExprValue nullValue() { return ExprNullValue.of(); } - /** - * Construct ExprValue from Object. - */ + /** Construct ExprValue from Object. */ public static ExprValue fromObjectValue(Object o) { if (null == o) { return LITERAL_NULL; @@ -149,19 +141,17 @@ public static ExprValue fromObjectValue(Object o) { } } - /** - * Construct ExprValue from Object with ExprCoreType. - */ + /** Construct ExprValue from Object with ExprCoreType. 
*/ public static ExprValue fromObjectValue(Object o, ExprCoreType type) { switch (type) { case TIMESTAMP: - return new ExprTimestampValue((String)o); + return new ExprTimestampValue((String) o); case DATE: - return new ExprDateValue((String)o); + return new ExprDateValue((String) o); case TIME: - return new ExprTimeValue((String)o); + return new ExprTimeValue((String) o); case DATETIME: - return new ExprDatetimeValue((String)o); + return new ExprDatetimeValue((String) o); default: return fromObjectValue(o); } diff --git a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java index 815f94a9df..f1979d8666 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java +++ b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.type; import com.google.common.collect.ImmutableMap; @@ -15,25 +14,18 @@ import java.util.Set; import java.util.stream.Collectors; -/** - * Expression Type. - */ +/** Expression Type. */ public enum ExprCoreType implements ExprType { - /** - * Unknown due to unsupported data type. - */ + /** Unknown due to unsupported data type. */ UNKNOWN, /** - * Undefined type for special literal such as NULL. - * As the root of data type tree, it is compatible with any other type. - * In other word, undefined type is the "narrowest" type. + * Undefined type for special literal such as NULL. As the root of data type tree, it is + * compatible with any other type. In other word, undefined type is the "narrowest" type. */ UNDEFINED, - /** - * Numbers. - */ + /** Numbers. */ BYTE(UNDEFINED), SHORT(BYTE), INTEGER(SHORT), @@ -41,43 +33,29 @@ public enum ExprCoreType implements ExprType { FLOAT(LONG), DOUBLE(FLOAT), - /** - * String. - */ + /** String. */ STRING(UNDEFINED), - /** - * Boolean. - */ + /** Boolean. */ BOOLEAN(STRING), - /** - * Date. - */ + /** Date. 
*/ DATE(STRING), TIME(STRING), DATETIME(STRING, DATE, TIME), TIMESTAMP(STRING, DATETIME), INTERVAL(UNDEFINED), - /** - * Struct. - */ + /** Struct. */ STRUCT(UNDEFINED), - /** - * Array. - */ + /** Array. */ ARRAY(UNDEFINED); - /** - * Parents (wider/compatible types) of current base type. - */ + /** Parents (wider/compatible types) of current base type. */ private final List parents = new ArrayList<>(); - /** - * The mapping between Type and legacy JDBC type name. - */ + /** The mapping between Type and legacy JDBC type name. */ private static final Map LEGACY_TYPE_NAME_MAPPING = new ImmutableMap.Builder() .put(STRUCT, "OBJECT") @@ -116,14 +94,12 @@ public String legacyTypeName() { return LEGACY_TYPE_NAME_MAPPING.getOrDefault(this, this.name()); } - /** - * Return all the valid ExprCoreType. - */ + /** Return all the valid ExprCoreType. */ public static List coreTypes() { return Arrays.stream(ExprCoreType.values()) - .filter(type -> type != UNKNOWN) - .filter(type -> type != UNDEFINED) - .collect(Collectors.toList()); + .filter(type -> type != UNKNOWN) + .filter(type -> type != UNDEFINED) + .collect(Collectors.toList()); } public static Set numberTypes() { diff --git a/core/src/main/java/org/opensearch/sql/data/type/ExprType.java b/core/src/main/java/org/opensearch/sql/data/type/ExprType.java index 782714ba70..58d6ee346b 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/ExprType.java +++ b/core/src/main/java/org/opensearch/sql/data/type/ExprType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -13,13 +12,9 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.Expression; -/** - * The Type of {@link Expression} and {@link ExprValue}. - */ +/** The Type of {@link Expression} and {@link ExprValue}. */ public interface ExprType { - /** - * Is compatible with other types. 
- */ + /** Is compatible with other types. */ default boolean isCompatible(ExprType other) { if (this.equals(other)) { return true; @@ -37,30 +32,25 @@ default boolean isCompatible(ExprType other) { } /** - * Should cast this type to other type or not. By default, cast is always required - * if the given type is different from this type. + * Should cast this type to other type or not. By default, cast is always required if the given + * type is different from this type. + * * @param other other data type - * @return true if cast is required, otherwise false + * @return true if cast is required, otherwise false */ default boolean shouldCast(ExprType other) { return !this.equals(other); } - /** - * Get the parent type. - */ + /** Get the parent type. */ default List getParent() { return Arrays.asList(UNKNOWN); } - /** - * Get the type name. - */ + /** Get the type name. */ String typeName(); - /** - * Get the legacy type name for old engine. - */ + /** Get the legacy type name for old engine. */ default String legacyTypeName() { return typeName(); } diff --git a/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java b/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java index e1f356782f..c9b5c29157 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java +++ b/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java @@ -13,15 +13,17 @@ /** * The definition of widening type rule for expression value. - * ExprType Widens to data types - * INTEGER LONG, FLOAT, DOUBLE - * LONG FLOAT, DOUBLE - * FLOAT DOUBLE - * DOUBLE DOUBLE - * STRING STRING - * BOOLEAN BOOLEAN - * ARRAY ARRAY - * STRUCT STRUCT + * + * + * + * + * + * + * + * + * + * + *
ExprTypeWidens to data types
INTEGERLONG, FLOAT, DOUBLE
LONGFLOAT, DOUBLE
FLOATDOUBLE
DOUBLEDOUBLE
STRINGSTRING
BOOLEANBOOLEAN
ARRAYARRAY
STRUCTSTRUCT
*/ @UtilityClass public class WideningTypeRule { @@ -53,10 +55,9 @@ private static int distance(ExprType type1, ExprType type2, int distance) { } /** - * The max type among two types. The max is defined as follow - * if type1 could widen to type2, then max is type2, vice versa - * if type1 could't widen to type2 and type2 could't widen to type1, - * then throw {@link ExpressionEvaluationException}. + * The max type among two types. The max is defined as follow if type1 could widen to type2, then + * max is type2, vice versa if type1 couldn't widen to type2 and type2 could't widen to type1, then + * throw {@link ExpressionEvaluationException}. * * @param type1 type1 * @param type2 type2 diff --git a/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java index ef390dc53b..812c8dba3d 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java index 13c3606f72..f3285fd6f9 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java index 03890bba61..82fac55e0c 100644 --- 
a/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java index 589d4b3043..0221e50887 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java index 65fceacf99..ca3cb251f4 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java b/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java index 9167737a70..3d6ddc864e 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java +++ b/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java @@ -9,9 +9,7 @@ import org.opensearch.sql.datasource.model.DataSource; import org.opensearch.sql.datasource.model.DataSourceMetadata; -/** - * DataSource Service manage {@link DataSource}. - */ +/** DataSource Service manage {@link DataSource}. 
*/ public interface DataSourceService { /** @@ -22,21 +20,19 @@ public interface DataSourceService { */ DataSource getDataSource(String dataSourceName); - /** - * Returns all dataSource Metadata objects. The returned objects won't contain - * any of the credential info. + * Returns all dataSource Metadata objects. The returned objects won't contain any of the + * credential info. * - * @param isDefaultDataSourceRequired is used to specify - * if default opensearch connector is required in the output list. + * @param isDefaultDataSourceRequired is used to specify if default opensearch connector is + * required in the output list. * @return set of {@link DataSourceMetadata}. */ Set getDataSourceMetadata(boolean isDefaultDataSourceRequired); - /** - * Returns dataSourceMetadata object with specific name. - * The returned objects won't contain any crendetial info. + * Returns dataSourceMetadata object with specific name. The returned objects won't contain any + * crendetial info. * * @param name name of the {@link DataSource}. * @return set of {@link DataSourceMetadata}. @@ -57,7 +53,6 @@ public interface DataSourceService { */ void updateDataSource(DataSourceMetadata dataSourceMetadata); - /** * Deletes {@link DataSource} corresponding to the DataSource name. * @@ -66,8 +61,8 @@ public interface DataSourceService { void deleteDataSource(String dataSourceName); /** - * Returns true {@link Boolean} if datasource with dataSourceName exists - * or else false {@link Boolean}. + * Returns true {@link Boolean} if datasource with dataSourceName exists or else false {@link + * Boolean}. * * @param dataSourceName name of the {@link DataSource}. 
*/ diff --git a/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java b/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java index 5deb460961..9623102efb 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java +++ b/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java @@ -12,9 +12,7 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.storage.StorageEngine; -/** - * Each user configured datasource mapping to one instance of DataSource per JVM. - */ +/** Each user configured datasource mapping to one instance of DataSource per JVM. */ @Getter @RequiredArgsConstructor @EqualsAndHashCode @@ -24,7 +22,5 @@ public class DataSource { private final DataSourceType connectorType; - @EqualsAndHashCode.Exclude - private final StorageEngine storageEngine; - + @EqualsAndHashCode.Exclude private final StorageEngine storageEngine; } diff --git a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java index 7945f8aec3..8e5ff7e1a6 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java +++ b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java @@ -5,7 +5,6 @@ package org.opensearch.sql.datasource.model; - import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; import com.fasterxml.jackson.annotation.JsonFormat; @@ -30,25 +29,25 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class DataSourceMetadata { - @JsonProperty - private String name; + @JsonProperty private String name; @JsonProperty @JsonFormat(with = JsonFormat.Feature.ACCEPT_CASE_INSENSITIVE_PROPERTIES) private DataSourceType connector; - @JsonProperty - private List allowedRoles; + @JsonProperty private List allowedRoles; - @JsonProperty - private Map properties; + @JsonProperty private Map properties; /** * 
Default OpenSearch {@link DataSourceMetadata}. Which is used to register default OpenSearch * {@link DataSource} to {@link DataSourceService}. */ public static DataSourceMetadata defaultOpenSearchDataSourceMetadata() { - return new DataSourceMetadata(DEFAULT_DATASOURCE_NAME, - DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); + return new DataSourceMetadata( + DEFAULT_DATASOURCE_NAME, + DataSourceType.OPENSEARCH, + Collections.emptyList(), + ImmutableMap.of()); } } diff --git a/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java b/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java index d9386ab122..741db07eb3 100644 --- a/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java +++ b/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -23,5 +22,4 @@ void cast_to_int_and_integer_should_convert_to_same_function_impl() { new Cast(booleanLiteral(true), stringLiteral("INT")).convertFunctionName(), new Cast(booleanLiteral(true), stringLiteral("INTEGER")).convertFunctionName()); } - } diff --git a/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java b/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java index b0ab66bc0e..c211f36ba7 100644 --- a/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java +++ b/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -36,5 +35,4 @@ void should_return_empty_if_only_single_part() { assertFalse(name.first().isPresent()); assertFalse(name.getPrefix().isPresent()); } - } diff --git 
a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java index c78545f9ae..01fe4a5e4e 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -47,13 +46,14 @@ public void timeValueInterfaceTest() { assertEquals(today, timeValue.dateValue(functionProperties)); assertEquals(today.atTime(1, 1, 1), timeValue.datetimeValue(functionProperties)); - assertEquals(ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), - UTC_ZONE_ID).toInstant(), timeValue.timestampValue(functionProperties)); + assertEquals( + ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), UTC_ZONE_ID).toInstant(), + timeValue.timestampValue(functionProperties)); assertEquals("01:01:01", timeValue.value()); assertEquals("TIME '01:01:01'", timeValue.toString()); - exception = assertThrows(ExpressionEvaluationException.class, - () -> integerValue(1).timeValue()); + exception = + assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).timeValue()); assertEquals("invalid to get timeValue from value of type INTEGER", exception.getMessage()); } @@ -62,14 +62,17 @@ public void timestampValueInterfaceTest() { ExprValue timestampValue = new ExprTimestampValue("2020-07-07 01:01:01"); assertEquals(TIMESTAMP, timestampValue.type()); - assertEquals(ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), - UTC_ZONE_ID).toInstant(), timestampValue.timestampValue()); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), UTC_ZONE_ID).toInstant(), + timestampValue.timestampValue()); assertEquals("2020-07-07 01:01:01", timestampValue.value()); assertEquals("TIMESTAMP '2020-07-07 01:01:01'", 
timestampValue.toString()); assertEquals(LocalDate.parse("2020-07-07"), timestampValue.dateValue()); assertEquals(LocalTime.parse("01:01:01"), timestampValue.timeValue()); assertEquals(LocalDateTime.parse("2020-07-07T01:01:01"), timestampValue.datetimeValue()); - assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).timestampValue(), + assertThrows( + ExpressionEvaluationException.class, + () -> integerValue(1).timestampValue(), "invalid to get timestampValue from value of type INTEGER"); } @@ -80,12 +83,12 @@ public void dateValueInterfaceTest() { assertEquals(LocalDate.parse("2012-07-07"), dateValue.dateValue()); assertEquals(LocalTime.parse("00:00:00"), dateValue.timeValue()); assertEquals(LocalDateTime.parse("2012-07-07T00:00:00"), dateValue.datetimeValue()); - assertEquals(ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), - UTC_ZONE_ID).toInstant(), dateValue.timestampValue()); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), UTC_ZONE_ID).toInstant(), + dateValue.timestampValue()); ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).dateValue()); - assertEquals("invalid to get dateValue from value of type INTEGER", - exception.getMessage()); + assertEquals("invalid to get dateValue from value of type INTEGER", exception.getMessage()); } @Test @@ -95,10 +98,13 @@ public void datetimeValueInterfaceTest() { assertEquals(LocalDateTime.parse("2020-08-17T19:44:00"), datetimeValue.datetimeValue()); assertEquals(LocalDate.parse("2020-08-17"), datetimeValue.dateValue()); assertEquals(LocalTime.parse("19:44:00"), datetimeValue.timeValue()); - assertEquals(ZonedDateTime.of(LocalDateTime.parse("2020-08-17T19:44:00"), - UTC_ZONE_ID).toInstant(), datetimeValue.timestampValue()); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2020-08-17T19:44:00"), UTC_ZONE_ID).toInstant(), + datetimeValue.timestampValue()); assertEquals("DATETIME '2020-08-17 
19:44:00'", datetimeValue.toString()); - assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).datetimeValue(), + assertThrows( + ExpressionEvaluationException.class, + () -> integerValue(1).datetimeValue(), "invalid to get datetimeValue from value of type INTEGER"); } @@ -106,37 +112,38 @@ public void datetimeValueInterfaceTest() { public void dateInUnsupportedFormat() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> new ExprDateValue("2020-07-07Z")); - assertEquals("date:2020-07-07Z in unsupported format, please use yyyy-MM-dd", - exception.getMessage()); + assertEquals( + "date:2020-07-07Z in unsupported format, please use 'yyyy-MM-dd'", exception.getMessage()); } @Test public void timeInUnsupportedFormat() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> new ExprTimeValue("01:01:0")); - assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss[.SSSSSSSSS]", + assertEquals( + "time:01:01:0 in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void timestampInUnsupportedFormat() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprTimestampValue("2020-07-07T01:01:01Z")); + assertThrows( + SemanticCheckException.class, () -> new ExprTimestampValue("2020-07-07T01:01:01Z")); assertEquals( "timestamp:2020-07-07T01:01:01Z in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void datetimeInUnsupportedFormat() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprDatetimeValue("2020-07-07T01:01:01Z")); + assertThrows( + SemanticCheckException.class, () -> new ExprDatetimeValue("2020-07-07T01:01:01Z")); assertEquals( "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + 
"please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @@ -150,11 +157,12 @@ public void stringDateTimeValue() { assertEquals("\"2020-08-17 19:44:00\"", stringValue.toString()); SemanticCheckException exception = - assertThrows(SemanticCheckException.class, + assertThrows( + SemanticCheckException.class, () -> new ExprStringValue("2020-07-07T01:01:01Z").datetimeValue()); assertEquals( "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @@ -167,10 +175,10 @@ public void stringDateValue() { assertEquals("\"2020-08-17\"", stringValue.toString()); SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprStringValue("2020-07-07Z").dateValue()); - assertEquals("date:2020-07-07Z in unsupported format, please use yyyy-MM-dd", - exception.getMessage()); + assertThrows( + SemanticCheckException.class, () -> new ExprStringValue("2020-07-07Z").dateValue()); + assertEquals( + "date:2020-07-07Z in unsupported format, please use 'yyyy-MM-dd'", exception.getMessage()); } @Test @@ -181,9 +189,10 @@ public void stringTimeValue() { assertEquals("\"19:44:00\"", stringValue.toString()); SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprStringValue("01:01:0").timeValue()); - assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss[.SSSSSSSSS]", + assertThrows( + SemanticCheckException.class, () -> new ExprStringValue("01:01:0").timeValue()); + assertEquals( + "time:01:01:0 in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @@ -247,32 +256,33 @@ public void datetimeWithVariableNanoPrecision() { @Test public void timestampOverMaxNanoPrecision() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, + assertThrows( + SemanticCheckException.class, () -> new 
ExprTimestampValue("2020-07-07 01:01:01.1234567890")); assertEquals( - "timestamp:2020-07-07 01:01:01.1234567890 in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + "timestamp:2020-07-07 01:01:01.1234567890 in unsupported format, please use " + + "'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void datetimeOverMaxNanoPrecision() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, + assertThrows( + SemanticCheckException.class, () -> new ExprDatetimeValue("2020-07-07 01:01:01.1234567890")); assertEquals( "datetime:2020-07-07 01:01:01.1234567890 in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void timeOverMaxNanoPrecision() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprTimeValue("01:01:01.1234567890")); + assertThrows(SemanticCheckException.class, () -> new ExprTimeValue("01:01:01.1234567890")); assertEquals( - "time:01:01:01.1234567890 in unsupported format, please use HH:mm:ss[.SSSSSSSSS]", + "time:01:01:01.1234567890 in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java index 07aac33129..b77112528c 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -29,8 +28,8 @@ public void equal() { ExprValue v2 = ExprBooleanValue.of(true); assertTrue(v1.equals(v2)); assertTrue(v2.equals(v1)); - assertEquals(0, 
((ExprBooleanValue)v1).compare((ExprBooleanValue)v2)); - assertEquals(0, ((ExprBooleanValue)v2).compare((ExprBooleanValue)v1)); + assertEquals(0, ((ExprBooleanValue) v1).compare((ExprBooleanValue) v2)); + assertEquals(0, ((ExprBooleanValue) v2).compare((ExprBooleanValue) v1)); } @Test @@ -44,7 +43,9 @@ public void compare() { @Test public void invalid_get_value() { ExprDateValue value = new ExprDateValue("2020-08-20"); - assertThrows(ExpressionEvaluationException.class, value::booleanValue, + assertThrows( + ExpressionEvaluationException.class, + value::booleanValue, String.format("invalid to get booleanValue from value of type %s", value.type())); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java index e61bdb111d..edd4e0d844 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -48,8 +47,9 @@ public void compare_collection_with_int_object() { @Test public void comparabilityTest() { ExprValue collectionValue = ExprValueUtils.collectionValue(Arrays.asList(0, 1)); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> compare(collectionValue, collectionValue)); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> compare(collectionValue, collectionValue)); assertEquals("ExprCollectionValue instances are not comparable", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java index ff86ad70a1..36785d383e 100644 --- 
a/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -41,14 +40,18 @@ public void compare() { public void invalid_compare() { ExprIntervalValue v1 = new ExprIntervalValue(Period.ofYears(1)); ExprIntervalValue v2 = new ExprIntervalValue(Duration.ofHours(1)); - assertThrows(ExpressionEvaluationException.class, () -> v1.compare(v2), + assertThrows( + ExpressionEvaluationException.class, + () -> v1.compare(v2), String.format("invalid to compare intervals with units %s and %s", v1.unit(), v2.unit())); } @Test public void invalid_get_value() { ExprDateValue value = new ExprDateValue("2020-08-20"); - assertThrows(ExpressionEvaluationException.class, value::intervalValue, + assertThrows( + ExpressionEvaluationException.class, + value::intervalValue, String.format("invalid to get intervalValue from value of type %s", value.type())); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java index 871f312c6f..121ceb6ce2 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -19,7 +18,6 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.exception.ExpressionEvaluationException; - class ExprMissingValueTest { @Test @@ -51,8 +49,9 @@ public void equal() { @Test public void comparabilityTest() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> 
compare(LITERAL_MISSING, LITERAL_MISSING)); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> compare(LITERAL_MISSING, LITERAL_MISSING)); assertEquals("invalid to call compare operation on missing value", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java index f7087a949d..81bcf8f7b3 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -50,8 +49,9 @@ public void equal() { @Test public void comparabilityTest() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> compare(LITERAL_NULL, LITERAL_NULL)); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> compare(LITERAL_NULL, LITERAL_NULL)); assertEquals("invalid to call compare operation on null value", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java index 8c3f9dc742..ee245d73b4 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -17,8 +16,9 @@ public class ExprNumberValueTest { @Test public void getShortValueFromIncompatibleExprValue() { ExprBooleanValue booleanValue = ExprBooleanValue.of(true); - ExpressionEvaluationException exception = 
Assertions - .assertThrows(ExpressionEvaluationException.class, () -> booleanValue.shortValue()); + ExpressionEvaluationException exception = + Assertions.assertThrows( + ExpressionEvaluationException.class, () -> booleanValue.shortValue()); assertEquals("invalid to get shortValue from value of type BOOLEAN", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java index abbb730a0c..2a5e5033f7 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java @@ -27,8 +27,8 @@ public void equal() { ExprValue v2 = ExprValueUtils.stringValue("str"); assertTrue(v1.equals(v2)); assertTrue(v2.equals(v1)); - assertEquals(0, ((ExprStringValue)v1).compare((ExprStringValue)v2)); - assertEquals(0, ((ExprStringValue)v2).compare((ExprStringValue)v1)); + assertEquals(0, ((ExprStringValue) v1).compare((ExprStringValue) v2)); + assertEquals(0, ((ExprStringValue) v2).compare((ExprStringValue) v1)); } @Test @@ -42,7 +42,9 @@ public void compare() { @Test public void invalid_get_value() { ExprDateValue value = new ExprDateValue("2020-08-20"); - assertThrows(ExpressionEvaluationException.class, value::stringValue, + assertThrows( + ExpressionEvaluationException.class, + value::stringValue, String.format("invalid to get intervalValue from value of type %s", value.type())); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java index f2568e5cb4..567e1e78db 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static 
org.junit.jupiter.api.Assertions.assertEquals; @@ -52,8 +51,8 @@ public void compare_tuple_with_different_size() { @Test public void comparabilityTest() { ExprValue tupleValue = ExprValueUtils.tupleValue(ImmutableMap.of("integer_value", 2)); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> compare(tupleValue, tupleValue)); + ExpressionEvaluationException exception = + assertThrows(ExpressionEvaluationException.class, () -> compare(tupleValue, tupleValue)); assertEquals("ExprTupleValue instances are not comparable", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java index 3ece5e9a96..b965dff643 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -42,136 +41,148 @@ public void dateValueCompare() { @Test public void datetimeValueCompare() { - assertEquals(0, + assertEquals( + 0, new ExprDatetimeValue("2012-08-07 18:00:00") .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals(1, + assertEquals( + 1, new ExprDatetimeValue("2012-08-07 19:00:00") .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals(-1, + assertEquals( + -1, new ExprDatetimeValue("2012-08-07 18:00:00") .compareTo(new ExprDatetimeValue("2012-08-07 19:00:00"))); } @Test public void timestampValueCompare() { - assertEquals(0, + assertEquals( + 0, new ExprTimestampValue("2012-08-07 18:00:00") .compareTo(new ExprTimestampValue("2012-08-07 18:00:00"))); - assertEquals(1, + assertEquals( + 1, new ExprTimestampValue("2012-08-07 19:00:00") .compareTo(new ExprTimestampValue("2012-08-07 18:00:00"))); - 
assertEquals(-1, + assertEquals( + -1, new ExprTimestampValue("2012-08-07 18:00:00") .compareTo(new ExprTimestampValue("2012-08-07 19:00:00"))); } private static Stream getEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of(new ExprTimestampValue("1961-04-12 09:07:00"), - new ExprDatetimeValue("1961-04-12 09:07:00")), - Arguments.of(new ExprTimestampValue("1984-11-22 00:00:00"), - new ExprDateValue("1984-11-22")), - Arguments.of(new ExprTimestampValue(LocalDate.now() + " 00:00:00"), - new ExprDateValue(LocalDate.now())), - Arguments.of(new ExprDatetimeValue(LocalDate.now() + " 17:42:15"), - new ExprTimeValue("17:42:15")), - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimestampValue("2012-08-07 19:14:38")), - Arguments.of(new ExprDateValue("2012-08-07"), - new ExprDatetimeValue("2012-08-07 00:00:00")), - Arguments.of(new ExprDateValue("2007-01-27"), - new ExprDatetimeValue("2007-01-27 00:00:00")), - Arguments.of(new ExprDateValue(LocalDate.now()), - new ExprTimeValue("00:00:00")), - Arguments.of(new ExprTimestampValue("1984-11-22 00:00:00"), - new ExprDateValue("1984-11-22")), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprDatetimeValue(LocalDate.now() + " 19:14:38")), - Arguments.of(new ExprTimeValue("17:42:15"), - new ExprTimestampValue(LocalDate.now() + " 17:42:15")) - ); + Arguments.of( + new ExprTimestampValue("1961-04-12 09:07:00"), + new ExprDatetimeValue("1961-04-12 09:07:00")), + Arguments.of( + new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), + Arguments.of( + new ExprTimestampValue(LocalDate.now() + " 00:00:00"), + new ExprDateValue(LocalDate.now())), + Arguments.of( + new ExprDatetimeValue(LocalDate.now() + " 17:42:15"), new ExprTimeValue("17:42:15")), + Arguments.of( + new ExprDatetimeValue("2012-08-07 19:14:38"), + new ExprTimestampValue("2012-08-07 19:14:38")), + Arguments.of(new ExprDateValue("2012-08-07"), new ExprDatetimeValue("2012-08-07 00:00:00")), + 
Arguments.of(new ExprDateValue("2007-01-27"), new ExprDatetimeValue("2007-01-27 00:00:00")), + Arguments.of(new ExprDateValue(LocalDate.now()), new ExprTimeValue("00:00:00")), + Arguments.of( + new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), + Arguments.of( + new ExprTimeValue("19:14:38"), new ExprDatetimeValue(LocalDate.now() + " 19:14:38")), + Arguments.of( + new ExprTimeValue("17:42:15"), new ExprTimestampValue(LocalDate.now() + " 17:42:15"))); } /** - * We can't compare directly ExprValues of different datetime types, we need to use - * `FunctionProperties` object to extract comparable values. + * We can't compare directly ExprValues of different datetime types, we need to use + * `FunctionProperties` object to extract comparable values. */ @ParameterizedTest @MethodSource("getEqualDatetimeValuesOfDifferentTypes") public void compareEqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { - assertEquals(0, extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertEquals(0, extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + assertEquals( + 0, + extractDateTime(left, functionProperties) + .compareTo(extractDateTime(right, functionProperties))); + assertEquals( + 0, + extractDateTime(right, functionProperties) + .compareTo(extractDateTime(left, functionProperties))); } private static Stream getNotEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), + Arguments.of( + new ExprDatetimeValue("2012-08-07 19:14:38"), new ExprTimestampValue("1961-04-12 09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new ExprDatetimeValue(LocalDate.now() + " 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 00:00:00"), - new ExprDateValue("1961-04-12")), - 
Arguments.of(new ExprDatetimeValue("1961-04-12 19:14:38"), - new ExprDateValue("1961-04-12")), - Arguments.of(new ExprDateValue("1984-11-22"), - new ExprDatetimeValue("1961-04-12 19:14:38")), - Arguments.of(new ExprDateValue("1984-11-22"), - new ExprTimestampValue("2020-09-16 17:30:00")), - Arguments.of(new ExprDateValue("1984-11-22"), - new ExprTimeValue("19:14:38")), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprDateValue(LocalDate.now())), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprDatetimeValue("2012-08-07 09:07:00")), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprTimestampValue("1984-02-03 04:05:07")), - Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), + Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), + Arguments.of( + new ExprDatetimeValue(LocalDate.now() + " 19:14:38"), new ExprTimeValue("09:07:00")), + Arguments.of(new ExprDatetimeValue("2012-08-07 00:00:00"), new ExprDateValue("1961-04-12")), + Arguments.of(new ExprDatetimeValue("1961-04-12 19:14:38"), new ExprDateValue("1961-04-12")), + Arguments.of(new ExprDateValue("1984-11-22"), new ExprDatetimeValue("1961-04-12 19:14:38")), + Arguments.of( + new ExprDateValue("1984-11-22"), new ExprTimestampValue("2020-09-16 17:30:00")), + Arguments.of(new ExprDateValue("1984-11-22"), new ExprTimeValue("19:14:38")), + Arguments.of(new ExprTimeValue("19:14:38"), new ExprDateValue(LocalDate.now())), + Arguments.of(new ExprTimeValue("19:14:38"), new ExprDatetimeValue("2012-08-07 09:07:00")), + Arguments.of(new ExprTimeValue("19:14:38"), new ExprTimestampValue("1984-02-03 04:05:07")), + Arguments.of( + new ExprTimestampValue("2012-08-07 19:14:38"), new ExprDatetimeValue("1961-04-12 09:07:00")), - Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new ExprTimestampValue(LocalDate.now() + " 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new 
ExprTimestampValue("2012-08-07 00:00:00"), - new ExprDateValue("1961-04-12")), - Arguments.of(new ExprTimestampValue("1961-04-12 19:14:38"), - new ExprDateValue("1961-04-12")) - ); + Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), + Arguments.of( + new ExprTimestampValue(LocalDate.now() + " 19:14:38"), new ExprTimeValue("09:07:00")), + Arguments.of( + new ExprTimestampValue("2012-08-07 00:00:00"), new ExprDateValue("1961-04-12")), + Arguments.of( + new ExprTimestampValue("1961-04-12 19:14:38"), new ExprDateValue("1961-04-12"))); } /** - * We can't compare directly ExprValues of different datetime types, we need to use - * `FunctionProperties` object to extract comparable values. + * We can't compare directly ExprValues of different datetime types, we need to use + * `FunctionProperties` object to extract comparable values. */ @ParameterizedTest @MethodSource("getNotEqualDatetimeValuesOfDifferentTypes") public void compareNeqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { - assertNotEquals(0, extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertNotEquals(0, extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + assertNotEquals( + 0, + extractDateTime(left, functionProperties) + .compareTo(extractDateTime(right, functionProperties))); + assertNotEquals( + 0, + extractDateTime(right, functionProperties) + .compareTo(extractDateTime(left, functionProperties))); } @Test public void compareDateTimeWithNotADateTime() { - var exception = assertThrows(ExpressionEvaluationException.class, () -> - new ExprDoubleValue(3.1415).compareTo(new ExprIntervalValue(Period.ofDays(1)))); - assertEquals("compare expected value have same type, but with [DOUBLE, INTERVAL]", + var exception = + assertThrows( + ExpressionEvaluationException.class, + () -> new ExprDoubleValue(3.1415).compareTo(new 
ExprIntervalValue(Period.ofDays(1)))); + assertEquals( + "compare expected value have same type, but with [DOUBLE, INTERVAL]", exception.getMessage()); - exception = assertThrows(ExpressionEvaluationException.class, () -> - new ExprDateValue("1961-04-12").compareTo(new ExprIntegerValue(1))); - assertEquals("compare expected value have same type, but with [DATE, INTEGER]", - exception.getMessage()); + exception = + assertThrows( + ExpressionEvaluationException.class, + () -> new ExprDateValue("1961-04-12").compareTo(new ExprIntegerValue(1))); + assertEquals( + "compare expected value have same type, but with [DATE, INTEGER]", exception.getMessage()); - exception = assertThrows(ExpressionEvaluationException.class, () -> - new ExprStringValue("something").compareTo(new ExprTimeValue("10:20:30"))); - assertEquals("compare expected value have same type, but with [STRING, TIME]", - exception.getMessage()); + exception = + assertThrows( + ExpressionEvaluationException.class, + () -> new ExprStringValue("something").compareTo(new ExprTimeValue("10:20:30"))); + assertEquals( + "compare expected value have same type, but with [STRING, TIME]", exception.getMessage()); } @Test @@ -194,8 +205,7 @@ private static Stream getEqualNumericValuesOfDifferentTypes() { Arguments.of(new ExprIntegerValue(42), new ExprShortValue(42)), Arguments.of(new ExprIntegerValue(42), new ExprLongValue(42)), Arguments.of(new ExprIntegerValue(42), new ExprFloatValue(42)), - Arguments.of(new ExprIntegerValue(42), new ExprDoubleValue(42)) - ); + Arguments.of(new ExprIntegerValue(42), new ExprDoubleValue(42))); } @ParameterizedTest @@ -211,8 +221,7 @@ private static Stream getNotEqualNumericValuesOfDifferentTypes() { Arguments.of(new ExprIntegerValue(42), new ExprShortValue(146)), Arguments.of(new ExprIntegerValue(42), new ExprLongValue(100500)), Arguments.of(new ExprIntegerValue(42), new ExprFloatValue(-1.5)), - Arguments.of(new ExprIntegerValue(42), new ExprDoubleValue(1468.84138)) - ); + 
Arguments.of(new ExprIntegerValue(42), new ExprDoubleValue(1468.84138))); } @ParameterizedTest @@ -231,47 +240,50 @@ public void stringValueCompare() { @Test public void intervalValueCompare() { - assertEquals(0, new ExprIntervalValue(Period.ofDays(1)) - .compareTo(new ExprIntervalValue(Period.ofDays(1)))); - assertEquals(1, new ExprIntervalValue(Period.ofDays(2)) - .compareTo(new ExprIntervalValue(Period.ofDays(1)))); - assertEquals(-1, new ExprIntervalValue(Period.ofDays(1)) - .compareTo(new ExprIntervalValue(Period.ofDays(2)))); + assertEquals( + 0, + new ExprIntervalValue(Period.ofDays(1)).compareTo(new ExprIntervalValue(Period.ofDays(1)))); + assertEquals( + 1, + new ExprIntervalValue(Period.ofDays(2)).compareTo(new ExprIntervalValue(Period.ofDays(1)))); + assertEquals( + -1, + new ExprIntervalValue(Period.ofDays(1)).compareTo(new ExprIntervalValue(Period.ofDays(2)))); } @Test public void missingCompareToMethodShouldNotBeenCalledDirectly() { - IllegalStateException exception = assertThrows(IllegalStateException.class, - () -> LITERAL_MISSING.compareTo(LITERAL_FALSE)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> LITERAL_MISSING.compareTo(LITERAL_FALSE)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> LITERAL_FALSE.compareTo(LITERAL_MISSING)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + exception = + assertThrows(IllegalStateException.class, () -> LITERAL_FALSE.compareTo(LITERAL_MISSING)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> ExprMissingValue.of().compare(LITERAL_MISSING)); - 
assertEquals("[BUG] Unreachable, Comparing with MISSING is undefined", - exception.getMessage()); + exception = + assertThrows( + IllegalStateException.class, () -> ExprMissingValue.of().compare(LITERAL_MISSING)); + assertEquals("[BUG] Unreachable, Comparing with MISSING is undefined", exception.getMessage()); } @Test public void nullCompareToMethodShouldNotBeenCalledDirectly() { - IllegalStateException exception = assertThrows(IllegalStateException.class, - () -> LITERAL_NULL.compareTo(LITERAL_FALSE)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> LITERAL_NULL.compareTo(LITERAL_FALSE)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> LITERAL_FALSE.compareTo(LITERAL_NULL)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + exception = + assertThrows(IllegalStateException.class, () -> LITERAL_FALSE.compareTo(LITERAL_NULL)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> ExprNullValue.of().compare(LITERAL_MISSING)); - assertEquals("[BUG] Unreachable, Comparing with NULL is undefined", - exception.getMessage()); + exception = + assertThrows( + IllegalStateException.class, () -> ExprNullValue.of().compare(LITERAL_MISSING)); + assertEquals("[BUG] Unreachable, Comparing with NULL is undefined", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java index c4a00fe5ca..c879384955 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java +++ 
b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.hamcrest.MatcherAssert.assertThat; @@ -58,71 +57,87 @@ public class ExprValueUtilsTest { testTuple.put("1", new ExprIntegerValue(1)); } - private static List numberValues = Stream.of((byte) 1, (short) 1, 1, 1L, 1f, 1D) - .map(ExprValueUtils::fromObjectValue).collect(Collectors.toList()); + private static List numberValues = + Stream.of((byte) 1, (short) 1, 1, 1L, 1f, 1D) + .map(ExprValueUtils::fromObjectValue) + .collect(Collectors.toList()); - private static List nonNumberValues = Arrays.asList( - new ExprStringValue("1"), - ExprBooleanValue.of(true), - new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))), - new ExprTupleValue(testTuple), - new ExprDateValue("2012-08-07"), - new ExprTimeValue("18:00:00"), - new ExprDatetimeValue("2012-08-07 18:00:00"), - new ExprTimestampValue("2012-08-07 18:00:00"), - new ExprIntervalValue(Duration.ofSeconds(100))); + private static List nonNumberValues = + Arrays.asList( + new ExprStringValue("1"), + ExprBooleanValue.of(true), + new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))), + new ExprTupleValue(testTuple), + new ExprDateValue("2012-08-07"), + new ExprTimeValue("18:00:00"), + new ExprDatetimeValue("2012-08-07 18:00:00"), + new ExprTimestampValue("2012-08-07 18:00:00"), + new ExprIntervalValue(Duration.ofSeconds(100))); private static List allValues = Lists.newArrayList(Iterables.concat(numberValues, nonNumberValues)); - private static List> numberValueExtractor = Arrays.asList( - ExprValueUtils::getByteValue, - ExprValueUtils::getShortValue, - ExprValueUtils::getIntegerValue, - ExprValueUtils::getLongValue, - ExprValueUtils::getFloatValue, - ExprValueUtils::getDoubleValue); - private static List> nonNumberValueExtractor = Arrays.asList( - ExprValueUtils::getStringValue, - ExprValueUtils::getBooleanValue, - 
ExprValueUtils::getCollectionValue, - ExprValueUtils::getTupleValue - ); - private static List> dateAndTimeValueExtractor = Arrays.asList( - ExprValue::dateValue, - ExprValue::timeValue, - ExprValue::datetimeValue, - ExprValue::timestampValue, - ExprValue::intervalValue); - private static List> allValueExtractor = Lists.newArrayList( - Iterables.concat(numberValueExtractor, nonNumberValueExtractor, dateAndTimeValueExtractor)); + private static List> numberValueExtractor = + Arrays.asList( + ExprValueUtils::getByteValue, + ExprValueUtils::getShortValue, + ExprValueUtils::getIntegerValue, + ExprValueUtils::getLongValue, + ExprValueUtils::getFloatValue, + ExprValueUtils::getDoubleValue); + private static List> nonNumberValueExtractor = + Arrays.asList( + ExprValueUtils::getStringValue, + ExprValueUtils::getBooleanValue, + ExprValueUtils::getCollectionValue, + ExprValueUtils::getTupleValue); + private static List> dateAndTimeValueExtractor = + Arrays.asList( + ExprValue::dateValue, + ExprValue::timeValue, + ExprValue::datetimeValue, + ExprValue::timestampValue, + ExprValue::intervalValue); + private static List> allValueExtractor = + Lists.newArrayList( + Iterables.concat( + numberValueExtractor, nonNumberValueExtractor, dateAndTimeValueExtractor)); private static List numberTypes = - Arrays.asList(ExprCoreType.BYTE, ExprCoreType.SHORT, ExprCoreType.INTEGER, ExprCoreType.LONG, - ExprCoreType.FLOAT, ExprCoreType.DOUBLE); - private static List nonNumberTypes = - Arrays.asList(STRING, BOOLEAN, ARRAY, STRUCT); + Arrays.asList( + ExprCoreType.BYTE, + ExprCoreType.SHORT, + ExprCoreType.INTEGER, + ExprCoreType.LONG, + ExprCoreType.FLOAT, + ExprCoreType.DOUBLE); + private static List nonNumberTypes = Arrays.asList(STRING, BOOLEAN, ARRAY, STRUCT); private static List dateAndTimeTypes = Arrays.asList(DATE, TIME, DATETIME, TIMESTAMP, INTERVAL); private static List allTypes = Lists.newArrayList(Iterables.concat(numberTypes, nonNumberTypes, dateAndTimeTypes)); private static Stream 
getValueTestArgumentStream() { - List expectedValues = Arrays.asList((byte) 1, (short) 1, 1, 1L, 1f, 1D, "1", true, - Arrays.asList(integerValue(1)), - ImmutableMap.of("1", integerValue(1)), - LocalDate.parse("2012-08-07"), - LocalTime.parse("18:00:00"), - LocalDateTime.parse("2012-08-07T18:00:00"), - ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), UTC_ZONE_ID).toInstant(), - Duration.ofSeconds(100) - ); + List expectedValues = + Arrays.asList( + (byte) 1, + (short) 1, + 1, + 1L, + 1f, + 1D, + "1", + true, + Arrays.asList(integerValue(1)), + ImmutableMap.of("1", integerValue(1)), + LocalDate.parse("2012-08-07"), + LocalTime.parse("18:00:00"), + LocalDateTime.parse("2012-08-07T18:00:00"), + ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), UTC_ZONE_ID).toInstant(), + Duration.ofSeconds(100)); Stream.Builder builder = Stream.builder(); for (int i = 0; i < expectedValues.size(); i++) { - builder.add(Arguments.of( - allValues.get(i), - allValueExtractor.get(i), - expectedValues.get(i))); + builder.add(Arguments.of(allValues.get(i), allValueExtractor.get(i), expectedValues.get(i))); } return builder.build(); } @@ -130,16 +145,13 @@ private static Stream getValueTestArgumentStream() { private static Stream getTypeTestArgumentStream() { Stream.Builder builder = Stream.builder(); for (int i = 0; i < allValues.size(); i++) { - builder.add(Arguments.of( - allValues.get(i), - allTypes.get(i))); + builder.add(Arguments.of(allValues.get(i), allTypes.get(i))); } return builder.build(); } private static Stream invalidGetNumberValueArgumentStream() { - return Lists.cartesianProduct(nonNumberValues, numberValueExtractor) - .stream() + return Lists.cartesianProduct(nonNumberValues, numberValueExtractor).stream() .map(list -> Arguments.of(list.get(0), list.get(1))); } @@ -151,23 +163,20 @@ private static Stream invalidConvert() { extractorWithTypeList.add( new AbstractMap.SimpleEntry<>(nonNumberValueExtractor.get(i), nonNumberTypes.get(i))); } - return 
Lists.cartesianProduct(allValues, extractorWithTypeList) - .stream() - .filter(list -> { - ExprValue value = (ExprValue) list.get(0); - Map.Entry, ExprCoreType> entry = - (Map.Entry, - ExprCoreType>) list - .get(1); - return entry.getValue() != value.type(); - }) - .map(list -> { - Map.Entry, ExprCoreType> entry = - (Map.Entry, - ExprCoreType>) list - .get(1); - return Arguments.of(list.get(0), entry.getKey(), entry.getValue()); - }); + return Lists.cartesianProduct(allValues, extractorWithTypeList).stream() + .filter( + list -> { + ExprValue value = (ExprValue) list.get(0); + Map.Entry, ExprCoreType> entry = + (Map.Entry, ExprCoreType>) list.get(1); + return entry.getValue() != value.type(); + }) + .map( + list -> { + Map.Entry, ExprCoreType> entry = + (Map.Entry, ExprCoreType>) list.get(1); + return Arguments.of(list.get(0), entry.getKey(), entry.getValue()); + }); } @ParameterizedTest(name = "the value of ExprValue:{0} is: {2} ") @@ -182,36 +191,33 @@ public void getType(ExprValue value, ExprCoreType expectType) { assertEquals(expectType, value.type()); } - /** - * Test Invalid to get number. - */ + /** Test Invalid to get number. */ @ParameterizedTest(name = "invalid to get number value of ExprValue:{0}") @MethodSource("invalidGetNumberValueArgumentStream") public void invalidGetNumberValue(ExprValue value, Function extractor) { - Exception exception = assertThrows(ExpressionEvaluationException.class, - () -> extractor.apply(value)); + Exception exception = + assertThrows(ExpressionEvaluationException.class, () -> extractor.apply(value)); assertThat(exception.getMessage(), Matchers.containsString("invalid")); } - /** - * Test Invalid to convert. - */ + /** Test Invalid to convert. 
*/ @ParameterizedTest(name = "invalid convert ExprValue:{0} to ExprType:{2}") @MethodSource("invalidConvert") - public void invalidConvertExprValue(ExprValue value, Function extractor, - ExprCoreType toType) { - Exception exception = assertThrows(ExpressionEvaluationException.class, - () -> extractor.apply(value)); + public void invalidConvertExprValue( + ExprValue value, Function extractor, ExprCoreType toType) { + Exception exception = + assertThrows(ExpressionEvaluationException.class, () -> extractor.apply(value)); assertThat(exception.getMessage(), Matchers.containsString("invalid")); } @Test public void unSupportedObject() { - Exception exception = assertThrows(ExpressionEvaluationException.class, - () -> ExprValueUtils.fromObjectValue(integerValue(1))); + Exception exception = + assertThrows( + ExpressionEvaluationException.class, + () -> ExprValueUtils.fromObjectValue(integerValue(1))); assertEquals( - "unsupported object " - + "class org.opensearch.sql.data.model.ExprIntegerValue", + "unsupported object " + "class org.opensearch.sql.data.model.ExprIntegerValue", exception.getMessage()); } @@ -228,13 +234,14 @@ public void bindingTuples() { @Test public void constructDateAndTimeValue() { - assertEquals(new ExprDateValue("2012-07-07"), - ExprValueUtils.fromObjectValue("2012-07-07", DATE)); - assertEquals(new ExprTimeValue("01:01:01"), - ExprValueUtils.fromObjectValue("01:01:01", TIME)); - assertEquals(new ExprDatetimeValue("2012-07-07 01:01:01"), + assertEquals( + new ExprDateValue("2012-07-07"), ExprValueUtils.fromObjectValue("2012-07-07", DATE)); + assertEquals(new ExprTimeValue("01:01:01"), ExprValueUtils.fromObjectValue("01:01:01", TIME)); + assertEquals( + new ExprDatetimeValue("2012-07-07 01:01:01"), ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", DATETIME)); - assertEquals(new ExprTimestampValue("2012-07-07 01:01:01"), + assertEquals( + new ExprTimestampValue("2012-07-07 01:01:01"), ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", 
TIMESTAMP)); } @@ -244,17 +251,20 @@ public void hashCodeTest() { assertEquals(new ExprShortValue(1).hashCode(), new ExprShortValue(1).hashCode()); assertEquals(new ExprIntegerValue(1).hashCode(), new ExprIntegerValue(1).hashCode()); assertEquals(new ExprStringValue("1").hashCode(), new ExprStringValue("1").hashCode()); - assertEquals(new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))).hashCode(), + assertEquals( + new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))).hashCode(), new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))).hashCode()); - assertEquals(new ExprTupleValue(testTuple).hashCode(), - new ExprTupleValue(testTuple).hashCode()); - assertEquals(new ExprDateValue("2012-08-07").hashCode(), - new ExprDateValue("2012-08-07").hashCode()); - assertEquals(new ExprTimeValue("18:00:00").hashCode(), - new ExprTimeValue("18:00:00").hashCode()); - assertEquals(new ExprDatetimeValue("2012-08-07 18:00:00").hashCode(), + assertEquals( + new ExprTupleValue(testTuple).hashCode(), new ExprTupleValue(testTuple).hashCode()); + assertEquals( + new ExprDateValue("2012-08-07").hashCode(), new ExprDateValue("2012-08-07").hashCode()); + assertEquals( + new ExprTimeValue("18:00:00").hashCode(), new ExprTimeValue("18:00:00").hashCode()); + assertEquals( + new ExprDatetimeValue("2012-08-07 18:00:00").hashCode(), new ExprDatetimeValue("2012-08-07 18:00:00").hashCode()); - assertEquals(new ExprTimestampValue("2012-08-07 18:00:00").hashCode(), + assertEquals( + new ExprTimestampValue("2012-08-07 18:00:00").hashCode(), new ExprTimestampValue("2012-08-07 18:00:00").hashCode()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java index 7db856d092..1def15cc6f 100644 --- a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java +++ b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.type; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java index ec00aebe18..cf9bb538c4 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -32,10 +31,8 @@ @ExtendWith(MockitoExtension.class) class ExprValueOrderingTest { - @Mock - ExprValue left; - @Mock - ExprValue right; + @Mock ExprValue left; + @Mock ExprValue right; @Test public void natural() { diff --git a/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java index c13f95ad63..97014fc410 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java index ab36247089..3625dc3760 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static 
org.junit.jupiter.api.Assertions.assertEquals; diff --git a/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java index 297079fca5..ad88eb2c76 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java index 957dac5c74..677ad9310e 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java @@ -59,7 +59,7 @@ public void timestamp_one_arg_string_invalid_format(String value, String testNam var exception = assertThrows(SemanticCheckException.class, () -> DSL.timestamp(functionProperties, DSL.literal(value)).valueOf()); assertEquals(String.format("timestamp:%s in unsupported format, please " - + "use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", value), exception.getMessage()); + + "use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", value), exception.getMessage()); } @Test From 3db0d4f2b88270b96a77165f20e5332378adc4f8 Mon Sep 17 00:00:00 2001 From: Guian Gumpac Date: Wed, 9 Aug 2023 11:11:55 -0700 Subject: [PATCH 08/42] Updated H2 version (#1928) Signed-off-by: Guian Gumpac --- integ-test/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 4a5f2015e0..b52cfd5f22 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -100,7 +100,7 @@ dependencies { testImplementation('org.junit.jupiter:junit-jupiter-api:5.6.2') 
testRuntimeOnly('org.junit.jupiter:junit-jupiter-engine:5.6.2') - testImplementation group: 'com.h2database', name: 'h2', version: '2.1.214' + testImplementation group: 'com.h2database', name: 'h2', version: '2.2.220' testImplementation group: 'org.xerial', name: 'sqlite-jdbc', version: '3.41.2.2' testImplementation group: 'com.google.code.gson', name: 'gson', version: '2.8.9' From b610ce96f5a7772d7ebf15f0bea27d1103c63c92 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 9 Aug 2023 13:03:02 -0700 Subject: [PATCH 09/42] [Spotless] Applying Google Code Format for core/src/main files #2 (#1931) * Applied spotless changes to `core/stc/main/.../expression` and more. Signed-off-by: Mitchell Gale * Applied all spotless for PR 2 for GJF Signed-off-by: Mitchell Gale * Apply spotless to fix custom fixes. Signed-off-by: Mitchell Gale * Remove unused
Signed-off-by: Mitchell Gale * ignoring core checkstyle failures. Signed-off-by: Mitchell Gale * addressed PR comments. Signed-off-by: Mitchell Gale * Addressing PR 2 comments. Signed-off-by: Mitchell Gale * Ran spotless apply Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- build.gradle | 6 +- .../ExpressionEvaluationException.java | 5 +- .../sql/exception/NoCursorException.java | 7 +- .../sql/exception/QueryEngineException.java | 5 +- .../sql/exception/SemanticCheckException.java | 5 +- .../UnsupportedCursorRequestException.java | 7 +- .../sql/executor/ExecutionContext.java | 7 +- .../sql/executor/ExecutionEngine.java | 34 +- .../org/opensearch/sql/executor/Explain.java | 169 +- .../org/opensearch/sql/executor/QueryId.java | 12 +- .../opensearch/sql/executor/QueryManager.java | 5 +- .../opensearch/sql/executor/QueryService.java | 40 +- .../sql/executor/execution/AbstractPlan.java | 16 +- .../sql/executor/execution/CommandPlan.java | 19 +- .../sql/executor/execution/ExplainPlan.java | 15 +- .../sql/executor/execution/QueryPlan.java | 17 +- .../executor/execution/QueryPlanFactory.java | 65 +- .../execution/StreamingQueryPlan.java | 27 +- .../pagination/CanPaginateVisitor.java | 44 +- .../sql/executor/pagination/Cursor.java | 3 +- .../executor/pagination/PlanSerializer.java | 36 +- .../sql/executor/streaming/Batch.java | 4 +- .../MicroBatchStreamingExecution.java | 12 +- .../sql/executor/streaming/Offset.java | 4 +- .../executor/streaming/StreamingSource.java | 4 +- .../org/opensearch/sql/expression/DSL.java | 146 +- .../opensearch/sql/expression/Expression.java | 29 +- .../sql/expression/ExpressionNodeVisitor.java | 24 +- .../sql/expression/FunctionExpression.java | 12 +- .../sql/expression/HighlightExpression.java | 30 +- .../sql/expression/LiteralExpression.java | 5 +- .../expression/NamedArgumentExpression.java | 4 +- .../sql/expression/NamedExpression.java | 21 +- .../sql/expression/ReferenceExpression.java | 
10 +- .../aggregation/AggregationState.java | 9 +- .../expression/aggregation/Aggregator.java | 32 +- .../aggregation/AggregatorFunction.java | 155 +- .../expression/aggregation/AvgAggregator.java | 56 +- .../aggregation/CountAggregator.java | 5 +- .../expression/aggregation/MaxAggregator.java | 5 +- .../expression/aggregation/MinAggregator.java | 10 +- .../aggregation/NamedAggregator.java | 29 +- .../aggregation/StdDevAggregator.java | 24 +- .../expression/aggregation/SumAggregator.java | 13 +- .../aggregation/TakeAggregator.java | 9 +- .../aggregation/VarianceAggregator.java | 18 +- .../conditional/cases/CaseClause.java | 28 +- .../conditional/cases/WhenClause.java | 23 +- .../expression/datetime/CalendarLookup.java | 5 +- .../datetime/DateTimeFormatterUtil.java | 246 +-- .../expression/datetime/DateTimeFunction.java | 1456 +++++++++-------- .../expression/datetime/IntervalClause.java | 4 +- .../sql/expression/env/Environment.java | 19 +- .../function/BuiltinFunctionName.java | 48 +- .../function/BuiltinFunctionRepository.java | 57 +- .../function/DefaultFunctionResolver.java | 46 +- .../expression/function/FunctionBuilder.java | 7 +- .../sql/expression/function/FunctionDSL.java | 381 +++-- .../function/FunctionImplementation.java | 13 +- .../sql/expression/function/FunctionName.java | 8 +- .../function/FunctionProperties.java | 36 +- .../expression/function/FunctionResolver.java | 4 +- .../function/FunctionSignature.java | 19 +- .../function/OpenSearchFunctions.java | 57 +- .../function/RelevanceFunctionResolver.java | 26 +- .../function/SerializableBiFunction.java | 8 +- .../function/SerializableFunction.java | 4 +- .../function/SerializableNoArgFunction.java | 8 +- .../function/SerializableQuadFunction.java | 1 - .../function/SerializableTriFunction.java | 1 - .../function/TableFunctionImplementation.java | 5 +- .../arthmetic/ArithmeticFunction.java | 379 +++-- .../arthmetic/MathematicalFunction.java | 952 +++++++---- .../operator/convert/TypeCastOperator.java 
| 246 +-- .../predicate/BinaryPredicateOperator.java | 343 +++- .../predicate/UnaryPredicateOperator.java | 103 +- .../sql/expression/parse/GrokExpression.java | 17 +- .../sql/expression/parse/ParseExpression.java | 20 +- .../expression/parse/PatternsExpression.java | 24 +- .../sql/expression/parse/RegexExpression.java | 12 +- .../sql/expression/span/SpanExpression.java | 46 +- .../expression/system/SystemFunctions.java | 25 +- .../sql/expression/text/TextFunction.java | 303 ++-- .../expression/window/WindowDefinition.java | 9 +- .../window/WindowFunctionExpression.java | 22 +- .../expression/window/WindowFunctions.java | 15 +- .../aggregation/AggregateWindowFunction.java | 6 +- .../window/frame/CurrentRowWindowFrame.java | 21 +- .../window/frame/PeerRowsWindowFrame.java | 60 +- .../expression/window/frame/WindowFrame.java | 27 +- .../window/ranking/DenseRankFunction.java | 6 +- .../window/ranking/RankFunction.java | 11 +- .../window/ranking/RankingWindowFunction.java | 30 +- .../window/ranking/RowNumberFunction.java | 2 - .../sql/monitor/AlwaysHealthyMonitor.java | 12 +- .../sql/monitor/ResourceMonitor.java | 5 +- 96 files changed, 3533 insertions(+), 2887 deletions(-) diff --git a/build.gradle b/build.gradle index 2f1ce25212..f6f390505d 100644 --- a/build.gradle +++ b/build.gradle @@ -84,7 +84,11 @@ repositories { spotless { java { target fileTree('.') { - include 'core/src/main/java/org/opensearch/sql/analysis/**/*.java', + include 'core/src/main/java/org/opensearch/sql/monitor/**/*.java', + 'core/src/main/java/org/opensearch/sql/expression/**/*.java', + 'core/src/main/java/org/opensearch/sql/executor/**/*.java', + 'core/src/main/java/org/opensearch/sql/exception/**/*.java', + 'core/src/main/java/org/opensearch/sql/analysis/**/*.java', 'core/src/test/java/org/opensearch/sql/data/**/*.java', 'core/src/test/java/org/opensearch/sql/datasource/**/*.java', 'core/src/test/java/org/opensearch/sql/ast/**/*.java' diff --git 
a/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java b/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java index 65ea187666..d11f6e4f9e 100644 --- a/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java +++ b/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.exception; -/** - * Exception for Expression Evaluation. - */ +/** Exception for Expression Evaluation. */ public class ExpressionEvaluationException extends QueryEngineException { public ExpressionEvaluationException(String message) { super(message); diff --git a/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java b/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java index 9383bece57..57f75c2cb0 100644 --- a/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java +++ b/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java @@ -6,8 +6,7 @@ package org.opensearch.sql.exception; /** - * This should be thrown on serialization of a PhysicalPlan tree if paging is finished. - * Processing of such exception should outcome of responding no cursor to the user. + * This should be thrown on serialization of a PhysicalPlan tree if paging is finished. Processing + * of such exception should outcome of responding no cursor to the user. 
*/ -public class NoCursorException extends RuntimeException { -} +public class NoCursorException extends RuntimeException {} diff --git a/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java b/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java index ce90ecff5c..b3d13bef71 100644 --- a/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java +++ b/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.exception; -/** - * Query analysis abstract exception. - */ +/** Query analysis abstract exception. */ public class QueryEngineException extends RuntimeException { public QueryEngineException(String message) { diff --git a/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java b/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java index 8673dbfc3c..6e0c184af8 100644 --- a/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java +++ b/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.exception; -/** - * Semantic Check Exception. - */ +/** Semantic Check Exception. */ public class SemanticCheckException extends QueryEngineException { public SemanticCheckException(String message) { super(message); diff --git a/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java b/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java index 6ed8e02e5f..8117304571 100644 --- a/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java +++ b/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java @@ -5,8 +5,5 @@ package org.opensearch.sql.exception; -/** - * This should be thrown by V2 engine to support fallback scenario. 
- */ -public class UnsupportedCursorRequestException extends RuntimeException { -} +/** This should be thrown by V2 engine to support fallback scenario. */ +public class UnsupportedCursorRequestException extends RuntimeException {} diff --git a/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java b/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java index 8a3162068f..22d14972eb 100644 --- a/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java +++ b/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java @@ -9,12 +9,9 @@ import lombok.Getter; import org.opensearch.sql.storage.split.Split; -/** - * Execution context hold planning related information. - */ +/** Execution context hold planning related information. */ public class ExecutionContext { - @Getter - private final Optional split; + @Getter private final Optional split; public ExecutionContext(Split split) { this.split = Optional.of(split); diff --git a/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java b/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java index 9465da22c9..43b8ccb62e 100644 --- a/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java +++ b/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.executor; import java.util.List; @@ -17,39 +16,33 @@ import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * Execution engine that encapsulates execution details. - */ +/** Execution engine that encapsulates execution details. */ public interface ExecutionEngine { /** - * Execute physical plan and call back response listener. + * Execute physical plan and call back response listener.
* Todo. deprecated this interface after finalize {@link ExecutionContext}. * - * @param plan executable physical plan + * @param plan executable physical plan * @param listener response listener */ void execute(PhysicalPlan plan, ResponseListener listener); - /** - * Execute physical plan with {@link ExecutionContext} and call back response listener. - */ - void execute(PhysicalPlan plan, ExecutionContext context, - ResponseListener listener); + /** Execute physical plan with {@link ExecutionContext} and call back response listener. */ + void execute( + PhysicalPlan plan, ExecutionContext context, ResponseListener listener); /** - * Explain physical plan and call back response listener. The reason why this has to - * be part of execution engine interface is that the physical plan probably needs to - * be executed to get more info for profiling, such as actual execution time, rows fetched etc. + * Explain physical plan and call back response listener. The reason why this has to be part of + * execution engine interface is that the physical plan probably needs to be executed to get more + * info for profiling, such as actual execution time, rows fetched etc. * - * @param plan physical plan to explain + * @param plan physical plan to explain * @param listener response listener */ void explain(PhysicalPlan plan, ResponseListener listener); - /** - * Data class that encapsulates ExprValue. - */ + /** Data class that encapsulates ExprValue. */ @Data class QueryResponse { private final Schema schema; @@ -70,8 +63,8 @@ public static class Column { } /** - * Data class that encapsulates explain result. This can help decouple core engine - * from concrete explain response format. + * Data class that encapsulates explain result. This can help decouple core engine from concrete + * explain response format. 
*/ @Data class ExplainResponse { @@ -86,5 +79,4 @@ class ExplainResponseNode { private Map description; private List children; } - } diff --git a/core/src/main/java/org/opensearch/sql/executor/Explain.java b/core/src/main/java/org/opensearch/sql/executor/Explain.java index 7c16e0b720..0f05b99383 100644 --- a/core/src/main/java/org/opensearch/sql/executor/Explain.java +++ b/core/src/main/java/org/opensearch/sql/executor/Explain.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.executor; import com.google.common.collect.ImmutableMap; @@ -35,11 +34,9 @@ import org.opensearch.sql.planner.physical.WindowOperator; import org.opensearch.sql.storage.TableScanOperator; -/** - * Visitor that explains a physical plan to JSON format. - */ +/** Visitor that explains a physical plan to JSON format. */ public class Explain extends PhysicalPlanNodeVisitor - implements Function { + implements Function { @Override public ExplainResponse apply(PhysicalPlan plan) { @@ -48,109 +45,160 @@ public ExplainResponse apply(PhysicalPlan plan) { @Override public ExplainResponseNode visitProject(ProjectOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "fields", node.getProjectList().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("fields", node.getProjectList().toString()))); } @Override public ExplainResponseNode visitFilter(FilterOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "conditions", node.getConditions().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("conditions", node.getConditions().toString()))); } @Override public ExplainResponseNode visitSort(SortOperator node, Object context) { - return explain(node, context, explainNode -> 
explainNode.setDescription(ImmutableMap.of( - "sortList", describeSortList(node.getSortList())))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("sortList", describeSortList(node.getSortList())))); } @Override public ExplainResponseNode visitTableScan(TableScanOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "request", node.toString()))); + return explain( + node, + context, + explainNode -> explainNode.setDescription(ImmutableMap.of("request", node.toString()))); } @Override public ExplainResponseNode visitAggregation(AggregationOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "aggregators", node.getAggregatorList().toString(), - "groupBy", node.getGroupByExprList().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "aggregators", node.getAggregatorList().toString(), + "groupBy", node.getGroupByExprList().toString()))); } @Override public ExplainResponseNode visitWindow(WindowOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "function", node.getWindowFunction().toString(), - "definition", ImmutableMap.of( - "partitionBy", node.getWindowDefinition().getPartitionByList().toString(), - "sortList", describeSortList(node.getWindowDefinition().getSortList()))))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "function", node.getWindowFunction().toString(), + "definition", + ImmutableMap.of( + "partitionBy", + node.getWindowDefinition().getPartitionByList().toString(), + "sortList", + describeSortList(node.getWindowDefinition().getSortList()))))); } @Override public ExplainResponseNode visitRename(RenameOperator node, Object context) { Map renameMappingDescription 
= - node.getMapping() - .entrySet() - .stream() - .collect(Collectors.toMap( - e -> e.getKey().toString(), - e -> e.getValue().toString())); + node.getMapping().entrySet().stream() + .collect(Collectors.toMap(e -> e.getKey().toString(), e -> e.getValue().toString())); - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "mapping", renameMappingDescription))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription(ImmutableMap.of("mapping", renameMappingDescription))); } @Override public ExplainResponseNode visitRemove(RemoveOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "removeList", node.getRemoveList().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("removeList", node.getRemoveList().toString()))); } @Override public ExplainResponseNode visitEval(EvalOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "expressions", convertPairListToMap(node.getExpressionList())))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("expressions", convertPairListToMap(node.getExpressionList())))); } @Override public ExplainResponseNode visitDedupe(DedupeOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "dedupeList", node.getDedupeList().toString(), - "allowedDuplication", node.getAllowedDuplication(), - "keepEmpty", node.getKeepEmpty(), - "consecutive", node.getConsecutive()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "dedupeList", node.getDedupeList().toString(), + "allowedDuplication", node.getAllowedDuplication(), + "keepEmpty", node.getKeepEmpty(), + "consecutive", node.getConsecutive()))); } @Override 
public ExplainResponseNode visitRareTopN(RareTopNOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "commandType", node.getCommandType(), - "noOfResults", node.getNoOfResults(), - "fields", node.getFieldExprList().toString(), - "groupBy", node.getGroupByExprList().toString() - ))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "commandType", node.getCommandType(), + "noOfResults", node.getNoOfResults(), + "fields", node.getFieldExprList().toString(), + "groupBy", node.getGroupByExprList().toString()))); } @Override public ExplainResponseNode visitValues(ValuesOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "values", node.getValues()))); + return explain( + node, + context, + explainNode -> explainNode.setDescription(ImmutableMap.of("values", node.getValues()))); } @Override public ExplainResponseNode visitLimit(LimitOperator node, Object context) { - return explain(node, context, explanNode -> explanNode.setDescription(ImmutableMap.of( - "limit", node.getLimit(), "offset", node.getOffset()))); + return explain( + node, + context, + explanNode -> + explanNode.setDescription( + ImmutableMap.of("limit", node.getLimit(), "offset", node.getOffset()))); } @Override public ExplainResponseNode visitNested(NestedOperator node, Object context) { - return explain(node, context, explanNode -> explanNode.setDescription(ImmutableMap.of( - "nested", node.getFields()))); + return explain( + node, + context, + explanNode -> explanNode.setDescription(ImmutableMap.of("nested", node.getFields()))); } - protected ExplainResponseNode explain(PhysicalPlan node, Object context, - Consumer doExplain) { + protected ExplainResponseNode explain( + PhysicalPlan node, Object context, Consumer doExplain) { ExplainResponseNode explainNode = new ExplainResponseNode(getOperatorName(node)); 
List children = new ArrayList<>(); @@ -169,19 +217,18 @@ private String getOperatorName(PhysicalPlan node) { private Map convertPairListToMap(List> pairs) { return pairs.stream() - .collect(Collectors.toMap( - p -> p.getLeft().toString(), - p -> p.getRight().toString())); + .collect(Collectors.toMap(p -> p.getLeft().toString(), p -> p.getRight().toString())); } private Map> describeSortList( List> sortList) { return sortList.stream() - .collect(Collectors.toMap( - p -> p.getRight().toString(), - p -> ImmutableMap.of( - "sortOrder", p.getLeft().getSortOrder().toString(), - "nullOrder", p.getLeft().getNullOrder().toString()))); + .collect( + Collectors.toMap( + p -> p.getRight().toString(), + p -> + ImmutableMap.of( + "sortOrder", p.getLeft().getSortOrder().toString(), + "nullOrder", p.getLeft().getNullOrder().toString()))); } - } diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryId.java b/core/src/main/java/org/opensearch/sql/executor/QueryId.java index 933cb5d82d..eea8166e2a 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryId.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryId.java @@ -12,18 +12,14 @@ import org.apache.commons.lang3.RandomStringUtils; import org.opensearch.sql.executor.execution.AbstractPlan; -/** - * Query id of {@link AbstractPlan}. - */ +/** Query id of {@link AbstractPlan}. */ public class QueryId { - /** - * Query id. - */ - @Getter - private final String queryId; + /** Query id. */ + @Getter private final String queryId; /** * Generate {@link QueryId}. + * * @return {@link QueryId}. 
*/ public static QueryId queryId() { diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryManager.java b/core/src/main/java/org/opensearch/sql/executor/QueryManager.java index 5b41d7ce2e..44d6a1cd84 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryManager.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryManager.java @@ -11,13 +11,14 @@ import org.opensearch.sql.executor.execution.AbstractPlan; /** - * QueryManager is the high-level interface of core engine. - * Frontend submit {@link AbstractPlan} to QueryManager. + * QueryManager is the high-level interface of core engine. Frontend submit an {@link AbstractPlan} + * to QueryManager. */ public interface QueryManager { /** * Submit {@link AbstractPlan}. + * * @param queryPlan {@link AbstractPlan}. * @return {@link QueryId}. */ diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryService.java b/core/src/main/java/org/opensearch/sql/executor/QueryService.java index 94e7081920..3e939212bf 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryService.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryService.java @@ -18,9 +18,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * The low level interface of core engine. - */ +/** The low level interface of core engine. */ @RequiredArgsConstructor public class QueryService { @@ -31,14 +29,14 @@ public class QueryService { private final Planner planner; /** - * Execute the {@link UnresolvedPlan}, using {@link ResponseListener} to get response. + * Execute the {@link UnresolvedPlan}, using {@link ResponseListener} to get response.
* Todo. deprecated this interface after finalize {@link PlanContext}. * - * @param plan {@link UnresolvedPlan} + * @param plan {@link UnresolvedPlan} * @param listener {@link ResponseListener} */ - public void execute(UnresolvedPlan plan, - ResponseListener listener) { + public void execute( + UnresolvedPlan plan, ResponseListener listener) { try { executePlan(analyze(plan), PlanContext.emptyPlanContext(), listener); } catch (Exception e) { @@ -48,22 +46,24 @@ public void execute(UnresolvedPlan plan, /** * Execute the {@link UnresolvedPlan}, with {@link PlanContext} and using {@link ResponseListener} - * to get response. + * to get response.
* Todo. Pass split from PlanContext to ExecutionEngine in following PR. * * @param plan {@link LogicalPlan} * @param planContext {@link PlanContext} * @param listener {@link ResponseListener} */ - public void executePlan(LogicalPlan plan, - PlanContext planContext, - ResponseListener listener) { + public void executePlan( + LogicalPlan plan, + PlanContext planContext, + ResponseListener listener) { try { planContext .getSplit() .ifPresentOrElse( split -> executionEngine.execute(plan(plan), new ExecutionContext(split), listener), - () -> executionEngine.execute( + () -> + executionEngine.execute( plan(plan), ExecutionContext.emptyExecutionContext(), listener)); } catch (Exception e) { listener.onFailure(e); @@ -71,14 +71,14 @@ public void executePlan(LogicalPlan plan, } /** - * Explain the query in {@link UnresolvedPlan} using {@link ResponseListener} to - * get and format explain response. + * Explain the query in {@link UnresolvedPlan} using {@link ResponseListener} to get and format + * explain response. * * @param plan {@link UnresolvedPlan} * @param listener {@link ResponseListener} for explain response */ - public void explain(UnresolvedPlan plan, - ResponseListener listener) { + public void explain( + UnresolvedPlan plan, ResponseListener listener) { try { executionEngine.explain(plan(analyze(plan)), listener); } catch (Exception e) { @@ -86,16 +86,12 @@ public void explain(UnresolvedPlan plan, } } - /** - * Analyze {@link UnresolvedPlan}. - */ + /** Analyze {@link UnresolvedPlan}. */ public LogicalPlan analyze(UnresolvedPlan plan) { return analyzer.analyze(plan, new AnalysisContext()); } - /** - * Translate {@link LogicalPlan} to {@link PhysicalPlan}. - */ + /** Translate {@link LogicalPlan} to {@link PhysicalPlan}. 
*/ public PhysicalPlan plan(LogicalPlan plan) { return planner.plan(plan); } diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java index 1654293c04..23091777ce 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java @@ -8,28 +8,20 @@ package org.opensearch.sql.executor.execution; - import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.QueryId; -/** - * AbstractPlan represent the execution entity of the Statement. - */ +/** AbstractPlan represent the execution entity of the Statement. */ @RequiredArgsConstructor public abstract class AbstractPlan { - /** - * Uniq query id. - */ - @Getter - private final QueryId queryId; + /** Uniq query id. */ + @Getter private final QueryId queryId; - /** - * Start query execution. - */ + /** Start query execution. */ public abstract void execute(); /** diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java index 0ea5266084..103e0e1081 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java @@ -15,26 +15,25 @@ import org.opensearch.sql.executor.QueryService; /** - * Query plan which does not reflect a search query being executed. - * It contains a command or an action, for example, a DDL query. + * Query plan which does not reflect a search query being executed. It contains a command or an + * action, for example, a DDL query. */ public class CommandPlan extends AbstractPlan { - /** - * The query plan ast. - */ + /** The query plan ast. 
*/ protected final UnresolvedPlan plan; - /** - * Query service. - */ + /** Query service. */ protected final QueryService queryService; protected final ResponseListener listener; /** Constructor. */ - public CommandPlan(QueryId queryId, UnresolvedPlan plan, QueryService queryService, - ResponseListener listener) { + public CommandPlan( + QueryId queryId, + UnresolvedPlan plan, + QueryService queryService, + ResponseListener listener) { super(queryId); this.plan = plan; this.queryService = queryService; diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java index 8c784f82ed..7868a39001 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java @@ -12,21 +12,18 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.QueryId; -/** - * Explain plan. - */ +/** Explain plan. */ public class ExplainPlan extends AbstractPlan { private final AbstractPlan plan; private final ResponseListener explainListener; - /** - * Constructor. - */ - public ExplainPlan(QueryId queryId, - AbstractPlan plan, - ResponseListener explainListener) { + /** Constructor. */ + public ExplainPlan( + QueryId queryId, + AbstractPlan plan, + ResponseListener explainListener) { super(queryId); this.plan = plan; this.explainListener = explainListener; diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java index aeecf3e76f..0ebdb875f6 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java @@ -17,19 +17,13 @@ import org.opensearch.sql.executor.QueryId; import org.opensearch.sql.executor.QueryService; -/** - * Query plan which includes a select query. 
- */ +/** Query plan which includes a select query. */ public class QueryPlan extends AbstractPlan { - /** - * The query plan ast. - */ + /** The query plan ast. */ protected final UnresolvedPlan plan; - /** - * Query service. - */ + /** Query service. */ protected final QueryService queryService; protected final ResponseListener listener; @@ -75,8 +69,9 @@ public void execute() { @Override public void explain(ResponseListener listener) { if (pageSize.isPresent()) { - listener.onFailure(new NotImplementedException( - "`explain` feature for paginated requests is not implemented yet.")); + listener.onFailure( + new NotImplementedException( + "`explain` feature for paginated requests is not implemented yet.")); } else { queryService.explain(plan, listener); } diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java index 3273eb3c18..52d7126e17 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java @@ -27,9 +27,7 @@ import org.opensearch.sql.executor.QueryService; import org.opensearch.sql.executor.pagination.CanPaginateVisitor; -/** - * QueryExecution Factory. - */ +/** QueryExecution Factory. */ @RequiredArgsConstructor public class QueryPlanFactory extends AbstractNodeVisitor< @@ -38,14 +36,12 @@ public class QueryPlanFactory Optional>, Optional>>> { - /** - * Query Service. - */ + /** Query Service. */ private final QueryService queryService; /** - * NO_CONSUMER_RESPONSE_LISTENER should never be called. It is only used as constructor - * parameter of {@link QueryPlan}. + * NO_CONSUMER_RESPONSE_LISTENER should never be called. It is only used as constructor parameter + * of {@link QueryPlan}. 
*/ @VisibleForTesting protected static final ResponseListener @@ -64,9 +60,7 @@ public void onFailure(Exception e) { } }; - /** - * Create QueryExecution from Statement. - */ + /** Create QueryExecution from Statement. */ public AbstractPlan create( Statement statement, Optional> queryListener, @@ -74,12 +68,12 @@ public AbstractPlan create( return statement.accept(this, Pair.of(queryListener, explainListener)); } - /** - * Creates a QueryPlan from a cursor. - */ - public AbstractPlan create(String cursor, boolean isExplain, - ResponseListener queryResponseListener, - ResponseListener explainListener) { + /** Creates a QueryPlan from a cursor. */ + public AbstractPlan create( + String cursor, + boolean isExplain, + ResponseListener queryResponseListener, + ResponseListener explainListener) { QueryId queryId = QueryId.queryId(); var plan = new QueryPlan(queryId, new FetchCursor(cursor), queryService, queryResponseListener); return isExplain ? new ExplainPlan(queryId, plan, explainListener) : plan; @@ -89,27 +83,32 @@ boolean canConvertToCursor(UnresolvedPlan plan) { return plan.accept(new CanPaginateVisitor(), null); } - /** - * Creates a {@link CloseCursor} command on a cursor. - */ - public AbstractPlan createCloseCursor(String cursor, - ResponseListener queryResponseListener) { - return new CommandPlan(QueryId.queryId(), new CloseCursor().attach(new FetchCursor(cursor)), - queryService, queryResponseListener); + /** Creates a {@link CloseCursor} command on a cursor. 
*/ + public AbstractPlan createCloseCursor( + String cursor, ResponseListener queryResponseListener) { + return new CommandPlan( + QueryId.queryId(), + new CloseCursor().attach(new FetchCursor(cursor)), + queryService, + queryResponseListener); } @Override public AbstractPlan visitQuery( Query node, - Pair>, - Optional>> + Pair< + Optional>, + Optional>> context) { Preconditions.checkArgument( context.getLeft().isPresent(), "[BUG] query listener must be not null"); if (node.getFetchSize() > 0) { if (canConvertToCursor(node.getPlan())) { - return new QueryPlan(QueryId.queryId(), node.getPlan(), node.getFetchSize(), + return new QueryPlan( + QueryId.queryId(), + node.getPlan(), + node.getFetchSize(), queryService, context.getLeft().get()); } else { @@ -117,24 +116,24 @@ public AbstractPlan visitQuery( throw new UnsupportedCursorRequestException(); } } else { - return new QueryPlan(QueryId.queryId(), node.getPlan(), queryService, - context.getLeft().get()); + return new QueryPlan( + QueryId.queryId(), node.getPlan(), queryService, context.getLeft().get()); } } @Override public AbstractPlan visitExplain( Explain node, - Pair>, - Optional>> + Pair< + Optional>, + Optional>> context) { Preconditions.checkArgument( context.getRight().isPresent(), "[BUG] explain listener must be not null"); return new ExplainPlan( QueryId.queryId(), - create(node.getStatement(), - Optional.of(NO_CONSUMER_RESPONSE_LISTENER), Optional.empty()), + create(node.getStatement(), Optional.of(NO_CONSUMER_RESPONSE_LISTENER), Optional.empty()), context.getRight().get()); } } diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java index 9bb37b064c..5adb9644e9 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java @@ -24,9 +24,7 @@ import 
org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; import org.opensearch.sql.planner.logical.LogicalRelation; -/** - * Streaming Query Plan. - */ +/** Streaming Query Plan. */ public class StreamingQueryPlan extends QueryPlan { private static final Logger log = LogManager.getLogger(StreamingQueryPlan.class); @@ -35,14 +33,13 @@ public class StreamingQueryPlan extends QueryPlan { private MicroBatchStreamingExecution streamingExecution; - /** - * constructor. - */ - public StreamingQueryPlan(QueryId queryId, - UnresolvedPlan plan, - QueryService queryService, - ResponseListener listener, - ExecutionStrategy executionStrategy) { + /** constructor. */ + public StreamingQueryPlan( + QueryId queryId, + UnresolvedPlan plan, + QueryService queryService, + ResponseListener listener, + ExecutionStrategy executionStrategy) { super(queryId, plan, queryService, listener); this.executionStrategy = executionStrategy; @@ -70,15 +67,13 @@ public void execute() { } interface ExecutionStrategy { - /** - * execute task. - */ + /** execute task. */ void execute(Runnable task) throws InterruptedException; } /** - * execute task with fixed interval. - * if task run time < interval, trigger next task on next interval. + * execute task with fixed interval.
+ * if task run time < interval, trigger next task on next interval.
* if task run time >= interval, trigger next task immediately. */ @RequiredArgsConstructor diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java b/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java index e304c132bd..9f14ba1e5d 100644 --- a/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java @@ -41,18 +41,26 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * Use this unresolved plan visitor to check if a plan can be serialized by PaginatedPlanCache. - * If
plan.accept(new CanPaginateVisitor(...))
returns true, - * then PaginatedPlanCache.convertToCursor will succeed. Otherwise, it will fail. - * The purpose of this visitor is to activate legacy engine fallback mechanism. - * Currently, V2 engine does not support queries with: - * - aggregation (GROUP BY clause or aggregation functions like min/max) - * - in memory aggregation (window function) - * - LIMIT/OFFSET clause(s) - * - without FROM clause - * - JOIN - * - a subquery - * V2 also requires that the table being queried should be an OpenSearch index. + * Use this unresolved plan visitor to check if a plan can be serialized by PaginatedPlanCache.
+ * If + * + *
plan.accept(new CanPaginateVisitor(...))
+ * + * returns true, PaginatedPlanCache.convertToCursor will succeed.
+ * Otherwise, it will fail.
+ * The purpose of this visitor is to activate legacy engine fallback mechanism.
+ * Currently, V2 engine does not support queries with:
+ * + *
    + *
  • aggregation (GROUP BY clause or aggregation functions like min/max) + *
  • in memory aggregation (window function) + *
  • LIMIT/OFFSET clause(s) + *
  • without FROM clause + *
  • JOIN + *
  • a subquery + *
+ * + * V2 also requires that the table being queried should be an OpenSearch index.
* See PaginatedPlanCache.canConvertToCursor for usage. */ public class CanPaginateVisitor extends AbstractNodeVisitor { @@ -80,8 +88,8 @@ protected Boolean canPaginate(Node node, Object context) { // https://github.com/opensearch-project/sql/issues/1471 @Override public Boolean visitSort(Sort node, Object context) { - return node.getSortList().stream().allMatch(f -> f.getField() instanceof QualifiedName - && visitField(f, context)) + return node.getSortList().stream() + .allMatch(f -> f.getField() instanceof QualifiedName && visitField(f, context)) && canPaginate(node, context); } @@ -116,8 +124,8 @@ public Boolean visitLiteral(Literal node, Object context) { @Override public Boolean visitField(Field node, Object context) { - return canPaginate(node, context) && node.getFieldArgs().stream() - .allMatch(n -> n.accept(this, context)); + return canPaginate(node, context) + && node.getFieldArgs().stream().allMatch(n -> n.accept(this, context)); } @Override @@ -192,8 +200,8 @@ public Boolean visitFunction(Function node, Object context) { @Override public Boolean visitIn(In node, Object context) { - return canPaginate(node, context) && node.getValueList().stream() - .allMatch(n -> n.accept(this, context)); + return canPaginate(node, context) + && node.getValueList().stream().allMatch(n -> n.accept(this, context)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java b/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java index bb320f5c67..59a14a2d72 100644 --- a/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java @@ -14,8 +14,7 @@ public class Cursor { public static final Cursor None = new Cursor(null); - @Getter - private final String data; + @Getter private final String data; public String toString() { return data; diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java 
b/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java index 07cf174d73..40a6cdfa84 100644 --- a/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java @@ -24,8 +24,8 @@ import org.opensearch.sql.storage.StorageEngine; /** - * This class is entry point to paged requests. It is responsible to cursor serialization - * and deserialization. + * This class is entry point to paged requests. It is responsible to cursor serialization and + * deserialization. */ @RequiredArgsConstructor public class PlanSerializer { @@ -33,14 +33,11 @@ public class PlanSerializer { private final StorageEngine engine; - - /** - * Converts a physical plan tree to a cursor. - */ + /** Converts a physical plan tree to a cursor. */ public Cursor convertToCursor(PhysicalPlan plan) { try { - return new Cursor(CURSOR_PREFIX - + serialize(((SerializablePlan) plan).getPlanForSerialization())); + return new Cursor( + CURSOR_PREFIX + serialize(((SerializablePlan) plan).getPlanForSerialization())); // ClassCastException thrown when a plan in the tree doesn't implement SerializablePlan } catch (NotSerializableException | ClassCastException | NoCursorException e) { return Cursor.None; @@ -49,6 +46,7 @@ public Cursor convertToCursor(PhysicalPlan plan) { /** * Serializes and compresses the object. + * * @param object The object. * @return Encoded binary data. 
*/ @@ -61,9 +59,12 @@ protected String serialize(Serializable object) throws NotSerializableException ByteArrayOutputStream out = new ByteArrayOutputStream(); // GZIP provides 35-45%, lzma from apache commons-compress has few % better compression - GZIPOutputStream gzip = new GZIPOutputStream(out) { { - this.def.setLevel(Deflater.BEST_COMPRESSION); - } }; + GZIPOutputStream gzip = + new GZIPOutputStream(out) { + { + this.def.setLevel(Deflater.BEST_COMPRESSION); + } + }; gzip.write(output.toByteArray()); gzip.close(); @@ -77,24 +78,23 @@ protected String serialize(Serializable object) throws NotSerializableException /** * Decompresses and deserializes the binary data. + * * @param code Encoded binary data. * @return An object. */ protected Serializable deserialize(String code) { try { - GZIPInputStream gzip = new GZIPInputStream( - new ByteArrayInputStream(HashCode.fromString(code).asBytes())); - ObjectInputStream objectInput = new CursorDeserializationStream( - new ByteArrayInputStream(gzip.readAllBytes())); + GZIPInputStream gzip = + new GZIPInputStream(new ByteArrayInputStream(HashCode.fromString(code).asBytes())); + ObjectInputStream objectInput = + new CursorDeserializationStream(new ByteArrayInputStream(gzip.readAllBytes())); return (Serializable) objectInput.readObject(); } catch (Exception e) { throw new IllegalStateException("Failed to deserialize object", e); } } - /** - * Converts a cursor to a physical plan tree. - */ + /** Converts a cursor to a physical plan tree. 
*/ public PhysicalPlan convertToPlan(String cursor) { if (!cursor.startsWith(CURSOR_PREFIX)) { throw new UnsupportedOperationException("Unsupported cursor"); diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java b/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java index cd7d7dae5a..ab03c4fb53 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java @@ -8,9 +8,7 @@ import lombok.Data; import org.opensearch.sql.storage.split.Split; -/** - * A batch of streaming execution. - */ +/** A batch of streaming execution. */ @Data public class Batch { private final Split split; diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java b/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java index d85c17fe66..4e05484f15 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java @@ -20,9 +20,7 @@ import org.opensearch.sql.planner.PlanContext; import org.opensearch.sql.planner.logical.LogicalPlan; -/** - * Micro batch streaming execution. - */ +/** Micro batch streaming execution. */ public class MicroBatchStreamingExecution { private static final Logger log = LogManager.getLogger(MicroBatchStreamingExecution.class); @@ -46,9 +44,7 @@ public class MicroBatchStreamingExecution { /** keep track the latest commit batchId. */ private final MetadataLog committedLog; - /** - * Constructor. - */ + /** Constructor. */ public MicroBatchStreamingExecution( StreamingSource source, LogicalPlan batchPlan, @@ -63,9 +59,7 @@ public MicroBatchStreamingExecution( this.committedLog = committedLog; } - /** - * Pull the {@link Batch} from {@link StreamingSource} and execute the {@link Batch}. 
- */ + /** Pull the {@link Batch} from {@link StreamingSource} and execute the {@link Batch}. */ public void execute() { Long latestBatchId = offsetLog.getLatest().map(Pair::getKey).orElse(INITIAL_LATEST_BATCH_ID); Long latestCommittedBatchId = diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java b/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java index 00f040e437..27960da84d 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java @@ -7,9 +7,7 @@ import lombok.Data; -/** - * Offset. - */ +/** Offset. */ @Data public class Offset { diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java b/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java index ebd3fa714b..2b45a45d9a 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java @@ -7,9 +7,7 @@ import java.util.Optional; -/** - * Streaming source. - */ +/** Streaming source. */ public interface StreamingSource { /** * Get current {@link Offset} of stream data. 
diff --git a/core/src/main/java/org/opensearch/sql/expression/DSL.java b/core/src/main/java/org/opensearch/sql/expression/DSL.java index f1a8110a29..4341668b69 100644 --- a/core/src/main/java/org/opensearch/sql/expression/DSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/DSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import java.util.Arrays; @@ -29,8 +28,7 @@ public class DSL { - private DSL() { - } + private DSL() {} public static LiteralExpression literal(Byte value) { return new LiteralExpression(ExprValueUtils.byteValue(value)); @@ -68,9 +66,7 @@ public static LiteralExpression literal(ExprValue value) { return new LiteralExpression(value); } - /** - * Wrap a number to {@link LiteralExpression}. - */ + /** Wrap a number to {@link LiteralExpression}. */ public static LiteralExpression literal(Number value) { if (value instanceof Integer) { return new LiteralExpression(ExprValueUtils.integerValue(value.intValue())); @@ -88,21 +84,21 @@ public static ReferenceExpression ref(String ref, ExprType type) { } /** - * Wrap a named expression if not yet. The intent is that different languages may use - * Alias or not when building AST. This caused either named or unnamed expression - * is resolved by analyzer. To make unnamed expression acceptable for logical project, - * it is required to wrap it by named expression here before passing to logical project. + * Wrap a named expression if not yet. The intent is that different languages may use Alias or not + * when building AST. This caused either named or unnamed expression is resolved by analyzer. To + * make unnamed expression acceptable for logical project, it is required to wrap it by named + * expression here before passing to logical project. * - * @param expression expression - * @return expression if named already or expression wrapped by named expression. 
+ * @param expression expression + * @return expression if named already or expression wrapped by named expression. */ public static NamedExpression named(Expression expression) { if (expression instanceof NamedExpression) { return (NamedExpression) expression; } if (expression instanceof ParseExpression) { - return named(((ParseExpression) expression).getIdentifier().valueOf().stringValue(), - expression); + return named( + ((ParseExpression) expression).getIdentifier().valueOf().stringValue(), expression); } return named(expression.toString(), expression); } @@ -127,18 +123,18 @@ public static NamedArgumentExpression namedArgument(String name, String value) { return namedArgument(name, literal(value)); } - public static GrokExpression grok(Expression sourceField, Expression pattern, - Expression identifier) { + public static GrokExpression grok( + Expression sourceField, Expression pattern, Expression identifier) { return new GrokExpression(sourceField, pattern, identifier); } - public static RegexExpression regex(Expression sourceField, Expression pattern, - Expression identifier) { + public static RegexExpression regex( + Expression sourceField, Expression pattern, Expression identifier) { return new RegexExpression(sourceField, pattern, identifier); } - public static PatternsExpression patterns(Expression sourceField, Expression pattern, - Expression identifier) { + public static PatternsExpression patterns( + Expression sourceField, Expression pattern, Expression identifier) { return new PatternsExpression(sourceField, pattern, identifier); } @@ -268,7 +264,6 @@ public static FunctionExpression signum(Expression... expressions) { public static FunctionExpression sinh(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.SINH, expressions); - } public static FunctionExpression sqrt(Expression... expressions) { @@ -364,8 +359,7 @@ public static FunctionExpression dayname(Expression... 
expressions) { } public static FunctionExpression dayofmonth( - FunctionProperties functionProperties, - Expression... expressions) { + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.DAYOFMONTH, expressions); } @@ -379,8 +373,7 @@ public static FunctionExpression dayofyear(Expression... expressions) { } public static FunctionExpression day_of_month( - FunctionProperties functionProperties, - Expression... expressions) { + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.DAY_OF_MONTH, expressions); } @@ -394,8 +387,8 @@ public static FunctionExpression day_of_week( return compile(functionProperties, BuiltinFunctionName.DAY_OF_WEEK, expressions); } - public static FunctionExpression extract(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression extract( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.EXTRACT, expressions); } @@ -419,8 +412,8 @@ public static FunctionExpression hour_of_day(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.HOUR_OF_DAY, expressions); } - public static FunctionExpression last_day(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression last_day( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.LAST_DAY, expressions); } @@ -477,14 +470,13 @@ public static FunctionExpression timestamp(Expression... expressions) { return timestamp(FunctionProperties.None, expressions); } - public static FunctionExpression timestamp(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression timestamp( + FunctionProperties functionProperties, Expression... 
expressions) { return compile(functionProperties, BuiltinFunctionName.TIMESTAMP, expressions); } public static FunctionExpression date_format( - FunctionProperties functionProperties, - Expression... expressions) { + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.DATE_FORMAT, expressions); } @@ -492,8 +484,8 @@ public static FunctionExpression to_days(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.TO_DAYS, expressions); } - public static FunctionExpression to_seconds(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression to_seconds( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TO_SECONDS, expressions); } @@ -506,8 +498,8 @@ public static FunctionExpression week( return compile(functionProperties, BuiltinFunctionName.WEEK, expressions); } - public static FunctionExpression weekday(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression weekday( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.WEEKDAY, expressions); } @@ -530,13 +522,13 @@ public static FunctionExpression yearweek( return compile(functionProperties, BuiltinFunctionName.YEARWEEK, expressions); } - public static FunctionExpression str_to_date(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression str_to_date( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.STR_TO_DATE, expressions); } public static FunctionExpression sec_to_time(Expression... 
expressions) { - return compile(FunctionProperties.None, BuiltinFunctionName.SEC_TO_TIME, expressions); + return compile(FunctionProperties.None, BuiltinFunctionName.SEC_TO_TIME, expressions); } public static FunctionExpression substr(Expression... expressions) { @@ -771,8 +763,7 @@ public static FunctionExpression iffunction(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.IF, expressions); } - public static Expression cases(Expression defaultResult, - WhenClause... whenClauses) { + public static Expression cases(Expression defaultResult, WhenClause... whenClauses) { return new CaseClause(Arrays.asList(whenClauses), defaultResult); } @@ -884,53 +875,52 @@ public static FunctionExpression score_query(Expression... args) { return compile(FunctionProperties.None, BuiltinFunctionName.SCORE_QUERY, args); } - public static FunctionExpression now(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression now(FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.NOW, args); } - public static FunctionExpression current_timestamp(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression current_timestamp( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURRENT_TIMESTAMP, args); } - public static FunctionExpression localtimestamp(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression localtimestamp( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.LOCALTIMESTAMP, args); } - public static FunctionExpression localtime(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression localtime( + FunctionProperties functionProperties, Expression... 
args) { return compile(functionProperties, BuiltinFunctionName.LOCALTIME, args); } - public static FunctionExpression sysdate(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression sysdate( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.SYSDATE, args); } - public static FunctionExpression curtime(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression curtime( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURTIME, args); } - public static FunctionExpression current_time(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression current_time( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURRENT_TIME, args); } - public static FunctionExpression curdate(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression curdate( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURDATE, args); } - public static FunctionExpression current_date(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression current_date( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURRENT_DATE, args); } - public static FunctionExpression time_format(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression time_format( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TIME_FORMAT, expressions); } @@ -938,38 +928,36 @@ public static FunctionExpression timestampadd(Expression... 
expressions) { return timestampadd(FunctionProperties.None, expressions); } - public static FunctionExpression timestampadd(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression timestampadd( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TIMESTAMPADD, expressions); } - public static FunctionExpression timestampdiff(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression timestampdiff( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TIMESTAMPDIFF, expressions); } - - public static FunctionExpression utc_date(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression utc_date( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.UTC_DATE, args); } - public static FunctionExpression utc_time(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression utc_time( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.UTC_TIME, args); } - public static FunctionExpression utc_timestamp(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression utc_timestamp( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.UTC_TIMESTAMP, args); - } @SuppressWarnings("unchecked") - private static - T compile(FunctionProperties functionProperties, - BuiltinFunctionName bfn, Expression... args) { - return (T) BuiltinFunctionRepository.getInstance().compile(functionProperties, - bfn.getName(), Arrays.asList(args)); + private static T compile( + FunctionProperties functionProperties, BuiltinFunctionName bfn, Expression... 
args) { + return (T) + BuiltinFunctionRepository.getInstance() + .compile(functionProperties, bfn.getName(), Arrays.asList(args)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/Expression.java b/core/src/main/java/org/opensearch/sql/expression/Expression.java index 25a8173efa..e4a5f908e5 100644 --- a/core/src/main/java/org/opensearch/sql/expression/Expression.java +++ b/core/src/main/java/org/opensearch/sql/expression/Expression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import java.io.Serializable; @@ -11,36 +10,28 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.env.Environment; -/** - * The definition of the resolved expression. - */ +/** The definition of the resolved expression. */ public interface Expression extends Serializable { - /** - * Evaluate the value of expression that does not depend on value environment. - */ + /** Evaluate the value of expression that does not depend on value environment. */ default ExprValue valueOf() { return valueOf(null); } - /** - * Evaluate the value of expression in the value environment. - */ + /** Evaluate the value of expression in the value environment. */ ExprValue valueOf(Environment valueEnv); - /** - * The type of the expression. - */ + /** The type of the expression. */ ExprType type(); /** * Accept a visitor to visit current expression node. 
- * @param visitor visitor - * @param context context - * @param result type - * @param context type - * @return result accumulated by visitor when visiting + * + * @param visitor visitor + * @param context context + * @param result type + * @param context type + * @return result accumulated by visitor when visiting */ T accept(ExpressionNodeVisitor visitor, C context); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java b/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java index e3d4e38674..17dc7f5cd7 100644 --- a/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import org.opensearch.sql.expression.aggregation.Aggregator; @@ -15,8 +14,9 @@ /** * Abstract visitor for expression tree nodes. - * @param type of return value to accumulate when visiting. - * @param type of context. + * + * @param type of return value to accumulate when visiting. + * @param type of context. */ public abstract class ExpressionNodeVisitor { @@ -26,9 +26,10 @@ public T visitNode(Expression node, C context) { /** * Visit children nodes in function arguments. - * @param node function node - * @param context context - * @return result + * + * @param node function node + * @param context context + * @return result */ public T visitChildren(FunctionImplementation node, C context) { T result = defaultResult(); @@ -81,10 +82,13 @@ public T visitNamedAggregator(NamedAggregator node, C context) { } /** - * Call visitFunction() by default rather than visitChildren(). - * This makes CASE/WHEN able to be handled: - * 1) by visitFunction() if not overwritten: ex. FilterQueryBuilder - * 2) by visitCase/When() otherwise if any special logic: ex. ExprReferenceOptimizer + * Call visitFunction() by default rather than visitChildren(). 
This makes CASE/WHEN able to be + * handled: + * + *
    + *
  1. by visitFunction() if not overwritten: ex. FilterQueryBuilder + *
  2. by visitCase/When() otherwise if any special logic: ex. ExprReferenceOptimizer + *
*/ public T visitCase(CaseClause node, C context) { return visitFunction(node, context); diff --git a/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java b/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java index 2a695f26e6..b67eb38c00 100644 --- a/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import java.util.List; @@ -14,22 +13,17 @@ import org.opensearch.sql.expression.function.FunctionImplementation; import org.opensearch.sql.expression.function.FunctionName; -/** - * Function Expression. - */ +/** Function Expression. */ @EqualsAndHashCode @RequiredArgsConstructor @ToString public abstract class FunctionExpression implements Expression, FunctionImplementation { - @Getter - private final FunctionName functionName; + @Getter private final FunctionName functionName; - @Getter - private final List arguments; + @Getter private final List arguments; @Override public T accept(ExpressionNodeVisitor visitor, C context) { return visitor.visitFunction(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java b/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java index 804c38a6f7..79cc07f048 100644 --- a/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java @@ -20,9 +20,7 @@ import org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * Highlight Expression. - */ +/** Highlight Expression. 
*/ @Getter public class HighlightExpression extends FunctionExpression { private final Expression highlightField; @@ -30,17 +28,19 @@ public class HighlightExpression extends FunctionExpression { /** * HighlightExpression Constructor. + * * @param highlightField : Highlight field for expression. */ public HighlightExpression(Expression highlightField) { super(BuiltinFunctionName.HIGHLIGHT.getName(), List.of(highlightField)); this.highlightField = highlightField; - this.type = this.highlightField.toString().contains("*") - ? ExprCoreType.STRUCT : ExprCoreType.ARRAY; + this.type = + this.highlightField.toString().contains("*") ? ExprCoreType.STRUCT : ExprCoreType.ARRAY; } /** * Return collection value matching highlight field. + * * @param valueEnv : Dataset to parse value from. * @return : collection value of highlight fields. */ @@ -57,15 +57,15 @@ public ExprValue valueOf(Environment valueEnv) { // used in conjunction with other highlight calls, we need to ensure // only wildcard regex matching is mapped to wildcard call. if (this.type == ExprCoreType.STRUCT && value.type() == ExprCoreType.STRUCT) { - value = new ExprTupleValue( - new LinkedHashMap(value.tupleValue() - .entrySet() - .stream() - .filter(s -> matchesHighlightRegex(s.getKey(), - StringUtils.unquoteText(highlightField.toString()))) - .collect(Collectors.toMap( - e -> e.getKey(), - e -> e.getValue())))); + value = + new ExprTupleValue( + new LinkedHashMap( + value.tupleValue().entrySet().stream() + .filter( + s -> + matchesHighlightRegex( + s.getKey(), StringUtils.unquoteText(highlightField.toString()))) + .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())))); if (value.tupleValue().isEmpty()) { value = ExprValueUtils.missingValue(); } @@ -76,6 +76,7 @@ public ExprValue valueOf(Environment valueEnv) { /** * Get type for HighlightExpression. + * * @return : Expression type. 
*/ @Override @@ -90,6 +91,7 @@ public T accept(ExpressionNodeVisitor visitor, C context) { /** * Check if field matches the wildcard pattern used in highlight query. + * * @param field Highlight selected field for query * @param pattern Wildcard regex to match field against * @return True if field matches wildcard pattern diff --git a/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java b/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java index adb8e197d1..eba03e8430 100644 --- a/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import lombok.EqualsAndHashCode; @@ -12,9 +11,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.env.Environment; -/** - * Literal Expression. - */ +/** Literal Expression. */ @EqualsAndHashCode @RequiredArgsConstructor public class LiteralExpression implements Expression { diff --git a/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java b/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java index 0f4601f1bf..c3ce60bd4a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java @@ -13,9 +13,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.env.Environment; -/** - * Named argument expression that represents function argument with name. - */ +/** Named argument expression that represents function argument with name. 
*/ @RequiredArgsConstructor @Getter @EqualsAndHashCode diff --git a/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java b/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java index 26996eb93d..03118311a9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import com.google.common.base.Strings; @@ -16,8 +15,8 @@ import org.opensearch.sql.expression.env.Environment; /** - * Named expression that represents expression with name. - * Please see more details in associated unresolved expression operator + * Named expression that represents expression with name.
+ * Please see more details in associated unresolved expression operator
* {@link org.opensearch.sql.ast.expression.Alias}. */ @AllArgsConstructor @@ -26,19 +25,13 @@ @RequiredArgsConstructor public class NamedExpression implements Expression { - /** - * Expression name. - */ + /** Expression name. */ private final String name; - /** - * Expression that being named. - */ + /** Expression that being named. */ private final Expression delegated; - /** - * Optional alias. - */ + /** Optional alias. */ private String alias; @Override @@ -53,7 +46,8 @@ public ExprType type() { /** * Get expression name using name or its alias (if it's present). - * @return expression name + * + * @return expression name */ public String getNameOrAlias() { return Strings.isNullOrEmpty(alias) ? name : alias; @@ -68,5 +62,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public String toString() { return getNameOrAlias(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java b/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java index 3c5b2af23c..eb510e3b8a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import static org.opensearch.sql.utils.ExpressionUtils.PATH_SEP; @@ -22,16 +21,15 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ReferenceExpression implements Expression { - @Getter - private final String attr; + @Getter private final String attr; - @Getter - private final List paths; + @Getter private final List paths; private final ExprType type; /** * Constructor of ReferenceExpression. + * * @param ref the field name. e.g. addr.state/addr. * @param type type. */ @@ -63,6 +61,7 @@ public String toString() { } /** + *
    * Resolve the ExprValue from {@link ExprTupleValue} using paths.
    * Considering the following sample data.
    * {
@@ -95,6 +94,7 @@ public String toString() {
    *
    * @param value {@link ExprTupleValue}.
    * @return {@link ExprTupleValue}.
+   * 
*/ public ExprValue resolve(ExprTupleValue value) { return resolve(value, paths); diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java index 345c6c00dd..478e3caf54 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java @@ -3,18 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.storage.bindingtuple.BindingTuple; -/** - * Maintain the state when {@link Aggregator} iterate on the {@link BindingTuple}. - */ +/** Maintain the state when {@link Aggregator} iterate on the {@link BindingTuple}. */ public interface AggregationState { - /** - * Get {@link ExprValue} result. - */ + /** Get {@link ExprValue} result. */ ExprValue result(); } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java index a122ea6540..a2a3ce76c3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import java.util.List; @@ -26,36 +25,34 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * Aggregator which will iterate on the {@link BindingTuple}s to aggregate the result. - * The Aggregator is not well fit into Expression, because it has side effect. - * But we still want to make it implement {@link Expression} interface to make - * {@link ExpressionAnalyzer} easier. + * Aggregator which will iterate on the {@link BindingTuple}s to aggregate the result. 
The + * Aggregator is not well fit into Expression, because it has side effect. But we still want to make + * it implement {@link Expression} interface to make {@link ExpressionAnalyzer} easier. */ @EqualsAndHashCode @RequiredArgsConstructor public abstract class Aggregator implements FunctionImplementation, Expression { - @Getter - private final FunctionName functionName; - @Getter - private final List arguments; + @Getter private final FunctionName functionName; + @Getter private final List arguments; protected final ExprCoreType returnType; + @Setter @Getter @Accessors(fluent = true) protected Expression condition; + @Setter @Getter @Accessors(fluent = true) protected Boolean distinct = false; - /** - * Create an {@link AggregationState} which will be used for aggregation. - */ + /** Create an {@link AggregationState} which will be used for aggregation. */ public abstract S create(); /** * Iterate on {@link ExprValue}. + * * @param value {@link ExprValue} * @param state {@link AggregationState} * @return {@link AggregationState} @@ -63,9 +60,9 @@ public abstract class Aggregator protected abstract S iterate(ExprValue value, S state); /** - * Let the aggregator iterate on the {@link BindingTuple} - * To filter out ExprValues that are missing, null or cannot satisfy {@link #condition} - * Before the specific aggregator iterating ExprValue in the tuple. + * Let the aggregator iterate on the {@link BindingTuple} To filter out ExprValues that are + * missing, null or cannot satisfy {@link #condition} Before the specific aggregator iterating + * ExprValue in the tuple. * * @param tuple {@link BindingTuple} * @param state {@link AggregationState} @@ -95,14 +92,11 @@ public T accept(ExpressionNodeVisitor visitor, C context) { return visitor.visitAggregator(this, context); } - /** - * Util method to get value of condition in aggregation filter. - */ + /** Util method to get value of condition in aggregation filter. 
*/ public boolean conditionValue(BindingTuple tuple) { if (condition == null) { return true; } return ExprValueUtils.getBooleanValue(condition.valueOf(tuple)); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java index a24eeca1c1..4a1d4d309b 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -35,12 +34,11 @@ import org.opensearch.sql.expression.function.FunctionSignature; /** - * The definition of aggregator function - * avg, Accepts two numbers and produces a number. - * sum, Accepts two numbers and produces a number. - * max, Accepts two numbers and produces a number. - * min, Accepts two numbers and produces a number. - * count, Accepts two numbers and produces a number. + * The definition of aggregator functions avg, sum, min, max and + * count.
+ * All of them accept a list of numbers and produce a number. avg, min and + * max also accept datetime types.
+ * count accepts values of all types. */ @UtilityClass public class AggregatorFunction { @@ -67,26 +65,37 @@ private static DefaultFunctionResolver avg() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new AvgAggregator(arguments, DOUBLE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new AvgAggregator(arguments, DATE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATETIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATETIME)), (functionProperties, arguments) -> new AvgAggregator(arguments, DATETIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new AvgAggregator(arguments, TIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), (functionProperties, arguments) -> new AvgAggregator(arguments, TIMESTAMP)) - .build() - ); + .build()); } private static DefaultFunctionResolver count() { FunctionName functionName = BuiltinFunctionName.COUNT.getName(); - DefaultFunctionResolver functionResolver = new DefaultFunctionResolver(functionName, - ExprCoreType.coreTypes().stream().collect(Collectors.toMap( - type -> new FunctionSignature(functionName, Collections.singletonList(type)), - type -> (functionProperties, arguments) -> new CountAggregator(arguments, INTEGER)))); + DefaultFunctionResolver functionResolver = + new DefaultFunctionResolver( + functionName, + ExprCoreType.coreTypes().stream() + 
.collect( + Collectors.toMap( + type -> + new FunctionSignature(functionName, Collections.singletonList(type)), + type -> + (functionProperties, arguments) -> + new CountAggregator(arguments, INTEGER)))); return functionResolver; } @@ -95,16 +104,19 @@ private static DefaultFunctionResolver sum() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(INTEGER)), + .put( + new FunctionSignature(functionName, Collections.singletonList(INTEGER)), (functionProperties, arguments) -> new SumAggregator(arguments, INTEGER)) - .put(new FunctionSignature(functionName, Collections.singletonList(LONG)), + .put( + new FunctionSignature(functionName, Collections.singletonList(LONG)), (functionProperties, arguments) -> new SumAggregator(arguments, LONG)) - .put(new FunctionSignature(functionName, Collections.singletonList(FLOAT)), + .put( + new FunctionSignature(functionName, Collections.singletonList(FLOAT)), (functionProperties, arguments) -> new SumAggregator(arguments, FLOAT)) - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new SumAggregator(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver min() { @@ -112,23 +124,32 @@ private static DefaultFunctionResolver min() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(INTEGER)), + .put( + new FunctionSignature(functionName, Collections.singletonList(INTEGER)), (functionProperties, arguments) -> new MinAggregator(arguments, INTEGER)) - .put(new FunctionSignature(functionName, Collections.singletonList(LONG)), + .put( + new FunctionSignature(functionName, Collections.singletonList(LONG)), (functionProperties, arguments) -> new MinAggregator(arguments, LONG)) - 
.put(new FunctionSignature(functionName, Collections.singletonList(FLOAT)), + .put( + new FunctionSignature(functionName, Collections.singletonList(FLOAT)), (functionProperties, arguments) -> new MinAggregator(arguments, FLOAT)) - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new MinAggregator(arguments, DOUBLE)) - .put(new FunctionSignature(functionName, Collections.singletonList(STRING)), + .put( + new FunctionSignature(functionName, Collections.singletonList(STRING)), (functionProperties, arguments) -> new MinAggregator(arguments, STRING)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new MinAggregator(arguments, DATE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATETIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATETIME)), (functionProperties, arguments) -> new MinAggregator(arguments, DATETIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MinAggregator(arguments, TIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), (functionProperties, arguments) -> new MinAggregator(arguments, TIMESTAMP)) .build()); } @@ -138,26 +159,34 @@ private static DefaultFunctionResolver max() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(INTEGER)), + .put( + new FunctionSignature(functionName, Collections.singletonList(INTEGER)), (functionProperties, arguments) -> new 
MaxAggregator(arguments, INTEGER)) - .put(new FunctionSignature(functionName, Collections.singletonList(LONG)), + .put( + new FunctionSignature(functionName, Collections.singletonList(LONG)), (functionProperties, arguments) -> new MaxAggregator(arguments, LONG)) - .put(new FunctionSignature(functionName, Collections.singletonList(FLOAT)), + .put( + new FunctionSignature(functionName, Collections.singletonList(FLOAT)), (functionProperties, arguments) -> new MaxAggregator(arguments, FLOAT)) - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new MaxAggregator(arguments, DOUBLE)) - .put(new FunctionSignature(functionName, Collections.singletonList(STRING)), + .put( + new FunctionSignature(functionName, Collections.singletonList(STRING)), (functionProperties, arguments) -> new MaxAggregator(arguments, STRING)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new MaxAggregator(arguments, DATE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATETIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATETIME)), (functionProperties, arguments) -> new MaxAggregator(arguments, DATETIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MaxAggregator(arguments, TIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), (functionProperties, arguments) -> new MaxAggregator(arguments, TIMESTAMP)) - .build() - ); + .build()); } private static DefaultFunctionResolver varSamp() { @@ -165,10 
+194,10 @@ private static DefaultFunctionResolver varSamp() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> varianceSample(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver varPop() { @@ -176,10 +205,10 @@ private static DefaultFunctionResolver varPop() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> variancePopulation(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver stddevSamp() { @@ -187,10 +216,10 @@ private static DefaultFunctionResolver stddevSamp() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> stddevSample(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver stddevPop() { @@ -198,20 +227,22 @@ private static DefaultFunctionResolver stddevPop() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> stddevPopulation(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver take() { FunctionName functionName = BuiltinFunctionName.TAKE.getName(); - DefaultFunctionResolver functionResolver = new 
DefaultFunctionResolver(functionName, - new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, ImmutableList.of(STRING, INTEGER)), - (functionProperties, arguments) -> new TakeAggregator(arguments, ARRAY)) - .build()); + DefaultFunctionResolver functionResolver = + new DefaultFunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, ImmutableList.of(STRING, INTEGER)), + (functionProperties, arguments) -> new TakeAggregator(arguments, ARRAY)) + .build()); return functionResolver; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java index a899a6b45b..c528968018 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static java.time.temporal.ChronoUnit.MILLIS; @@ -27,14 +26,14 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The average aggregator aggregate the value evaluated by the expression. - * If the expression evaluated result is NULL or MISSING, then the result is NULL. + * The average aggregator aggregate the value evaluated by the expression. If the expression + * evaluated result is NULL or MISSING, then the result is NULL. */ public class AvgAggregator extends Aggregator { /** - * To process by different ways different data types, we need to store the type. - * Input data has the same type as the result. + * To process by different ways different data types, we need to store the type. Input data has + * the same type as the result. 
*/ private final ExprCoreType dataType; @@ -56,7 +55,7 @@ public AvgState create() { return new TimeAvgState(); case DOUBLE: return new DoubleAvgState(); - default: //unreachable code - we don't expose signatures for unsupported types + default: // unreachable code - we don't expose signatures for unsupported types throw new IllegalArgumentException( String.format("avg aggregation over %s type is not supported", dataType)); } @@ -72,9 +71,7 @@ public String toString() { return String.format(Locale.ROOT, "avg(%s)", format(getArguments())); } - /** - * Average State. - */ + /** Average State. */ protected abstract static class AvgState implements AggregationState { protected ExprValue count; protected ExprValue total; @@ -117,15 +114,16 @@ public ExprValue result() { } return new ExprDateValue( - new ExprTimestampValue(Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) - .dateValue()); + new ExprTimestampValue( + Instant.ofEpochMilli( + DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) + .dateValue()); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())) - .valueOf(); + total = + DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); return super.iterate(value); } } @@ -138,15 +136,16 @@ public ExprValue result() { } return new ExprDatetimeValue( - new ExprTimestampValue(Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) - .datetimeValue()); + new ExprTimestampValue( + Instant.ofEpochMilli( + DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) + .datetimeValue()); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())) - .valueOf(); + total = + DSL.add(DSL.literal(total), 
DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); return super.iterate(value); } } @@ -158,14 +157,15 @@ public ExprValue result() { return ExprNullValue.of(); } - return new ExprTimestampValue(Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())); + return new ExprTimestampValue( + Instant.ofEpochMilli( + DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())) - .valueOf(); + total = + DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); return super.iterate(value); } } @@ -177,14 +177,16 @@ public ExprValue result() { return ExprNullValue.of(); } - return new ExprTimeValue(LocalTime.MIN.plus( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue(), MILLIS)); + return new ExprTimeValue( + LocalTime.MIN.plus( + DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue(), MILLIS)); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), - DSL.literal(MILLIS.between(LocalTime.MIN, value.timeValue()))).valueOf(); + total = + DSL.add(DSL.literal(total), DSL.literal(MILLIS.between(LocalTime.MIN, value.timeValue()))) + .valueOf(); return super.iterate(value); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java index 813842cadc..c4c02eb1d3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.utils.ExpressionUtils.format; @@ -43,9 +42,7 @@ public 
String toString() { : String.format(Locale.ROOT, "count(%s)", format(getArguments())); } - /** - * Count State. - */ + /** Count State. */ protected static class CountState implements AggregationState { protected int count; diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java index e9123c0ac2..863d4603a7 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; @@ -45,9 +44,7 @@ protected static class MaxState implements AggregationState { } public void max(ExprValue value) { - maxResult = maxResult.isNull() ? value - : (maxResult.compareTo(value) > 0) - ? maxResult : value; + maxResult = maxResult.isNull() ? value : (maxResult.compareTo(value) > 0) ? maxResult : value; } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java index 897fe857ff..c1b9ebcd4f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; @@ -16,8 +15,8 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The minimum aggregator aggregate the value evaluated by the expression. - * If the expression evaluated result is NULL or MISSING, then the result is NULL. 
+ * The minimum aggregator aggregate the value evaluated by the expression. If the expression + * evaluated result is NULL or MISSING, then the result is NULL. */ public class MinAggregator extends Aggregator { @@ -25,7 +24,6 @@ public MinAggregator(List arguments, ExprCoreType returnType) { super(BuiltinFunctionName.MIN.getName(), arguments, returnType); } - @Override public MinState create() { return new MinState(); @@ -50,9 +48,7 @@ protected static class MinState implements AggregationState { } public void min(ExprValue value) { - minResult = minResult.isNull() ? value - : (minResult.compareTo(value) < 0) - ? minResult : value; + minResult = minResult.isNull() ? value : (minResult.compareTo(value) < 0) ? minResult : value; } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java index 510c5d1e45..fa84b74ba5 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import lombok.EqualsAndHashCode; @@ -12,35 +11,27 @@ import org.opensearch.sql.expression.ExpressionNodeVisitor; /** - * NamedAggregator expression that represents expression with name. - * Please see more details in associated unresolved expression operator + * NamedAggregator expression that represents expression with name. Please see more details in + * associated unresolved expression operator
* {@link org.opensearch.sql.ast.expression.Alias}. */ @EqualsAndHashCode(callSuper = false) public class NamedAggregator extends Aggregator { - /** - * Aggregator name. - */ + /** Aggregator name. */ private final String name; - /** - * Aggregator that being named. - */ - @Getter - private final Aggregator delegated; + /** Aggregator that being named. */ + @Getter private final Aggregator delegated; /** - * NamedAggregator. - * The aggregator properties {@link #condition} and {@link #distinct} - * are inherited by named aggregator to avoid errors introduced by the property inconsistency. + * NamedAggregator. The aggregator properties {@link #condition} and {@link #distinct} are + * inherited by named aggregator to avoid errors introduced by the property inconsistency. * * @param name name * @param delegated delegated */ - public NamedAggregator( - String name, - Aggregator delegated) { + public NamedAggregator(String name, Aggregator delegated) { super(delegated.getFunctionName(), delegated.getArguments(), delegated.returnType); this.name = name; this.delegated = delegated; @@ -60,7 +51,8 @@ protected AggregationState iterate(ExprValue value, AggregationState state) { /** * Get expression name using name or its alias (if it's present). 
- * @return expression name + * + * @return expression name */ public String getName() { return name; @@ -75,5 +67,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public String toString() { return getName(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java index 0cd8494449..d5422bc788 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java @@ -26,26 +26,18 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * StandardDeviation Aggregator. - */ +/** StandardDeviation Aggregator. */ public class StdDevAggregator extends Aggregator { private final boolean isSampleStdDev; - /** - * Build Population Variance {@link VarianceAggregator}. - */ - public static Aggregator stddevPopulation(List arguments, - ExprCoreType returnType) { + /** Build Population Variance {@link VarianceAggregator}. */ + public static Aggregator stddevPopulation(List arguments, ExprCoreType returnType) { return new StdDevAggregator(false, arguments, returnType); } - /** - * Build Sample Variance {@link VarianceAggregator}. - */ - public static Aggregator stddevSample(List arguments, - ExprCoreType returnType) { + /** Build Sample Variance {@link VarianceAggregator}. */ + public static Aggregator stddevSample(List arguments, ExprCoreType returnType) { return new StdDevAggregator(true, arguments, returnType); } @@ -53,7 +45,7 @@ public static Aggregator stddevSample(List arguments, * VarianceAggregator constructor. * * @param isSampleStdDev true for sample standard deviation aggregator, false for population - * standard deviation aggregator. + * standard deviation aggregator. * @param arguments aggregator arguments. 
* @param returnType aggregator return types. */ @@ -74,8 +66,8 @@ public StdDevAggregator.StdDevState create() { } @Override - protected StdDevAggregator.StdDevState iterate(ExprValue value, - StdDevAggregator.StdDevState state) { + protected StdDevAggregator.StdDevState iterate( + ExprValue value, StdDevAggregator.StdDevState state) { state.evaluate(value); return state; } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java index f5b042034a..d637721980 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.model.ExprValueUtils.doubleValue; @@ -28,8 +27,8 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The sum aggregator aggregate the value evaluated by the expression. - * If the expression evaluated result is NULL or MISSING, then the result is NULL. + * The sum aggregator aggregate the value evaluated by the expression. If the expression evaluated + * result is NULL or MISSING, then the result is NULL. */ public class SumAggregator extends Aggregator { @@ -54,9 +53,7 @@ public String toString() { return String.format(Locale.ROOT, "sum(%s)", format(getArguments())); } - /** - * Sum State. - */ + /** Sum State. */ protected static class SumState implements AggregationState { private final ExprCoreType type; @@ -69,9 +66,7 @@ protected static class SumState implements AggregationState { isEmptyCollection = true; } - /** - * Add value to current sumResult. - */ + /** Add value to current sumResult. 
*/ public void add(ExprValue value) { switch (type) { case INTEGER: diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java index cff08bb098..8791973353 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.utils.ExpressionUtils.format; @@ -18,8 +17,8 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The take aggregator keeps and returns the original values of a field. - * If the field value is NULL or MISSING, then it is skipped. + * The take aggregator keeps and returns the original values of a field. If the field value is NULL + * or MISSING, then it is skipped. */ public class TakeAggregator extends Aggregator { @@ -43,9 +42,7 @@ public String toString() { return String.format(Locale.ROOT, "take(%s)", format(getArguments())); } - /** - * Take State. - */ + /** Take State. */ protected static class TakeState implements AggregationState { protected int index; protected int size; diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java index bd9f0948f6..920830d266 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java @@ -26,26 +26,18 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * Variance Aggregator. - */ +/** Variance Aggregator. 
*/ public class VarianceAggregator extends Aggregator { private final boolean isSampleVariance; - /** - * Build Population Variance {@link VarianceAggregator}. - */ - public static Aggregator variancePopulation(List arguments, - ExprCoreType returnType) { + /** Build Population Variance {@link VarianceAggregator}. */ + public static Aggregator variancePopulation(List arguments, ExprCoreType returnType) { return new VarianceAggregator(false, arguments, returnType); } - /** - * Build Sample Variance {@link VarianceAggregator}. - */ - public static Aggregator varianceSample(List arguments, - ExprCoreType returnType) { + /** Build Sample Variance {@link VarianceAggregator}. */ + public static Aggregator varianceSample(List arguments, ExprCoreType returnType) { return new VarianceAggregator(true, arguments, returnType); } diff --git a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java index ad7860a6dc..e18f33c6ae 100644 --- a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java +++ b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.conditional.cases; import static org.opensearch.sql.data.type.ExprCoreType.UNDEFINED; @@ -32,19 +31,13 @@ @ToString public class CaseClause extends FunctionExpression { - /** - * List of WHEN clauses. - */ + /** List of WHEN clauses. */ private final List whenClauses; - /** - * Default result if none of WHEN conditions match. - */ + /** Default result if none of WHEN conditions match. */ private final Expression defaultResult; - /** - * Initialize case clause. - */ + /** Initialize case clause. 
*/ public CaseClause(List whenClauses, Expression defaultResult) { super(FunctionName.of("case"), concatArgs(whenClauses, defaultResult)); this.whenClauses = whenClauses; @@ -75,15 +68,13 @@ public T accept(ExpressionNodeVisitor visitor, C context) { } /** - * Get types of each result in WHEN clause and ELSE clause. - * Exclude UNKNOWN type from NULL literal which means NULL in THEN or ELSE clause - * is not included in result. + * Get types of each result in WHEN clause and ELSE clause. Exclude UNKNOWN type from NULL literal + * which means NULL in THEN or ELSE clause is not included in result. + * * @return all result types. Use list so caller can generate friendly error message. */ public List allResultTypes() { - List types = whenClauses.stream() - .map(WhenClause::type) - .collect(Collectors.toList()); + List types = whenClauses.stream().map(WhenClause::type).collect(Collectors.toList()); if (defaultResult != null) { types.add(defaultResult.type()); } @@ -92,8 +83,8 @@ public List allResultTypes() { return types; } - private static List concatArgs(List whenClauses, - Expression defaultResult) { + private static List concatArgs( + List whenClauses, Expression defaultResult) { ImmutableList.Builder args = ImmutableList.builder(); whenClauses.forEach(args::add); @@ -102,5 +93,4 @@ private static List concatArgs(List whenClauses, } return args.build(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java index fd2eeab983..7eb731a1f2 100644 --- a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java +++ b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.conditional.cases; import com.google.common.collect.ImmutableList; @@ -18,27 +17,19 @@ import 
org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.FunctionName; -/** - * WHEN clause that consists of a condition and a result corresponding. - */ +/** WHEN clause that consists of a condition and a result corresponding. */ @EqualsAndHashCode(callSuper = false) @Getter @ToString public class WhenClause extends FunctionExpression { - /** - * Condition that must be a predicate. - */ + /** Condition that must be a predicate. */ private final Expression condition; - /** - * Result to return if condition is evaluated to true. - */ + /** Result to return if condition is evaluated to true. */ private final Expression result; - /** - * Initialize when clause. - */ + /** Initialize when clause. */ public WhenClause(Expression condition, Expression result) { super(FunctionName.of("when"), ImmutableList.of(condition, result)); this.condition = condition; @@ -47,8 +38,9 @@ public WhenClause(Expression condition, Expression result) { /** * Evaluate when condition. 
- * @param valueEnv value env - * @return is condition satisfied + * + * @param valueEnv value env + * @return is condition satisfied */ public boolean isTrue(Environment valueEnv) { ExprValue result = condition.valueOf(valueEnv); @@ -72,5 +64,4 @@ public ExprType type() { public T accept(ExpressionNodeVisitor visitor, C context) { return visitor.visitWhen(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java b/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java index c5b6343991..0837075f7c 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import com.google.common.collect.ImmutableList; @@ -18,6 +17,7 @@ class CalendarLookup { /** * Get a calendar for the specific mode. + * * @param mode Mode to get calendar for. * @param date Date to get calendar for. */ @@ -38,6 +38,7 @@ private static Calendar getCalendar(int mode, LocalDate date) { /** * Set first day of week, minimal days in first week and date in calendar. + * * @param firstDayOfWeek the given first day of the week. * @param minimalDaysInWeek the given minimal days required in the first week of the year. * @param date the given date. @@ -52,6 +53,7 @@ private static Calendar getCalendar(int firstDayOfWeek, int minimalDaysInWeek, L /** * Returns week number for date according to mode. + * * @param mode Integer for mode. Valid mode values are 0 to 7. * @param date LocalDate for date. */ @@ -68,6 +70,7 @@ static int getWeekNumber(int mode, LocalDate date) { /** * Returns year for date according to mode. + * * @param mode Integer for mode. Valid mode values are 0 to 7. * @param date LocalDate for date. 
*/ diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java index 55bfa67f3f..13f9a077e4 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java @@ -28,8 +28,8 @@ import org.opensearch.sql.expression.function.FunctionProperties; /** - * This class converts a SQL style DATE_FORMAT format specifier and converts it to a - * Java SimpleDateTime format. + * This class converts a SQL style DATE_FORMAT format specifier and converts it to a Java + * SimpleDateTime format. */ class DateTimeFormatterUtil { private static final int SUFFIX_SPECIAL_START_TH = 11; @@ -39,8 +39,7 @@ class DateTimeFormatterUtil { private static final String NANO_SEC_FORMAT = "'%06d'"; private static final Map SUFFIX_CONVERTER = - ImmutableMap.builder() - .put(1, "st").put(2, "nd").put(3, "rd").build(); + ImmutableMap.builder().put(1, "st").put(2, "nd").put(3, "rd").build(); // The following have special cases that need handling outside of the format options provided // by the DateTimeFormatter class. 
@@ -50,52 +49,70 @@ interface DateTimeFormatHandler { private static final Map DATE_HANDLERS = ImmutableMap.builder() - .put("%a", (date) -> "EEE") // %a => EEE - Abbreviated weekday name (Sun..Sat) - .put("%b", (date) -> "LLL") // %b => LLL - Abbreviated month name (Jan..Dec) - .put("%c", (date) -> "MM") // %c => MM - Month, numeric (0..12) - .put("%d", (date) -> "dd") // %d => dd - Day of the month, numeric (00..31) - .put("%e", (date) -> "d") // %e => d - Day of the month, numeric (0..31) - .put("%H", (date) -> "HH") // %H => HH - (00..23) - .put("%h", (date) -> "hh") // %h => hh - (01..12) - .put("%I", (date) -> "hh") // %I => hh - (01..12) - .put("%i", (date) -> "mm") // %i => mm - Minutes, numeric (00..59) - .put("%j", (date) -> "DDD") // %j => DDD - (001..366) - .put("%k", (date) -> "H") // %k => H - (0..23) - .put("%l", (date) -> "h") // %l => h - (1..12) - .put("%p", (date) -> "a") // %p => a - AM or PM - .put("%M", (date) -> "LLLL") // %M => LLLL - Month name (January..December) - .put("%m", (date) -> "MM") // %m => MM - Month, numeric (00..12) - .put("%r", (date) -> "hh:mm:ss a") // %r => hh:mm:ss a - hh:mm:ss followed by AM or PM - .put("%S", (date) -> "ss") // %S => ss - Seconds (00..59) - .put("%s", (date) -> "ss") // %s => ss - Seconds (00..59) - .put("%T", (date) -> "HH:mm:ss") // %T => HH:mm:ss - .put("%W", (date) -> "EEEE") // %W => EEEE - Weekday name (Sunday..Saturday) - .put("%Y", (date) -> "yyyy") // %Y => yyyy - Year, numeric, 4 digits - .put("%y", (date) -> "yy") // %y => yy - Year, numeric, 2 digits - // The following are not directly supported by DateTimeFormatter. 
- .put("%D", (date) -> // %w - Day of month with English suffix - String.format("'%d%s'", date.getDayOfMonth(), getSuffix(date.getDayOfMonth()))) - .put("%f", (date) -> // %f - Microseconds - String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) - .put("%w", (date) -> // %w - Day of week (0 indexed) - String.format("'%d'", date.getDayOfWeek().getValue())) - .put("%U", (date) -> // %U Week where Sunday is the first day - WEEK() mode 0 - String.format("'%d'", CalendarLookup.getWeekNumber(0, date.toLocalDate()))) - .put("%u", (date) -> // %u Week where Monday is the first day - WEEK() mode 1 - String.format("'%d'", CalendarLookup.getWeekNumber(1, date.toLocalDate()))) - .put("%V", (date) -> // %V Week where Sunday is the first day - WEEK() mode 2 used with %X - String.format("'%d'", CalendarLookup.getWeekNumber(2, date.toLocalDate()))) - .put("%v", (date) -> // %v Week where Monday is the first day - WEEK() mode 3 used with %x - String.format("'%d'", CalendarLookup.getWeekNumber(3, date.toLocalDate()))) - .put("%X", (date) -> // %X Year for week where Sunday is the first day, 4 digits used with %V - String.format("'%d'", CalendarLookup.getYearNumber(2, date.toLocalDate()))) - .put("%x", (date) -> // %x Year for week where Monday is the first day, 4 digits used with %v - String.format("'%d'", CalendarLookup.getYearNumber(3, date.toLocalDate()))) - .build(); - - //Handlers for the time_format function. - //Some format specifiers return 0 or null to align with MySQL. 
- //https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_time-format + .put("%a", (date) -> "EEE") // %a => EEE - Abbreviated weekday name (Sun..Sat) + .put("%b", (date) -> "LLL") // %b => LLL - Abbreviated month name (Jan..Dec) + .put("%c", (date) -> "MM") // %c => MM - Month, numeric (0..12) + .put("%d", (date) -> "dd") // %d => dd - Day of the month, numeric (00..31) + .put("%e", (date) -> "d") // %e => d - Day of the month, numeric (0..31) + .put("%H", (date) -> "HH") // %H => HH - (00..23) + .put("%h", (date) -> "hh") // %h => hh - (01..12) + .put("%I", (date) -> "hh") // %I => hh - (01..12) + .put("%i", (date) -> "mm") // %i => mm - Minutes, numeric (00..59) + .put("%j", (date) -> "DDD") // %j => DDD - (001..366) + .put("%k", (date) -> "H") // %k => H - (0..23) + .put("%l", (date) -> "h") // %l => h - (1..12) + .put("%p", (date) -> "a") // %p => a - AM or PM + .put("%M", (date) -> "LLLL") // %M => LLLL - Month name (January..December) + .put("%m", (date) -> "MM") // %m => MM - Month, numeric (00..12) + .put("%r", (date) -> "hh:mm:ss a") // %r => hh:mm:ss a - hh:mm:ss followed by AM or PM + .put("%S", (date) -> "ss") // %S => ss - Seconds (00..59) + .put("%s", (date) -> "ss") // %s => ss - Seconds (00..59) + .put("%T", (date) -> "HH:mm:ss") // %T => HH:mm:ss + .put("%W", (date) -> "EEEE") // %W => EEEE - Weekday name (Sunday..Saturday) + .put("%Y", (date) -> "yyyy") // %Y => yyyy - Year, numeric, 4 digits + .put("%y", (date) -> "yy") // %y => yy - Year, numeric, 2 digits + // The following are not directly supported by DateTimeFormatter. 
+ .put( + "%D", + (date) -> // %w - Day of month with English suffix + String.format("'%d%s'", date.getDayOfMonth(), getSuffix(date.getDayOfMonth()))) + .put( + "%f", + (date) -> // %f - Microseconds + String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) + .put( + "%w", + (date) -> // %w - Day of week (0 indexed) + String.format("'%d'", date.getDayOfWeek().getValue())) + .put( + "%U", + (date) -> // %U Week where Sunday is the first day - WEEK() mode 0 + String.format("'%d'", CalendarLookup.getWeekNumber(0, date.toLocalDate()))) + .put( + "%u", + (date) -> // %u Week where Monday is the first day - WEEK() mode 1 + String.format("'%d'", CalendarLookup.getWeekNumber(1, date.toLocalDate()))) + .put( + "%V", + (date) -> // %V Week where Sunday is the first day - WEEK() mode 2 used with %X + String.format("'%d'", CalendarLookup.getWeekNumber(2, date.toLocalDate()))) + .put( + "%v", + (date) -> // %v Week where Monday is the first day - WEEK() mode 3 used with %x + String.format("'%d'", CalendarLookup.getWeekNumber(3, date.toLocalDate()))) + .put( + "%X", + (date) -> // %X Year for week where Sunday is the first day, 4 digits used with %V + String.format("'%d'", CalendarLookup.getYearNumber(2, date.toLocalDate()))) + .put( + "%x", + (date) -> // %x Year for week where Monday is the first day, 4 digits used with %v + String.format("'%d'", CalendarLookup.getYearNumber(3, date.toLocalDate()))) + .build(); + + // Handlers for the time_format function. + // Some format specifiers return 0 or null to align with MySQL. 
+ // https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_time-format private static final Map TIME_HANDLERS = ImmutableMap.builder() .put("%a", (date) -> null) @@ -121,8 +138,8 @@ interface DateTimeFormatHandler { .put("%Y", (date) -> "0000") .put("%y", (date) -> "00") .put("%D", (date) -> null) - .put("%f", (date) -> // %f - Microseconds - String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) + // %f - Microseconds + .put("%f", (date) -> String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) .put("%w", (date) -> null) .put("%U", (date) -> null) .put("%u", (date) -> null) @@ -157,7 +174,7 @@ interface DateTimeFormatHandler { .put("%Y", "u") // %Y => yyyy - Year, numeric, 4 digits .put("%y", "u") // %y => yy - Year, numeric, 2 digits .put("%f", "n") // %f => n - Nanoseconds - //The following have been implemented but cannot be aligned with + // The following have been implemented but cannot be aligned with // MySQL due to the limitations of the DatetimeFormatter .put("%D", "d") // %w - Day of month with English suffix .put("%w", "e") // %w - Day of week (0 indexed) @@ -170,20 +187,19 @@ interface DateTimeFormatHandler { .build(); private static final Pattern pattern = Pattern.compile("%."); - private static final Pattern CHARACTERS_WITH_NO_MOD_LITERAL_BEHIND_PATTERN - = Pattern.compile("(? 
handler, - LocalDateTime datetime) { + static ExprValue getFormattedString( + ExprValue formatExpr, Map handler, LocalDateTime datetime) { StringBuffer cleanFormat = getCleanFormat(formatExpr); final Matcher matcher = pattern.matcher(cleanFormat.toString()); final StringBuffer format = new StringBuffer(); try { while (matcher.find()) { - matcher.appendReplacement(format, - handler.getOrDefault(matcher.group(), (d) -> - String.format("'%s'", matcher.group().replaceFirst(MOD_LITERAL, ""))) + matcher.appendReplacement( + format, + handler + .getOrDefault( + matcher.group(), + (d) -> String.format("'%s'", matcher.group().replaceFirst(MOD_LITERAL, ""))) .getFormat(datetime)); } } catch (Exception e) { @@ -219,12 +238,13 @@ static ExprValue getFormattedString(ExprValue formatExpr, // English Locale matches SQL requirements. // 'AM'/'PM' instead of 'a.m.'/'p.m.' // 'Sat' instead of 'Sat.' etc - return new ExprStringValue(datetime.format( - DateTimeFormatter.ofPattern(format.toString(), Locale.ENGLISH))); + return new ExprStringValue( + datetime.format(DateTimeFormatter.ofPattern(format.toString(), Locale.ENGLISH))); } /** * Format the date using the date format String. + * * @param dateExpr the date ExprValue of Date/Datetime/Timestamp/String type. * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. @@ -242,13 +262,14 @@ static ExprValue getFormattedDateOfToday(ExprValue formatExpr, ExprValue time, C /** * Format the date using the date format String. + * * @param timeExpr the date ExprValue of Date/Datetime/Timestamp/String type. * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. */ static ExprValue getFormattedTime(ExprValue timeExpr, ExprValue formatExpr) { - //Initializes DateTime with LocalDate.now(). This is safe because the date is ignored. 
- //The time_format function will only return 0 or null for invalid string format specifiers. + // Initializes DateTime with LocalDate.now(). This is safe because the date is ignored. + // The time_format function will only return 0 or null for invalid string format specifiers. final LocalDateTime time = LocalDateTime.of(LocalDate.now(), timeExpr.timeValue()); return getFormattedString(formatExpr, TIME_HANDLERS, time); @@ -266,30 +287,33 @@ private static boolean canGetTime(TemporalAccessor ta) { && ta.isSupported(ChronoField.SECOND_OF_MINUTE)); } - static ExprValue parseStringWithDateOrTime(FunctionProperties fp, - ExprValue datetimeStringExpr, - ExprValue formatExpr) { + static ExprValue parseStringWithDateOrTime( + FunctionProperties fp, ExprValue datetimeStringExpr, ExprValue formatExpr) { - //Replace patterns with % for Java DateTimeFormatter + // Replace patterns with % for Java DateTimeFormatter StringBuffer cleanFormat = getCleanFormat(formatExpr); final Matcher matcher = pattern.matcher(cleanFormat.toString()); final StringBuffer format = new StringBuffer(); while (matcher.find()) { - matcher.appendReplacement(format, - STR_TO_DATE_FORMATS.getOrDefault(matcher.group(), + matcher.appendReplacement( + format, + STR_TO_DATE_FORMATS.getOrDefault( + matcher.group(), String.format("'%s'", matcher.group().replaceFirst(MOD_LITERAL, "")))); } matcher.appendTail(format); TemporalAccessor taWithMissingFields; - //Return NULL for invalid parse in string to align with MySQL + // Return NULL for invalid parse in string to align with MySQL try { - //Get Temporal Accessor to initially parse string without default values - taWithMissingFields = new DateTimeFormatterBuilder() - .appendPattern(format.toString()) - .toFormatter().withResolverStyle(ResolverStyle.STRICT) - .parseUnresolved(datetimeStringExpr.stringValue(), new ParsePosition(0)); + // Get Temporal Accessor to initially parse string without default values + taWithMissingFields = + new DateTimeFormatterBuilder() + 
.appendPattern(format.toString()) + .toFormatter() + .withResolverStyle(ResolverStyle.STRICT) + .parseUnresolved(datetimeStringExpr.stringValue(), new ParsePosition(0)); if (taWithMissingFields == null) { throw new DateTimeException("Input string could not be parsed properly."); } @@ -300,31 +324,42 @@ static ExprValue parseStringWithDateOrTime(FunctionProperties fp, return ExprNullValue.of(); } - int year = taWithMissingFields.isSupported(ChronoField.YEAR) - ? taWithMissingFields.get(ChronoField.YEAR) : 2000; - - int month = taWithMissingFields.isSupported(ChronoField.MONTH_OF_YEAR) - ? taWithMissingFields.get(ChronoField.MONTH_OF_YEAR) : 1; - - int day = taWithMissingFields.isSupported(ChronoField.DAY_OF_MONTH) - ? taWithMissingFields.get(ChronoField.DAY_OF_MONTH) : 1; - - int hour = taWithMissingFields.isSupported(ChronoField.HOUR_OF_DAY) - ? taWithMissingFields.get(ChronoField.HOUR_OF_DAY) : 0; - - int minute = taWithMissingFields.isSupported(ChronoField.MINUTE_OF_HOUR) - ? taWithMissingFields.get(ChronoField.MINUTE_OF_HOUR) : 0; - - int second = taWithMissingFields.isSupported(ChronoField.SECOND_OF_MINUTE) - ? taWithMissingFields.get(ChronoField.SECOND_OF_MINUTE) : 0; - - //Fill returned datetime with current date if only Time information was parsed + int year = + taWithMissingFields.isSupported(ChronoField.YEAR) + ? taWithMissingFields.get(ChronoField.YEAR) + : 2000; + + int month = + taWithMissingFields.isSupported(ChronoField.MONTH_OF_YEAR) + ? taWithMissingFields.get(ChronoField.MONTH_OF_YEAR) + : 1; + + int day = + taWithMissingFields.isSupported(ChronoField.DAY_OF_MONTH) + ? taWithMissingFields.get(ChronoField.DAY_OF_MONTH) + : 1; + + int hour = + taWithMissingFields.isSupported(ChronoField.HOUR_OF_DAY) + ? taWithMissingFields.get(ChronoField.HOUR_OF_DAY) + : 0; + + int minute = + taWithMissingFields.isSupported(ChronoField.MINUTE_OF_HOUR) + ? 
taWithMissingFields.get(ChronoField.MINUTE_OF_HOUR) + : 0; + + int second = + taWithMissingFields.isSupported(ChronoField.SECOND_OF_MINUTE) + ? taWithMissingFields.get(ChronoField.SECOND_OF_MINUTE) + : 0; + + // Fill returned datetime with current date if only Time information was parsed LocalDateTime output; if (!canGetDate(taWithMissingFields)) { - output = LocalDateTime.of( - LocalDate.now(fp.getQueryStartClock()), - LocalTime.of(hour, minute, second) - ); + output = + LocalDateTime.of( + LocalDate.now(fp.getQueryStartClock()), LocalTime.of(hour, minute, second)); } else { output = LocalDateTime.of(year, month, day, hour, minute, second); } @@ -334,6 +369,7 @@ static ExprValue parseStringWithDateOrTime(FunctionProperties fp, /** * Returns English suffix of incoming value. + * * @param val Incoming value. * @return English suffix as String (st, nd, rd, th) */ diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java index 0ea1d02eaf..d17d59d358 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; - import static java.time.temporal.ChronoUnit.DAYS; import static java.time.temporal.ChronoUnit.HOURS; import static java.time.temporal.ChronoUnit.MICROS; @@ -100,14 +98,13 @@ import org.opensearch.sql.utils.DateTimeUtils; /** - * The definition of date and time functions. - * 1) have the clear interface for function define. - * 2) the implementation should rely on ExprValue. + * The definition of date and time functions. 1) have the clear interface for function define. 2) + * the implementation should rely on ExprValue. 
*/ @UtilityClass @SuppressWarnings("unchecked") public class DateTimeFunction { - //The number of seconds per day + // The number of seconds per day public static final long SECONDS_PER_DAY = 86400; // The number of days from year zero to year 1970. @@ -254,11 +251,12 @@ public void register(BuiltinFunctionRepository repository) { * `now(y) return different values. */ private FunctionResolver now(FunctionName functionName) { - return define(functionName, + return define( + functionName, implWithProperties( - functionProperties -> new ExprDatetimeValue( - formatNow(functionProperties.getQueryStartClock())), DATETIME) - ); + functionProperties -> + new ExprDatetimeValue(formatNow(functionProperties.getQueryStartClock())), + DATETIME)); } private FunctionResolver now() { @@ -277,25 +275,28 @@ private FunctionResolver localtime() { return now(BuiltinFunctionName.LOCALTIME.getName()); } - /** - * SYSDATE() returns the time at which it executes. - */ + /** SYSDATE() returns the time at which it executes. */ private FunctionResolver sysdate() { - return define(BuiltinFunctionName.SYSDATE.getName(), - implWithProperties(functionProperties - -> new ExprDatetimeValue(formatNow(Clock.systemDefaultZone())), DATETIME), - FunctionDSL.implWithProperties((functionProperties, v) -> new ExprDatetimeValue( - formatNow(Clock.systemDefaultZone(), v.integerValue())), DATETIME, INTEGER) - ); + return define( + BuiltinFunctionName.SYSDATE.getName(), + implWithProperties( + functionProperties -> new ExprDatetimeValue(formatNow(Clock.systemDefaultZone())), + DATETIME), + FunctionDSL.implWithProperties( + (functionProperties, v) -> + new ExprDatetimeValue(formatNow(Clock.systemDefaultZone(), v.integerValue())), + DATETIME, + INTEGER)); } - /** - * Synonym for @see `now`. - */ + /** Synonym for @see `now`. 
*/ private FunctionResolver curtime(FunctionName functionName) { - return define(functionName, - implWithProperties(functionProperties -> new ExprTimeValue( - formatNow(functionProperties.getQueryStartClock()).toLocalTime()), TIME)); + return define( + functionName, + implWithProperties( + functionProperties -> + new ExprTimeValue(formatNow(functionProperties.getQueryStartClock()).toLocalTime()), + TIME)); } private FunctionResolver curtime() { @@ -307,9 +308,12 @@ private FunctionResolver current_time() { } private FunctionResolver curdate(FunctionName functionName) { - return define(functionName, - implWithProperties(functionProperties -> new ExprDateValue( - formatNow(functionProperties.getQueryStartClock()).toLocalDate()), DATE)); + return define( + functionName, + implWithProperties( + functionProperties -> + new ExprDateValue(formatNow(functionProperties.getQueryStartClock()).toLocalDate()), + DATE)); } private FunctionResolver curdate() { @@ -321,32 +325,32 @@ private FunctionResolver current_date() { } /** - * A common signature for `date_add` and `date_sub`. - * Specify a start date and add/subtract a temporal amount to/from the date. + * A common signature for `date_add` and `date_sub`.
+ * Specify a start date and add/subtract a temporal amount to/from the date.
* The return type depends on the date type and the interval unit. Detailed supported signatures: - * (DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME - * MySQL has these signatures too - * (DATE, INTERVAL) -> DATE // when interval has no time part - * (TIME, INTERVAL) -> TIME // when interval has no date part - * (STRING, INTERVAL) -> STRING // when argument has date or datetime string, - * // result has date or datetime depending on interval type + *
+ * (DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME
+ * MySQL has these signatures too
+ * (DATE, INTERVAL) -> DATE // when interval has no time part
+ * (TIME, INTERVAL) -> TIME // when interval has no date part
+ * (STRING, INTERVAL) -> STRING // when argument has date or datetime string,
+ * // result has date or datetime depending on interval type
*/ private Stream> get_date_add_date_sub_signatures( SerializableTriFunction function) { return Stream.of( implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATE, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), - DATETIME, DATETIME, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), - DATETIME, TIMESTAMP, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, INTERVAL) - ); + implWithProperties( + nullMissingHandlingWithProperties(function), DATETIME, DATETIME, INTERVAL), + implWithProperties( + nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, INTERVAL), + implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, INTERVAL)); } /** - * A common signature for `adddate` and `subdate`. - * Adds/subtracts an integer number of days to/from the first argument. - * (DATE, LONG) -> DATE + * A common signature for `adddate` and `subdate`.
+ * Adds/subtracts an integer number of days to/from the first argument.
+ * (DATE, LONG) -> DATE
* (TIME/DATETIME/TIMESTAMP, LONG) -> DATETIME */ private Stream> get_adddate_subdate_signatures( @@ -355,87 +359,128 @@ private FunctionResolver current_date() { implWithProperties(nullMissingHandlingWithProperties(function), DATE, DATE, LONG), implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATETIME, LONG), implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, LONG) - ); + implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, LONG)); } private DefaultFunctionResolver adddate() { - return define(BuiltinFunctionName.ADDDATE.getName(), + return define( + BuiltinFunctionName.ADDDATE.getName(), (SerializableFunction>[]) (Stream.concat( - get_date_add_date_sub_signatures(DateTimeFunction::exprAddDateInterval), - get_adddate_subdate_signatures(DateTimeFunction::exprAddDateDays)) + get_date_add_date_sub_signatures(DateTimeFunction::exprAddDateInterval), + get_adddate_subdate_signatures(DateTimeFunction::exprAddDateDays)) .toArray(SerializableFunction[]::new))); } /** - * Adds expr2 to expr1 and returns the result. - * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME - * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME - * TODO: MySQL has these signatures too - * (STRING, STRING/TIME) -> STRING // second arg - string with time only - * (x, STRING) -> NULL // second arg - string with timestamp - * (x, STRING/DATE) -> x // second arg - string with date only + * Adds expr2 to expr1 and returns the result.
+ * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
+ * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * TODO: MySQL has these signatures too
+ * (STRING, STRING/TIME) -> STRING // second arg - string with time only
+ * (x, STRING) -> NULL // second arg - string with timestamp
+ * (x, STRING/DATE) -> x // second arg - string with date only */ private DefaultFunctionResolver addtime() { - return define(BuiltinFunctionName.ADDTIME.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, 
TIMESTAMP, TIMESTAMP) - ); - } - - /** - * Converts date/time from a specified timezone to another specified timezone. - * The supported signatures: - * (DATETIME, STRING, STRING) -> DATETIME - * (STRING, STRING, STRING) -> DATETIME - */ - private DefaultFunctionResolver convert_tz() { - return define(BuiltinFunctionName.CONVERT_TZ.getName(), - impl(nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, DATETIME, STRING, STRING), - impl(nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, STRING, STRING, STRING) - ); + return define( + BuiltinFunctionName.ADDTIME.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + TIME, + TIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATE, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + 
DATETIME, + DATE, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + TIMESTAMP)); } /** - * Extracts the date part of a date and time value. - * Also to construct a date type. The supported signatures: - * STRING/DATE/DATETIME/TIMESTAMP -> DATE + * Converts date/time from a specified timezone to another specified timezone.
+ * The supported signatures:
+ * (DATETIME, STRING, STRING) -> DATETIME
+ * (STRING, STRING, STRING) -> DATETIME + */ + private DefaultFunctionResolver convert_tz() { + return define( + BuiltinFunctionName.CONVERT_TZ.getName(), + impl( + nullMissingHandling(DateTimeFunction::exprConvertTZ), + DATETIME, + DATETIME, + STRING, + STRING), + impl( + nullMissingHandling(DateTimeFunction::exprConvertTZ), + DATETIME, + STRING, + STRING, + STRING)); + } + + /** + * Extracts the date part of a date and time value. Also to construct a date type. The supported + * signatures: STRING/DATE/DATETIME/TIMESTAMP -> DATE */ private DefaultFunctionResolver date() { - return define(BuiltinFunctionName.DATE.getName(), + return define( + BuiltinFunctionName.DATE.getName(), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, STRING), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATE), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATETIME), @@ -447,273 +492,310 @@ private DefaultFunctionResolver date() { * (DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME) -> LONG */ private DefaultFunctionResolver datediff() { - return define(BuiltinFunctionName.DATEDIFF.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, TIME), - 
implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, TIME)); - } - - /** - * Specify a datetime with time zone field and a time zone to convert to. - * Returns a local date time. 
- * (STRING, STRING) -> DATETIME + return define( + BuiltinFunctionName.DATEDIFF.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATE, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, TIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, TIME, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATE, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + TIME)); + 
} + + /** + * Specify a datetime with time zone field and a time zone to convert to.
+ * Returns a local date time.
+ * (STRING, STRING) -> DATETIME
* (STRING) -> DATETIME */ private FunctionResolver datetime() { - return define(BuiltinFunctionName.DATETIME.getName(), - impl(nullMissingHandling(DateTimeFunction::exprDateTime), - DATETIME, STRING, STRING), - impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), - DATETIME, STRING) - ); + return define( + BuiltinFunctionName.DATETIME.getName(), + impl(nullMissingHandling(DateTimeFunction::exprDateTime), DATETIME, STRING, STRING), + impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), DATETIME, STRING)); } private DefaultFunctionResolver date_add() { - return define(BuiltinFunctionName.DATE_ADD.getName(), + return define( + BuiltinFunctionName.DATE_ADD.getName(), (SerializableFunction>[]) get_date_add_date_sub_signatures(DateTimeFunction::exprAddDateInterval) .toArray(SerializableFunction[]::new)); } private DefaultFunctionResolver date_sub() { - return define(BuiltinFunctionName.DATE_SUB.getName(), + return define( + BuiltinFunctionName.DATE_SUB.getName(), (SerializableFunction>[]) get_date_add_date_sub_signatures(DateTimeFunction::exprSubDateInterval) .toArray(SerializableFunction[]::new)); } - /** - * DAY(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). - */ + /** DAY(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). */ private DefaultFunctionResolver day() { - return define(BuiltinFunctionName.DAY.getName(), + return define( + BuiltinFunctionName.DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING)); } /** - * DAYNAME(STRING/DATE/DATETIME/TIMESTAMP). 
- * return the name of the weekday for date, including Monday, Tuesday, Wednesday, - * Thursday, Friday, Saturday and Sunday. + * DAYNAME(STRING/DATE/DATETIME/TIMESTAMP). return the name of the weekday for date, including + * Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. */ private DefaultFunctionResolver dayName() { - return define(BuiltinFunctionName.DAYNAME.getName(), + return define( + BuiltinFunctionName.DAYNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, STRING)); } - /** - * DAYOFMONTH(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). - */ + /** DAYOFMONTH(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). */ private DefaultFunctionResolver dayOfMonth(BuiltinFunctionName name) { - return define(name.getName(), - implWithProperties(nullMissingHandlingWithProperties( - (functionProperties, arg) -> DateTimeFunction.dayOfMonthToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + name.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.dayOfMonthToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP)); } /** - * DAYOFWEEK(STRING/DATE/DATETIME/TIME/TIMESTAMP). 
- * return the weekday index for date (1 = Sunday, 2 = Monday, ..., 7 = Saturday). + * DAYOFWEEK(STRING/DATE/DATETIME/TIME/TIMESTAMP). return the weekday index for date (1 = Sunday, + * 2 = Monday, ..., 7 = Saturday). */ private DefaultFunctionResolver dayOfWeek(FunctionName name) { - return define(name, - implWithProperties(nullMissingHandlingWithProperties( - (functionProperties, arg) -> DateTimeFunction.dayOfWeekToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + name, + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.dayOfWeekToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, STRING)); } - /** - * DAYOFYEAR(STRING/DATE/DATETIME/TIMESTAMP). - * return the day of the year for date (1-366). - */ + /** DAYOFYEAR(STRING/DATE/DATETIME/TIMESTAMP). return the day of the year for date (1-366). 
*/ private DefaultFunctionResolver dayOfYear(BuiltinFunctionName dayOfYear) { - return define(dayOfYear.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.dayOfYearToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + dayOfYear.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.dayOfYearToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, STRING)); } private DefaultFunctionResolver extract() { - return define(BuiltinFunctionName.EXTRACT.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprExtractForTime), - LONG, STRING, TIME), + return define( + BuiltinFunctionName.EXTRACT.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprExtractForTime), + LONG, + STRING, + TIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATE), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, STRING)); } - /** - * FROM_DAYS(LONG). return the date value given the day number N. - */ + /** FROM_DAYS(LONG). return the date value given the day number N. 
*/ private DefaultFunctionResolver from_days() { - return define(BuiltinFunctionName.FROM_DAYS.getName(), + return define( + BuiltinFunctionName.FROM_DAYS.getName(), impl(nullMissingHandling(DateTimeFunction::exprFromDays), DATE, LONG)); } private FunctionResolver from_unixtime() { - return define(BuiltinFunctionName.FROM_UNIXTIME.getName(), + return define( + BuiltinFunctionName.FROM_UNIXTIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprFromUnixTime), DATETIME, DOUBLE), - impl(nullMissingHandling(DateTimeFunction::exprFromUnixTimeFormat), - STRING, DOUBLE, STRING)); + impl( + nullMissingHandling(DateTimeFunction::exprFromUnixTimeFormat), STRING, DOUBLE, STRING)); } private DefaultFunctionResolver get_format() { - return define(BuiltinFunctionName.GET_FORMAT.getName(), - impl(nullMissingHandling(DateTimeFunction::exprGetFormat), STRING, STRING, STRING) - ); + return define( + BuiltinFunctionName.GET_FORMAT.getName(), + impl(nullMissingHandling(DateTimeFunction::exprGetFormat), STRING, STRING, STRING)); } - /** - * HOUR(STRING/TIME/DATETIME/DATE/TIMESTAMP). return the hour value for time. - */ + /** HOUR(STRING/TIME/DATETIME/DATE/TIMESTAMP). return the hour value for time. 
*/ private DefaultFunctionResolver hour(BuiltinFunctionName name) { - return define(name.getName(), + return define( + name.getName(), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIMESTAMP)); } - private DefaultFunctionResolver last_day() { - return define(BuiltinFunctionName.LAST_DAY.getName(), + private DefaultFunctionResolver last_day() { + return define( + BuiltinFunctionName.LAST_DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, STRING), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.exprLastDayToday( - functionProperties.getQueryStartClock())), DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.exprLastDayToday(functionProperties.getQueryStartClock())), + DATE, + TIME), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATE), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, TIMESTAMP)); } private FunctionResolver makedate() { - return define(BuiltinFunctionName.MAKEDATE.getName(), + return define( + BuiltinFunctionName.MAKEDATE.getName(), impl(nullMissingHandling(DateTimeFunction::exprMakeDate), DATE, DOUBLE, DOUBLE)); } private FunctionResolver maketime() { - return define(BuiltinFunctionName.MAKETIME.getName(), + return define( + BuiltinFunctionName.MAKETIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprMakeTime), TIME, DOUBLE, 
DOUBLE, DOUBLE)); } - /** - * MICROSECOND(STRING/TIME/DATETIME/TIMESTAMP). return the microsecond value for time. - */ + /** MICROSECOND(STRING/TIME/DATETIME/TIMESTAMP). return the microsecond value for time. */ private DefaultFunctionResolver microsecond() { - return define(BuiltinFunctionName.MICROSECOND.getName(), + return define( + BuiltinFunctionName.MICROSECOND.getName(), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIMESTAMP)); } - /** - * MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. - */ + /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. */ private DefaultFunctionResolver minute(BuiltinFunctionName name) { - return define(name.getName(), + return define( + name.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIMESTAMP)); } - /** - * MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. - */ + /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. 
*/ private DefaultFunctionResolver minute_of_day() { - return define(BuiltinFunctionName.MINUTE_OF_DAY.getName(), + return define( + BuiltinFunctionName.MINUTE_OF_DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIMESTAMP)); } - /** - * MONTH(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-12). - */ + /** MONTH(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-12). */ private DefaultFunctionResolver month(BuiltinFunctionName month) { - return define(month.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.monthOfYearToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + month.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.monthOfYearToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, STRING)); } - /** - * MONTHNAME(STRING/DATE/DATETIME/TIMESTAMP). return the full name of the month for date. - */ + /** MONTHNAME(STRING/DATE/DATETIME/TIMESTAMP). return the full name of the month for date. 
*/ private DefaultFunctionResolver monthName() { - return define(BuiltinFunctionName.MONTHNAME.getName(), + return define( + BuiltinFunctionName.MONTHNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATE), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, STRING)); } /** @@ -721,130 +803,164 @@ private DefaultFunctionResolver monthName() { * (INTEGER, INTEGER) -> INTEGER */ private DefaultFunctionResolver period_add() { - return define(BuiltinFunctionName.PERIOD_ADD.getName(), - impl(nullMissingHandling(DateTimeFunction::exprPeriodAdd), INTEGER, INTEGER, INTEGER) - ); + return define( + BuiltinFunctionName.PERIOD_ADD.getName(), + impl(nullMissingHandling(DateTimeFunction::exprPeriodAdd), INTEGER, INTEGER, INTEGER)); } /** - * Returns the number of months between periods P1 and P2. - * P1 and P2 should be in the format YYMM or YYYYMM. + * Returns the number of months between periods P1 and P2. P1 and P2 should be in the format YYMM + * or YYYYMM.
* (INTEGER, INTEGER) -> INTEGER */ private DefaultFunctionResolver period_diff() { - return define(BuiltinFunctionName.PERIOD_DIFF.getName(), - impl(nullMissingHandling(DateTimeFunction::exprPeriodDiff), INTEGER, INTEGER, INTEGER) - ); + return define( + BuiltinFunctionName.PERIOD_DIFF.getName(), + impl(nullMissingHandling(DateTimeFunction::exprPeriodDiff), INTEGER, INTEGER, INTEGER)); } - /** - * QUARTER(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-4). - */ + /** QUARTER(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-4). */ private DefaultFunctionResolver quarter() { - return define(BuiltinFunctionName.QUARTER.getName(), + return define( + BuiltinFunctionName.QUARTER.getName(), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, STRING)); } private DefaultFunctionResolver sec_to_time() { - return define(BuiltinFunctionName.SEC_TO_TIME.getName(), + return define( + BuiltinFunctionName.SEC_TO_TIME.getName(), impl((nullMissingHandling(DateTimeFunction::exprSecToTime)), TIME, INTEGER), impl((nullMissingHandling(DateTimeFunction::exprSecToTime)), TIME, LONG), impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, DOUBLE), - impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, FLOAT) - ); + impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, FLOAT)); } - /** - * SECOND(STRING/TIME/DATETIME/TIMESTAMP). return the second value for time. - */ + /** SECOND(STRING/TIME/DATETIME/TIMESTAMP). return the second value for time. 
*/ private DefaultFunctionResolver second(BuiltinFunctionName name) { - return define(name.getName(), + return define( + name.getName(), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIMESTAMP)); } private DefaultFunctionResolver subdate() { - return define(BuiltinFunctionName.SUBDATE.getName(), + return define( + BuiltinFunctionName.SUBDATE.getName(), (SerializableFunction>[]) (Stream.concat( - get_date_add_date_sub_signatures(DateTimeFunction::exprSubDateInterval), - get_adddate_subdate_signatures(DateTimeFunction::exprSubDateDays)) + get_date_add_date_sub_signatures(DateTimeFunction::exprSubDateInterval), + get_adddate_subdate_signatures(DateTimeFunction::exprSubDateDays)) .toArray(SerializableFunction[]::new))); } /** - * Subtracts expr2 from expr1 and returns the result. - * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME - * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME - * TODO: MySQL has these signatures too - * (STRING, STRING/TIME) -> STRING // second arg - string with time only - * (x, STRING) -> NULL // second arg - string with timestamp - * (x, STRING/DATE) -> x // second arg - string with date only + * Subtracts expr2 from expr1 and returns the result.
+ * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
+ * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * TODO: MySQL has these signatures too
+ * (STRING, STRING/TIME) -> STRING // second arg - string with time only
+ * (x, STRING) -> NULL // second arg - string with timestamp
+ * (x, STRING/DATE) -> x // second arg - string with date only */ private DefaultFunctionResolver subtime() { - return define(BuiltinFunctionName.SUBTIME.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, 
TIMESTAMP, TIMESTAMP) - ); - } - - /** - * Extracts a date, time, or datetime from the given string. - * It accomplishes this using another string which specifies the input format. + return define( + BuiltinFunctionName.SUBTIME.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + TIME, + TIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATE, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATE, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + DATE), + implWithProperties( + 
nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + TIMESTAMP)); + } + + /** + * Extracts a date, time, or datetime from the given string. It accomplishes this using another + * string which specifies the input format. */ private DefaultFunctionResolver str_to_date() { - return define(BuiltinFunctionName.STR_TO_DATE.getName(), + return define( + BuiltinFunctionName.STR_TO_DATE.getName(), implWithProperties( - nullMissingHandlingWithProperties((functionProperties, arg, format) - -> DateTimeFunction.exprStrToDate(functionProperties, arg, format)), - DATETIME, STRING, STRING)); + nullMissingHandlingWithProperties( + (functionProperties, arg, format) -> + DateTimeFunction.exprStrToDate(functionProperties, arg, format)), + DATETIME, + STRING, + STRING)); } /** - * Extracts the time part of a date and time value. - * Also to construct a time type. The supported signatures: - * STRING/DATE/DATETIME/TIME/TIMESTAMP -> TIME + * Extracts the time part of a date and time value. Also to construct a time type. The supported + * signatures: STRING/DATE/DATETIME/TIME/TIMESTAMP -> TIME */ private DefaultFunctionResolver time() { - return define(BuiltinFunctionName.TIME.getName(), + return define( + BuiltinFunctionName.TIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, STRING), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATE), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATETIME), @@ -853,18 +969,19 @@ private DefaultFunctionResolver time() { } /** - * Returns different between two times as a time. 
- * (TIME, TIME) -> TIME - * MySQL has these signatures too - * (DATE, DATE) -> TIME // result is > 24 hours - * (DATETIME, DATETIME) -> TIME // result is > 24 hours - * (TIMESTAMP, TIMESTAMP) -> TIME // result is > 24 hours - * (x, x) -> NULL // when args have different types - * (STRING, STRING) -> TIME // argument strings contain same types only - * (STRING, STRING) -> NULL // argument strings are different types + * Returns different between two times as a time.
+ * (TIME, TIME) -> TIME
+ * MySQL has these signatures too
+ * (DATE, DATE) -> TIME // result is > 24 hours
+ * (DATETIME, DATETIME) -> TIME // result is > 24 hours
+ * (TIMESTAMP, TIMESTAMP) -> TIME // result is > 24 hours
+ * (x, x) -> NULL // when args have different types
+ * (STRING, STRING) -> TIME // argument strings contain same types only
+ * (STRING, STRING) -> NULL // argument strings are different types */ private DefaultFunctionResolver timediff() { - return define(BuiltinFunctionName.TIMEDIFF.getName(), + return define( + BuiltinFunctionName.TIMEDIFF.getName(), impl(nullMissingHandling(DateTimeFunction::exprTimeDiff), TIME, TIME, TIME)); } @@ -872,90 +989,122 @@ private DefaultFunctionResolver timediff() { * TIME_TO_SEC(STRING/TIME/DATETIME/TIMESTAMP). return the time argument, converted to seconds. */ private DefaultFunctionResolver time_to_sec() { - return define(BuiltinFunctionName.TIME_TO_SEC.getName(), + return define( + BuiltinFunctionName.TIME_TO_SEC.getName(), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIME), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, DATETIME) - ); + impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, DATETIME)); } /** - * Extracts the timestamp of a date and time value. - * Input strings may contain a timestamp only in format 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]' - * STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP - * STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP + * Extracts the timestamp of a date and time value.
+ * Input strings may contain a timestamp only in format 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'
+ * STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
+ * STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
* All types are converted to TIMESTAMP actually before the function call - it is responsibility + *
* of the automatic cast mechanism defined in `ExprCoreType` and performed by `TypeCastOperator`. */ private DefaultFunctionResolver timestamp() { - return define(BuiltinFunctionName.TIMESTAMP.getName(), + return define( + BuiltinFunctionName.TIMESTAMP.getName(), impl(nullMissingHandling(v -> v), TIMESTAMP, TIMESTAMP), // We can use FunctionProperties.None, because it is not used. It is required to convert // TIME to other datetime types, but arguments there are already converted. - impl(nullMissingHandling((v1, v2) -> exprAddTime(FunctionProperties.None, v1, v2)), - TIMESTAMP, TIMESTAMP, TIMESTAMP)); + impl( + nullMissingHandling((v1, v2) -> exprAddTime(FunctionProperties.None, v1, v2)), + TIMESTAMP, + TIMESTAMP, + TIMESTAMP)); } /** - * Adds an interval of time to the provided DATE/DATETIME/TIME/TIMESTAMP/STRING argument. - * The interval of time added is determined by the given first and second arguments. - * The first argument is an interval type, and must be one of the tokens below... - * [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] - * The second argument is the amount of the interval type to be added. - * The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING to add to. + * Adds an interval of time to the provided DATE/DATETIME/TIME/TIMESTAMP/STRING argument. The + * interval of time added is determined by the given first and second arguments. The first + * argument is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, + * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument is the amount of the + * interval type to be added. The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING to add + * to. + * * @return The DATETIME representing the summed DATE/DATETIME/TIME/TIMESTAMP and interval. 
*/ private DefaultFunctionResolver timestampadd() { - return define(BuiltinFunctionName.TIMESTAMPADD.getName(), - impl(nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, STRING, INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, STRING, INTEGER, TIMESTAMP), + return define( + BuiltinFunctionName.TIMESTAMPADD.getName(), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampAdd), + DATETIME, + STRING, + INTEGER, + DATETIME), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampAdd), + DATETIME, + STRING, + INTEGER, + TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties( - (functionProperties, part, amount, time) -> exprTimestampAddForTimeType( - functionProperties.getQueryStartClock(), - part, - amount, - time)), - DATETIME, STRING, INTEGER, TIME)); + (functionProperties, part, amount, time) -> + exprTimestampAddForTimeType( + functionProperties.getQueryStartClock(), part, amount, time)), + DATETIME, + STRING, + INTEGER, + TIME)); } /** - * Finds the difference between provided DATE/DATETIME/TIME/TIMESTAMP/STRING arguments. - * The first argument is an interval type, and must be one of the tokens below... - * [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] - * The second argument the DATE/DATETIME/TIME/TIMESTAMP/STRING representing the start time. - * The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING representing the end time. + * Finds the difference between provided DATE/DATETIME/TIME/TIMESTAMP/STRING arguments. The first + * argument is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, + * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument the + * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the start time. The third argument is the + * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the end time. 
+ * * @return A LONG representing the difference between arguments, using the given interval type. */ private DefaultFunctionResolver timestampdiff() { - return define(BuiltinFunctionName.TIMESTAMPDIFF.getName(), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, DATETIME, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, DATETIME, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, TIMESTAMP, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, TIMESTAMP, TIMESTAMP), + return define( + BuiltinFunctionName.TIMESTAMPDIFF.getName(), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + DATETIME, + DATETIME), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + DATETIME, + TIMESTAMP), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + TIMESTAMP, + DATETIME), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + TIMESTAMP, + TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties( - (functionProperties, part, startTime, endTime) -> exprTimestampDiffForTimeType( - functionProperties, - part, - startTime, - endTime)), - DATETIME, STRING, TIME, TIME) - ); + (functionProperties, part, startTime, endTime) -> + exprTimestampDiffForTimeType(functionProperties, part, startTime, endTime)), + DATETIME, + STRING, + TIME, + TIME)); } - /** - * TO_DAYS(STRING/DATE/DATETIME/TIMESTAMP). return the day number of the given date. - */ + /** TO_DAYS(STRING/DATE/DATETIME/TIMESTAMP). return the day number of the given date. 
*/ private DefaultFunctionResolver to_days() { - return define(BuiltinFunctionName.TO_DAYS.getName(), + return define( + BuiltinFunctionName.TO_DAYS.getName(), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATE), @@ -963,156 +1112,166 @@ private DefaultFunctionResolver to_days() { } /** - * TO_SECONDS(TIMESTAMP/LONG). return the seconds number of the given date. - * Arguments of type STRING/TIMESTAMP/LONG are also accepted. - * STRING/TIMESTAMP/LONG arguments are automatically cast to TIMESTAMP. + * TO_SECONDS(TIMESTAMP/LONG). return the seconds number of the given date. Arguments of type + * STRING/TIMESTAMP/LONG are also accepted. STRING/TIMESTAMP/LONG arguments are automatically cast + * to TIMESTAMP. */ private DefaultFunctionResolver to_seconds() { - return define(BuiltinFunctionName.TO_SECONDS.getName(), + return define( + BuiltinFunctionName.TO_SECONDS.getName(), impl(nullMissingHandling(DateTimeFunction::exprToSeconds), LONG, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprToSecondsForIntType), LONG, LONG)); } private FunctionResolver unix_timestamp() { - return define(BuiltinFunctionName.UNIX_TIMESTAMP.getName(), - implWithProperties(functionProperties - -> DateTimeFunction.unixTimeStamp(functionProperties.getQueryStartClock()), LONG), + return define( + BuiltinFunctionName.UNIX_TIMESTAMP.getName(), + implWithProperties( + functionProperties -> + DateTimeFunction.unixTimeStamp(functionProperties.getQueryStartClock()), + LONG), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATE), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATETIME), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DOUBLE) - ); + 
impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DOUBLE)); } - /** - * UTC_DATE(). return the current UTC Date in format yyyy-MM-dd - */ + /** UTC_DATE(). return the current UTC Date in format yyyy-MM-dd */ private DefaultFunctionResolver utc_date() { - return define(BuiltinFunctionName.UTC_DATE.getName(), - implWithProperties(functionProperties - -> exprUtcDate(functionProperties), DATE)); + return define( + BuiltinFunctionName.UTC_DATE.getName(), + implWithProperties(functionProperties -> exprUtcDate(functionProperties), DATE)); } - /** - * UTC_TIME(). return the current UTC Time in format HH:mm:ss - */ + /** UTC_TIME(). return the current UTC Time in format HH:mm:ss */ private DefaultFunctionResolver utc_time() { - return define(BuiltinFunctionName.UTC_TIME.getName(), - implWithProperties(functionProperties - -> exprUtcTime(functionProperties), TIME)); + return define( + BuiltinFunctionName.UTC_TIME.getName(), + implWithProperties(functionProperties -> exprUtcTime(functionProperties), TIME)); } - /** - * UTC_TIMESTAMP(). return the current UTC TimeStamp in format yyyy-MM-dd HH:mm:ss - */ + /** UTC_TIMESTAMP(). return the current UTC TimeStamp in format yyyy-MM-dd HH:mm:ss */ private DefaultFunctionResolver utc_timestamp() { - return define(BuiltinFunctionName.UTC_TIMESTAMP.getName(), - implWithProperties(functionProperties - -> exprUtcTimeStamp(functionProperties), DATETIME)); + return define( + BuiltinFunctionName.UTC_TIMESTAMP.getName(), + implWithProperties(functionProperties -> exprUtcTimeStamp(functionProperties), DATETIME)); } - /** - * WEEK(DATE[,mode]). return the week number for date. - */ + /** WEEK(DATE[,mode]). return the week number for date. 
*/ private DefaultFunctionResolver week(BuiltinFunctionName week) { - return define(week.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.weekOfYearToday( - DEFAULT_WEEK_OF_YEAR_MODE, - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + week.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.weekOfYearToday( + DEFAULT_WEEK_OF_YEAR_MODE, functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, STRING), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, time, modeArg) - -> DateTimeFunction.weekOfYearToday( - modeArg, - functionProperties.getQueryStartClock())), INTEGER, TIME, INTEGER), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, time, modeArg) -> + DateTimeFunction.weekOfYearToday( + modeArg, functionProperties.getQueryStartClock())), + INTEGER, + TIME, + INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATE, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, TIMESTAMP, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, STRING, INTEGER) - ); + impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, STRING, INTEGER)); } private DefaultFunctionResolver weekday() { - return define(BuiltinFunctionName.WEEKDAY.getName(), - implWithProperties(nullMissingHandlingWithProperties( - (functionProperties, arg) -> new ExprIntegerValue( - 
formatNow(functionProperties.getQueryStartClock()).getDayOfWeek().getValue() - 1)), - INTEGER, TIME), + return define( + BuiltinFunctionName.WEEKDAY.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + new ExprIntegerValue( + formatNow(functionProperties.getQueryStartClock()).getDayOfWeek().getValue() + - 1)), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, STRING)); } - /** - * YEAR(STRING/DATE/DATETIME/TIMESTAMP). return the year for date (1000-9999). - */ + /** YEAR(STRING/DATE/DATETIME/TIMESTAMP). return the year for date (1000-9999). */ private DefaultFunctionResolver year() { - return define(BuiltinFunctionName.YEAR.getName(), + return define( + BuiltinFunctionName.YEAR.getName(), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, STRING)); } - /** - * YEARWEEK(DATE[,mode]). return the week number for date. - */ + /** YEARWEEK(DATE[,mode]). return the week number for date. 
*/ private DefaultFunctionResolver yearweek() { - return define(BuiltinFunctionName.YEARWEEK.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> yearweekToday( - DEFAULT_WEEK_OF_YEAR_MODE, - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + BuiltinFunctionName.YEARWEEK.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + yearweekToday( + DEFAULT_WEEK_OF_YEAR_MODE, functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, STRING), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, time, modeArg) - -> yearweekToday( - modeArg, - functionProperties.getQueryStartClock())), INTEGER, TIME, INTEGER), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, time, modeArg) -> + yearweekToday(modeArg, functionProperties.getQueryStartClock())), + INTEGER, + TIME, + INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATE, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, TIMESTAMP, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, STRING, INTEGER) - ); + impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, STRING, INTEGER)); } /** - * Formats date according to format specifier. First argument is date, second is format. 
- * Detailed supported signatures: - * (STRING, STRING) -> STRING - * (DATE, STRING) -> STRING - * (DATETIME, STRING) -> STRING - * (TIME, STRING) -> STRING + * Formats date according to format specifier. First argument is date, second is format.
+ * Detailed supported signatures:
+ * (STRING, STRING) -> STRING
+ * (DATE, STRING) -> STRING
+ * (DATETIME, STRING) -> STRING
+ * (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ private DefaultFunctionResolver date_format() { - return define(BuiltinFunctionName.DATE_FORMAT.getName(), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, STRING, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, DATE, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, DATETIME, STRING), + return define( + BuiltinFunctionName.DATE_FORMAT.getName(), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, STRING, STRING), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATE, STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATETIME, STRING), implWithProperties( nullMissingHandlingWithProperties( - (functionProperties, time, formatString) - -> DateTimeFormatterUtil.getFormattedDateOfToday( + (functionProperties, time, formatString) -> + DateTimeFormatterUtil.getFormattedDateOfToday( formatString, time, functionProperties.getQueryStartClock())), - STRING, TIME, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, TIMESTAMP, STRING) - ); + STRING, + TIME, + STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), + STRING, + TIMESTAMP, + STRING)); } - private ExprValue dayOfMonthToday(Clock clock) { return new ExprIntegerValue(LocalDateTime.now(clock).getDayOfMonth()); } @@ -1144,8 +1303,8 @@ private ExprValue dayOfWeekToday(Clock clock) { * @param interval ExprValue of Interval type, the temporal amount to add. * @return Datetime resulted from `interval` added to `datetime`. 
*/ - private ExprValue exprAddDateInterval(FunctionProperties functionProperties, - ExprValue datetime, ExprValue interval) { + private ExprValue exprAddDateInterval( + FunctionProperties functionProperties, ExprValue datetime, ExprValue interval) { return exprDateApplyInterval(functionProperties, datetime, interval.intervalValue(), true); } @@ -1158,36 +1317,37 @@ private ExprValue exprAddDateInterval(FunctionProperties functionProperties, * @param isAdd A flag: true to isAdd, false to subtract. * @return Datetime calculated. */ - private ExprValue exprDateApplyInterval(FunctionProperties functionProperties, - ExprValue datetime, - TemporalAmount interval, - Boolean isAdd) { + private ExprValue exprDateApplyInterval( + FunctionProperties functionProperties, + ExprValue datetime, + TemporalAmount interval, + Boolean isAdd) { var dt = extractDateTime(datetime, functionProperties); return new ExprDatetimeValue(isAdd ? dt.plus(interval) : dt.minus(interval)); } /** - * Formats date according to format specifier. First argument is time, second is format. - * Detailed supported signatures: - * (STRING, STRING) -> STRING - * (DATE, STRING) -> STRING - * (DATETIME, STRING) -> STRING - * (TIME, STRING) -> STRING + * Formats date according to format specifier. First argument is time, second is format.
+ * Detailed supported signatures:
+ * (STRING, STRING) -> STRING
+ * (DATE, STRING) -> STRING
+ * (DATETIME, STRING) -> STRING
+ * (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ private DefaultFunctionResolver time_format() { - return define(BuiltinFunctionName.TIME_FORMAT.getName(), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, STRING, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, DATE, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, DATETIME, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, TIME, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, TIMESTAMP, STRING) - ); + return define( + BuiltinFunctionName.TIME_FORMAT.getName(), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, STRING, STRING), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATE, STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATETIME, STRING), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, TIME, STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), + STRING, + TIMESTAMP, + STRING)); } /** @@ -1198,8 +1358,8 @@ private DefaultFunctionResolver time_format() { * @param days ExprValue of Long type, representing the number of days to add. * @return Date/Datetime resulted from days added to `datetime`. */ - private ExprValue exprAddDateDays(FunctionProperties functionProperties, - ExprValue datetime, ExprValue days) { + private ExprValue exprAddDateDays( + FunctionProperties functionProperties, ExprValue datetime, ExprValue days) { return exprDateApplyDays(functionProperties, datetime, days.longValue(), true); } @@ -1212,11 +1372,11 @@ private ExprValue exprAddDateDays(FunctionProperties functionProperties, * @param isAdd A flag: true to add, false to subtract. * @return Datetime calculated. 
*/ - private ExprValue exprDateApplyDays(FunctionProperties functionProperties, - ExprValue datetime, Long days, Boolean isAdd) { + private ExprValue exprDateApplyDays( + FunctionProperties functionProperties, ExprValue datetime, Long days, Boolean isAdd) { if (datetime.type() == DATE) { - return new ExprDateValue(isAdd ? datetime.dateValue().plusDays(days) - : datetime.dateValue().minusDays(days)); + return new ExprDateValue( + isAdd ? datetime.dateValue().plusDays(days) : datetime.dateValue().minusDays(days)); } var dt = extractDateTime(datetime, functionProperties); return new ExprDatetimeValue(isAdd ? dt.plusDays(days) : dt.minusDays(days)); @@ -1231,12 +1391,16 @@ private ExprValue exprDateApplyDays(FunctionProperties functionProperties, * @param isAdd A flag: true to add, false to subtract. * @return A value calculated. */ - private ExprValue exprApplyTime(FunctionProperties functionProperties, - ExprValue temporal, ExprValue temporalDelta, Boolean isAdd) { + private ExprValue exprApplyTime( + FunctionProperties functionProperties, + ExprValue temporal, + ExprValue temporalDelta, + Boolean isAdd) { var interval = Duration.between(LocalTime.MIN, temporalDelta.timeValue()); - var result = isAdd - ? extractDateTime(temporal, functionProperties).plus(interval) - : extractDateTime(temporal, functionProperties).minus(interval); + var result = + isAdd + ? extractDateTime(temporal, functionProperties).plus(interval) + : extractDateTime(temporal, functionProperties).minus(interval); return temporal.type() == TIME ? new ExprTimeValue(result.toLocalTime()) : new ExprDatetimeValue(result); @@ -1250,18 +1414,18 @@ private ExprValue exprApplyTime(FunctionProperties functionProperties, * @param temporalDelta A Date/Time/Datetime/Timestamp object to add time from. * @return A value calculated. 
*/ - private ExprValue exprAddTime(FunctionProperties functionProperties, - ExprValue temporal, ExprValue temporalDelta) { + private ExprValue exprAddTime( + FunctionProperties functionProperties, ExprValue temporal, ExprValue temporalDelta) { return exprApplyTime(functionProperties, temporal, temporalDelta, true); } /** - * CONVERT_TZ function implementation for ExprValue. - * Returns null for time zones outside of +13:00 and -12:00. + * CONVERT_TZ function implementation for ExprValue. Returns null for time zones outside of +13:00 + * and -12:00. * * @param startingDateTime ExprValue of DateTime that is being converted from - * @param fromTz ExprValue of time zone, representing the time to convert from. - * @param toTz ExprValue of time zone, representing the time to convert to. + * @param fromTz ExprValue of time zone, representing the time to convert from. + * @param toTz ExprValue of time zone, representing the time to convert to. * @return DateTime that has been converted to the to_tz timezone. */ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, ExprValue toTz) { @@ -1278,8 +1442,7 @@ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, Ex || !DateTimeUtils.isValidMySqlTimeZoneId(convertedToTz)) { return ExprNullValue.of(); } - ZonedDateTime zonedDateTime = - startingDateTime.datetimeValue().atZone(convertedFromTz); + ZonedDateTime zonedDateTime = startingDateTime.datetimeValue().atZone(convertedFromTz); return new ExprDatetimeValue( zonedDateTime.withZoneSameInstant(convertedToTz).toLocalDateTime()); @@ -1305,19 +1468,19 @@ private ExprValue exprDate(ExprValue exprValue) { } /** - * Calculate the value in days from one date to the other. - * Only the date parts of the values are used in the calculation. + * Calculate the value in days from one date to the other. Only the date parts of the values are + * used in the calculation. * * @param first The first value. * @param second The second value. 
* @return The diff. */ - private ExprValue exprDateDiff(FunctionProperties functionProperties, - ExprValue first, ExprValue second) { + private ExprValue exprDateDiff( + FunctionProperties functionProperties, ExprValue first, ExprValue second) { // java inverses the value, so we have to swap 1 and 2 - return new ExprLongValue(DAYS.between( - extractDate(second, functionProperties), - extractDate(first, functionProperties))); + return new ExprLongValue( + DAYS.between( + extractDate(second, functionProperties), extractDate(first, functionProperties))); } /** @@ -1357,10 +1520,7 @@ private ExprValue exprDateTime(ExprValue dateTime, ExprValue timeZone) { ldt = new ExprDatetimeValue(dateTime.stringValue()); toTz = defaultTimeZone; } - convertTZResult = exprConvertTZ( - ldt, - new ExprStringValue(toTz), - timeZone); + convertTZResult = exprConvertTZ(ldt, new ExprStringValue(toTz), timeZone); return convertTZResult; } @@ -1426,8 +1586,8 @@ private ExprValue exprDayOfYear(ExprValue date) { public ExprLongValue formatExtractFunction(ExprValue part, ExprValue datetime) { String partName = part.stringValue().toUpperCase(); LocalDateTime arg = datetime.datetimeValue(); - String text = arg.format(DateTimeFormatter.ofPattern( - extract_formats.get(partName), Locale.ENGLISH)); + String text = + arg.format(DateTimeFormatter.ofPattern(extract_formats.get(partName), Locale.ENGLISH)); return new ExprLongValue(Long.parseLong(text)); } @@ -1450,12 +1610,10 @@ private ExprValue exprExtract(ExprValue part, ExprValue datetime) { * @param time The time to be formatted. 
* @return A LONG */ - private ExprValue exprExtractForTime(FunctionProperties functionProperties, - ExprValue part, - ExprValue time) { + private ExprValue exprExtractForTime( + FunctionProperties functionProperties, ExprValue part, ExprValue time) { return formatExtractFunction( - part, - new ExprDatetimeValue(extractDateTime(time, functionProperties))); + part, new ExprDatetimeValue(extractDateTime(time, functionProperties))); } /** @@ -1484,9 +1642,8 @@ private ExprValue exprFromUnixTime(ExprValue time) { private LocalDateTime exprFromUnixTimeImpl(ExprValue time) { return LocalDateTime.ofInstant( - Instant.ofEpochSecond((long)Math.floor(time.doubleValue())), - UTC_ZONE_ID) - .withNano((int)((time.doubleValue() % 1) * 1E9)); + Instant.ofEpochSecond((long) Math.floor(time.doubleValue())), UTC_ZONE_ID) + .withNano((int) ((time.doubleValue() % 1) * 1E9)); } private ExprValue exprFromUnixTimeFormat(ExprValue time, ExprValue format) { @@ -1506,9 +1663,8 @@ private ExprValue exprFromUnixTimeFormat(ExprValue time, ExprValue format) { */ private ExprValue exprGetFormat(ExprValue type, ExprValue format) { if (formats.contains(type.stringValue().toLowerCase(), format.stringValue().toLowerCase())) { - return new ExprStringValue(formats.get( - type.stringValue().toLowerCase(), - format.stringValue().toLowerCase())); + return new ExprStringValue( + formats.get(type.stringValue().toLowerCase(), format.stringValue().toLowerCase())); } return ExprNullValue.of(); @@ -1521,8 +1677,7 @@ private ExprValue exprGetFormat(ExprValue type, ExprValue format) { * @return ExprValue. 
*/ private ExprValue exprHour(ExprValue time) { - return new ExprIntegerValue( - HOURS.between(LocalTime.MIN, time.timeValue())); + return new ExprIntegerValue(HOURS.between(LocalTime.MIN, time.timeValue())); } /** @@ -1533,9 +1688,7 @@ private ExprValue exprHour(ExprValue time) { */ private LocalDate getLastDay(LocalDate today) { return LocalDate.of( - today.getYear(), - today.getMonth(), - today.getMonth().length(today.isLeapYear())); + today.getYear(), today.getMonth(), today.getMonth().length(today.isLeapYear())); } /** @@ -1559,12 +1712,15 @@ private ExprValue exprLastDayToday(Clock clock) { } /** - * Following MySQL, function receives arguments of type double and rounds them before use. - * Furthermore: - * - zero year interpreted as 2000 - * - negative year is not accepted - * - @dayOfYear should be greater than 1 - * - if @dayOfYear is greater than 365/366, calculation goes to the next year(s) + * Following MySQL, function receives arguments of type double and rounds them before use.
+ * Furthermore:
+ * + *
    + *
  • zero year interpreted as 2000 + *
  • negative year is not accepted + *
  • @dayOfYear should be greater than 1 + *
  • if @dayOfYear is greater than 365/366, calculation goes to the next year(s) + *
* * @param yearExpr year * @param dayOfYearExp day of the @year, starting from 1 @@ -1580,12 +1736,13 @@ private ExprValue exprMakeDate(ExprValue yearExpr, ExprValue dayOfYearExp) { if (0 == year) { year = 2000; } - return new ExprDateValue(LocalDate.ofYearDay((int)year, 1).plusDays(dayOfYear - 1)); + return new ExprDateValue(LocalDate.ofYearDay((int) year, 1).plusDays(dayOfYear - 1)); } /** * Following MySQL, function receives arguments of type double. @hour and @minute are rounded, * while @second used as is, including fraction part. + * * @param hourExpr hour * @param minuteExpr minute * @param secondExpr second @@ -1598,8 +1755,9 @@ private ExprValue exprMakeTime(ExprValue hourExpr, ExprValue minuteExpr, ExprVal if (0 > hour || 0 > minute || 0 > second) { return ExprNullValue.of(); } - return new ExprTimeValue(LocalTime.parse(String.format("%02d:%02d:%012.9f", - hour, minute, second), DateTimeFormatter.ISO_TIME)); + return new ExprTimeValue( + LocalTime.parse( + String.format("%02d:%02d:%012.9f", hour, minute, second), DateTimeFormatter.ISO_TIME)); } /** @@ -1620,8 +1778,7 @@ private ExprValue exprMicrosecond(ExprValue time) { * @return ExprValue. */ private ExprValue exprMinute(ExprValue time) { - return new ExprIntegerValue( - (MINUTES.between(LocalTime.MIN, time.timeValue()) % 60)); + return new ExprIntegerValue((MINUTES.between(LocalTime.MIN, time.timeValue()) % 60)); } /** @@ -1631,8 +1788,7 @@ private ExprValue exprMinute(ExprValue time) { * @return ExprValue. */ private ExprValue exprMinuteOfDay(ExprValue time) { - return new ExprIntegerValue( - MINUTES.between(LocalTime.MIN, time.timeValue())); + return new ExprIntegerValue(MINUTES.between(LocalTime.MIN, time.timeValue())); } /** @@ -1675,8 +1831,7 @@ private LocalDate parseDatePeriod(Integer period) { } /** - * Adds N months to period P (in the format YYMM or YYYYMM). - * Returns a value in the format YYYYMM. + * Adds N months to period P (in the format YYMM or YYYYMM). 
Returns a value in the format YYYYMM. * * @param period Period in the format YYMM or YYYYMM. * @param months Amount of months to add. @@ -1684,19 +1839,20 @@ private LocalDate parseDatePeriod(Integer period) { */ private ExprValue exprPeriodAdd(ExprValue period, ExprValue months) { // We should add a day to make string parsable and remove it afterwards - var input = period.integerValue() * 100 + 1; // adds 01 to end of the string + var input = period.integerValue() * 100 + 1; // adds 01 to end of the string var parsedDate = parseDatePeriod(input); if (parsedDate == null) { return ExprNullValue.of(); } var res = DATE_FORMATTER_LONG_YEAR.format(parsedDate.plusMonths(months.integerValue())); - return new ExprIntegerValue(Integer.parseInt( - res.substring(0, res.length() - 2))); // Remove the day part, .eg. 20070101 -> 200701 + return new ExprIntegerValue( + Integer.parseInt( + res.substring(0, res.length() - 2))); // Remove the day part, .eg. 20070101 -> 200701 } /** - * Returns the number of months between periods P1 and P2. - * P1 and P2 should be in the format YYMM or YYYYMM. + * Returns the number of months between periods P1 and P2. P1 and P2 should be in the format YYMM + * or YYYYMM. * * @param period1 Period in the format YYMM or YYYYMM. * @param period2 Period in the format YYMM or YYYYMM. @@ -1724,6 +1880,7 @@ private ExprValue exprQuarter(ExprValue date) { /** * Returns TIME value of sec_to_time function for an INTEGER or LONG arguments. + * * @param totalSeconds The total number of seconds * @return A TIME value */ @@ -1732,17 +1889,17 @@ private ExprValue exprSecToTime(ExprValue totalSeconds) { } /** - * Helper function which obtains the decimal portion of the seconds value passed in. - * Uses BigDecimal to prevent issues with math on floating point numbers. - * Return is formatted to be used with Duration.ofSeconds(); + * Helper function which obtains the decimal portion of the seconds value passed in. 
Uses + * BigDecimal to prevent issues with math on floating point numbers. Return is formatted to be + * used with Duration.ofSeconds(); * * @param seconds and ExprDoubleValue or ExprFloatValue for the seconds * @return A LONG representing the nanoseconds portion */ private long formatNanos(ExprValue seconds) { - //Convert ExprValue to BigDecimal + // Convert ExprValue to BigDecimal BigDecimal formattedNanos = BigDecimal.valueOf(seconds.doubleValue()); - //Extract only the nanosecond part + // Extract only the nanosecond part formattedNanos = formattedNanos.subtract(BigDecimal.valueOf(formattedNanos.intValue())); return formattedNanos.scaleByPowerOfTen(9).longValue(); @@ -1750,6 +1907,7 @@ private long formatNanos(ExprValue seconds) { /** * Returns TIME value of sec_to_time function for FLOAT or DOUBLE arguments. + * * @param totalSeconds The total number of seconds * @return A TIME value */ @@ -1767,8 +1925,7 @@ private ExprValue exprSecToTimeWithNanos(ExprValue totalSeconds) { * @return ExprValue. */ private ExprValue exprSecond(ExprValue time) { - return new ExprIntegerValue( - (SECONDS.between(LocalTime.MIN, time.timeValue()) % 60)); + return new ExprIntegerValue((SECONDS.between(LocalTime.MIN, time.timeValue()) % 60)); } /** @@ -1779,8 +1936,8 @@ private ExprValue exprSecond(ExprValue time) { * @param days ExprValue of Long type, representing the number of days to subtract. * @return Date/Datetime resulted from days subtracted to date. */ - private ExprValue exprSubDateDays(FunctionProperties functionProperties, - ExprValue date, ExprValue days) { + private ExprValue exprSubDateDays( + FunctionProperties functionProperties, ExprValue date, ExprValue days) { return exprDateApplyDays(functionProperties, date, days.longValue(), false); } @@ -1792,8 +1949,8 @@ private ExprValue exprSubDateDays(FunctionProperties functionProperties, * @param expr ExprValue of Interval type, the temporal amount to subtract. 
* @return Datetime resulted from expr subtracted to `datetime`. */ - private ExprValue exprSubDateInterval(FunctionProperties functionProperties, - ExprValue datetime, ExprValue expr) { + private ExprValue exprSubDateInterval( + FunctionProperties functionProperties, ExprValue datetime, ExprValue expr) { return exprDateApplyInterval(functionProperties, datetime, expr.intervalValue(), false); } @@ -1804,14 +1961,13 @@ private ExprValue exprSubDateInterval(FunctionProperties functionProperties, * @param temporalDelta A Date/Time/Datetime/Timestamp to subtract time from. * @return A value calculated. */ - private ExprValue exprSubTime(FunctionProperties functionProperties, - ExprValue temporal, ExprValue temporalDelta) { + private ExprValue exprSubTime( + FunctionProperties functionProperties, ExprValue temporal, ExprValue temporalDelta) { return exprApplyTime(functionProperties, temporal, temporalDelta, false); } - private ExprValue exprStrToDate(FunctionProperties fp, - ExprValue dateTimeExpr, - ExprValue formatStringExp) { + private ExprValue exprStrToDate( + FunctionProperties fp, ExprValue dateTimeExpr, ExprValue formatStringExp) { return DateTimeFormatterUtil.parseStringWithDateOrTime(fp, dateTimeExpr, formatStringExp); } @@ -1838,8 +1994,8 @@ private ExprValue exprTime(ExprValue exprValue) { */ private ExprValue exprTimeDiff(ExprValue first, ExprValue second) { // java inverses the value, so we have to swap 1 and 2 - return new ExprTimeValue(LocalTime.MIN.plus( - Duration.between(second.timeValue(), first.timeValue()))); + return new ExprTimeValue( + LocalTime.MIN.plus(Duration.between(second.timeValue(), first.timeValue()))); } /** @@ -1852,9 +2008,8 @@ private ExprValue exprTimeToSec(ExprValue time) { return new ExprLongValue(time.timeValue().toSecondOfDay()); } - private ExprValue exprTimestampAdd(ExprValue partExpr, - ExprValue amountExpr, - ExprValue datetimeExpr) { + private ExprValue exprTimestampAdd( + ExprValue partExpr, ExprValue amountExpr, ExprValue 
datetimeExpr) { String part = partExpr.stringValue(); int amount = amountExpr.integerValue(); LocalDateTime datetime = datetimeExpr.datetimeValue(); @@ -1895,13 +2050,9 @@ private ExprValue exprTimestampAdd(ExprValue partExpr, return new ExprDatetimeValue(datetime.plus(amount, temporalUnit)); } - private ExprValue exprTimestampAddForTimeType(Clock clock, - ExprValue partExpr, - ExprValue amountExpr, - ExprValue timeExpr) { - LocalDateTime datetime = LocalDateTime.of( - formatNow(clock).toLocalDate(), - timeExpr.timeValue()); + private ExprValue exprTimestampAddForTimeType( + Clock clock, ExprValue partExpr, ExprValue amountExpr, ExprValue timeExpr) { + LocalDateTime datetime = LocalDateTime.of(formatNow(clock).toLocalDate(), timeExpr.timeValue()); return exprTimestampAdd(partExpr, amountExpr, new ExprDatetimeValue(datetime)); } @@ -1942,19 +2093,13 @@ private ExprValue getTimeDifference(String part, LocalDateTime startTime, LocalD } private ExprValue exprTimestampDiff( - ExprValue partExpr, - ExprValue startTimeExpr, - ExprValue endTimeExpr) { + ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( - partExpr.stringValue(), - startTimeExpr.datetimeValue(), - endTimeExpr.datetimeValue()); + partExpr.stringValue(), startTimeExpr.datetimeValue(), endTimeExpr.datetimeValue()); } - private ExprValue exprTimestampDiffForTimeType(FunctionProperties fp, - ExprValue partExpr, - ExprValue startTimeExpr, - ExprValue endTimeExpr) { + private ExprValue exprTimestampDiffForTimeType( + FunctionProperties fp, ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( partExpr.stringValue(), extractDateTime(startTimeExpr, fp), @@ -1988,8 +2133,8 @@ private ExprValue exprUtcTime(FunctionProperties functionProperties) { * @return ExprValue. 
*/ private ExprValue exprUtcTimeStamp(FunctionProperties functionProperties) { - var zdt = ZonedDateTime.now(functionProperties.getQueryStartClock()) - .withZoneSameInstant(UTC_ZONE_ID); + var zdt = + ZonedDateTime.now(functionProperties.getQueryStartClock()).withZoneSameInstant(UTC_ZONE_ID); return new ExprDatetimeValue(zdt.toLocalDateTime()); } @@ -2027,25 +2172,25 @@ private DateTimeFormatter getFormatter(int dateAsInt) { throw new DateTimeException("Integer argument was out of range"); } - //Check below from YYYYMMDD - MMDD which format should be used + // Check below from YYYYMMDD - MMDD which format should be used switch (length) { - //Check if dateAsInt is at least 8 digits long + // Check if dateAsInt is at least 8 digits long case FULL_DATE_LENGTH: return DATE_FORMATTER_LONG_YEAR; - //Check if dateAsInt is at least 6 digits long + // Check if dateAsInt is at least 6 digits long case SHORT_DATE_LENGTH: return DATE_FORMATTER_SHORT_YEAR; - //Check if dateAsInt is at least 5 digits long + // Check if dateAsInt is at least 5 digits long case SINGLE_DIGIT_YEAR_DATE_LENGTH: return DATE_FORMATTER_SINGLE_DIGIT_YEAR; - //Check if dateAsInt is at least 4 digits long + // Check if dateAsInt is at least 4 digits long case NO_YEAR_DATE_LENGTH: return DATE_FORMATTER_NO_YEAR; - //Check if dateAsInt is at least 3 digits long + // Check if dateAsInt is at least 3 digits long case SINGLE_DIGIT_MONTH_DATE_LENGTH: return DATE_FORMATTER_SINGLE_DIGIT_MONTH; @@ -2064,15 +2209,16 @@ private DateTimeFormatter getFormatter(int dateAsInt) { */ private ExprValue exprToSecondsForIntType(ExprValue dateExpr) { try { - //Attempt to parse integer argument as date - LocalDate date = LocalDate.parse(String.valueOf(dateExpr.integerValue()), - getFormatter(dateExpr.integerValue())); + // Attempt to parse integer argument as date + LocalDate date = + LocalDate.parse( + String.valueOf(dateExpr.integerValue()), getFormatter(dateExpr.integerValue())); - return new 
ExprLongValue(date.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) - + DAYS_0000_TO_1970 * SECONDS_PER_DAY); + return new ExprLongValue( + date.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + DAYS_0000_TO_1970 * SECONDS_PER_DAY); } catch (DateTimeException ignored) { - //Return null if parsing error + // Return null if parsing error return ExprNullValue.of(); } } @@ -2121,12 +2267,14 @@ private ExprValue unixTimeStampOf(ExprValue value) { private Double unixTimeStampOfImpl(ExprValue value) { // Also, according to MySQL documentation: // The date argument may be a DATE, DATETIME, or TIMESTAMP ... - switch ((ExprCoreType)value.type()) { - case DATE: return value.dateValue().toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; - case DATETIME: return value.datetimeValue().toEpochSecond(ZoneOffset.UTC) - + value.datetimeValue().getNano() / 1E9; - case TIMESTAMP: return value.timestampValue().getEpochSecond() - + value.timestampValue().getNano() / 1E9; + switch ((ExprCoreType) value.type()) { + case DATE: + return value.dateValue().toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; + case DATETIME: + return value.datetimeValue().toEpochSecond(ZoneOffset.UTC) + + value.datetimeValue().getNano() / 1E9; + case TIMESTAMP: + return value.timestampValue().getEpochSecond() + value.timestampValue().getNano() / 1E9; default: // ... or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. // If the argument includes a time part, it may optionally include a fractional @@ -2172,8 +2320,8 @@ private Double unixTimeStampOfImpl(ExprValue value) { } /** - * Week for date implementation for ExprValue. - * When mode is not specified default value mode 0 is used for default_week_format. + * Week for date implementation for ExprValue. When mode is not specified default value mode 0 is + * used for default_week_format. * * @param date ExprValue of Date/Datetime/Timestamp/String type. * @return ExprValue. 
@@ -2203,12 +2351,11 @@ private ExprIntegerValue extractYearweek(LocalDate date, int mode) { // Needed to align with MySQL. Due to how modes for this function work. // See description of modes here ... // https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_week - int modeJava = CalendarLookup.getWeekNumber(mode, date) != 0 ? mode : - mode <= 4 ? 2 : - 7; + int modeJava = CalendarLookup.getWeekNumber(mode, date) != 0 ? mode : mode <= 4 ? 2 : 7; - int formatted = CalendarLookup.getYearNumber(modeJava, date) * 100 - + CalendarLookup.getWeekNumber(modeJava, date); + int formatted = + CalendarLookup.getYearNumber(modeJava, date) * 100 + + CalendarLookup.getWeekNumber(modeJava, date); return new ExprIntegerValue(formatted); } @@ -2224,8 +2371,8 @@ private ExprValue exprYearweek(ExprValue date, ExprValue mode) { } /** - * Yearweek for date implementation for ExprValue. - * When mode is not specified default value mode 0 is used. + * Yearweek for date implementation for ExprValue. When mode is not specified default value mode 0 + * is used. * * @param date ExprValue of Date/Datetime/Time/Timestamp/String type. * @return ExprValue. @@ -2248,19 +2395,22 @@ private LocalDateTime formatNow(Clock clock) { /** * Prepare LocalDateTime value. Truncate fractional second part according to the argument. - * @param fsp argument is given to specify a fractional seconds precision from 0 to 6, - * the return value includes a fractional seconds part of that many digits. + * + * @param fsp argument is given to specify a fractional seconds precision from 0 to 6, the return + * value includes a fractional seconds part of that many digits. * @return LocalDateTime object. 
*/ - private LocalDateTime formatNow(Clock clock, Integer fsp) { + private LocalDateTime formatNow(Clock clock, Integer fsp) { var res = LocalDateTime.now(clock); var defaultPrecision = 9; // There are 10^9 nanoseconds in one second if (fsp < 0 || fsp > 6) { // Check that the argument is in the allowed range [0, 6] throw new IllegalArgumentException( String.format("Invalid `fsp` value: %d, allowed 0 to 6", fsp)); } - var nano = new BigDecimal(res.getNano()) - .setScale(fsp - defaultPrecision, RoundingMode.DOWN).intValue(); + var nano = + new BigDecimal(res.getNano()) + .setScale(fsp - defaultPrecision, RoundingMode.DOWN) + .intValue(); return res.withNano(nano); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java b/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java index 3df8489b20..5170d49fc7 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.opensearch.sql.data.model.ExprValueUtils.getIntegerValue; @@ -45,7 +44,8 @@ public void register(BuiltinFunctionRepository repository) { } private DefaultFunctionResolver interval() { - return define(BuiltinFunctionName.INTERVAL.getName(), + return define( + BuiltinFunctionName.INTERVAL.getName(), impl(nullMissingHandling(IntervalClause::interval), INTERVAL, INTEGER, STRING), impl(nullMissingHandling(IntervalClause::interval), INTERVAL, LONG, STRING)); } diff --git a/core/src/main/java/org/opensearch/sql/expression/env/Environment.java b/core/src/main/java/org/opensearch/sql/expression/env/Environment.java index d96d0c0a50..b1377f22ae 100644 --- a/core/src/main/java/org/opensearch/sql/expression/env/Environment.java +++ b/core/src/main/java/org/opensearch/sql/expression/env/Environment.java @@ -3,33 +3,30 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.env; /** * The definition of the environment. - * @param the type of expression + * + * @param the type of expression * @param the type of expression value */ public interface Environment { - /** - * resolve the value of expression from the environment. - */ + /** resolve the value of expression from the environment. */ V resolve(E var); /** * Extend the environment. * - * @param env environment - * @param expr expression. - * @param value expression value. - * @param the type of expression + * @param env environment + * @param expr expression. + * @param value expression value. + * @param the type of expression * @param the type of expression value * @return extended environment. */ - static Environment extendEnv( - Environment env, E expr, V value) { + static Environment extendEnv(Environment env, E expr, V value) { return var -> { if (var.equals(expr)) { return value; diff --git a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java index 728712f537..f50fa927b8 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java @@ -12,15 +12,11 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Builtin Function Name. - */ +/** Builtin Function Name. */ @Getter @RequiredArgsConstructor public enum BuiltinFunctionName { - /** - * Mathematical Functions. - */ + /** Mathematical Functions. */ ABS(FunctionName.of("abs")), CEIL(FunctionName.of("ceil")), CEILING(FunctionName.of("ceiling")), @@ -59,9 +55,7 @@ public enum BuiltinFunctionName { SIN(FunctionName.of("sin")), TAN(FunctionName.of("tan")), - /** - * Date and Time Functions. - */ + /** Date and Time Functions. 
*/ ADDDATE(FunctionName.of("adddate")), ADDTIME(FunctionName.of("addtime")), CONVERT_TZ(FunctionName.of("convert_tz")), @@ -135,14 +129,10 @@ public enum BuiltinFunctionName { LOCALTIMESTAMP(FunctionName.of("localtimestamp")), SYSDATE(FunctionName.of("sysdate")), - /** - * Text Functions. - */ + /** Text Functions. */ TOSTRING(FunctionName.of("tostring")), - /** - * Arithmetic Operators. - */ + /** Arithmetic Operators. */ ADD(FunctionName.of("+")), ADDFUNCTION(FunctionName.of("add")), DIVIDE(FunctionName.of("/")), @@ -155,9 +145,7 @@ public enum BuiltinFunctionName { SUBTRACT(FunctionName.of("-")), SUBTRACTFUNCTION(FunctionName.of("subtract")), - /** - * Boolean Operators. - */ + /** Boolean Operators. */ AND(FunctionName.of("and")), OR(FunctionName.of("or")), XOR(FunctionName.of("xor")), @@ -171,9 +159,7 @@ public enum BuiltinFunctionName { LIKE(FunctionName.of("like")), NOT_LIKE(FunctionName.of("not like")), - /** - * Aggregation Function. - */ + /** Aggregation Function. */ AVG(FunctionName.of("avg")), SUM(FunctionName.of("sum")), COUNT(FunctionName.of("count")), @@ -192,9 +178,7 @@ public enum BuiltinFunctionName { // Not always an aggregation query NESTED(FunctionName.of("nested")), - /** - * Text Functions. - */ + /** Text Functions. */ ASCII(FunctionName.of("ascii")), CONCAT(FunctionName.of("concat")), CONCAT_WS(FunctionName.of("concat_ws")), @@ -215,9 +199,7 @@ public enum BuiltinFunctionName { TRIM(FunctionName.of("trim")), UPPER(FunctionName.of("upper")), - /** - * NULL Test. - */ + /** NULL Test. */ IS_NULL(FunctionName.of("is null")), IS_NOT_NULL(FunctionName.of("is not null")), IFNULL(FunctionName.of("ifnull")), @@ -231,9 +213,7 @@ public enum BuiltinFunctionName { INTERVAL(FunctionName.of("interval")), - /** - * Data Type Convert Function. - */ + /** Data Type Convert Function. 
*/ CAST_TO_STRING(FunctionName.of("cast_to_string")), CAST_TO_BYTE(FunctionName.of("cast_to_byte")), CAST_TO_SHORT(FunctionName.of("cast_to_short")), @@ -248,9 +228,7 @@ public enum BuiltinFunctionName { CAST_TO_DATETIME(FunctionName.of("cast_to_datetime")), TYPEOF(FunctionName.of("typeof")), - /** - * Relevance Function. - */ + /** Relevance Function. */ MATCH(FunctionName.of("match")), SIMPLE_QUERY_STRING(FunctionName.of("simple_query_string")), MATCH_PHRASE(FunctionName.of("match_phrase")), @@ -264,9 +242,7 @@ public enum BuiltinFunctionName { SCOREQUERY(FunctionName.of("scorequery")), SCORE_QUERY(FunctionName.of("score_query")), - /** - * Legacy Relevance Function. - */ + /** Legacy Relevance Function. */ QUERY(FunctionName.of("query")), MATCH_QUERY(FunctionName.of("match_query")), MATCHQUERY(FunctionName.of("matchquery")), diff --git a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java index 0eb11a9280..2e16d5f01f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java @@ -38,10 +38,8 @@ import org.opensearch.sql.storage.StorageEngine; /** - * Builtin Function Repository. - * Repository registers datasource specific functions under datasource namespace and - * universal functions under default namespace. - * + * Builtin Function Repository. Repository registers datasource specific functions under datasource + * namespace and universal functions under default namespace. */ public class BuiltinFunctionRepository { @@ -96,23 +94,20 @@ public void register(FunctionResolver resolver) { functionResolverMap.put(resolver.getFunctionName(), resolver); } - /** - * Compile FunctionExpression using core function resolver. 
- * - */ - public FunctionImplementation compile(FunctionProperties functionProperties, - FunctionName functionName, List expressions) { + /** Compile FunctionExpression using core function resolver. */ + public FunctionImplementation compile( + FunctionProperties functionProperties, + FunctionName functionName, + List expressions) { return compile(functionProperties, Collections.emptyList(), functionName, expressions); } - - /** - * Compile FunctionExpression within {@link StorageEngine} provided {@link FunctionResolver}. - */ - public FunctionImplementation compile(FunctionProperties functionProperties, - Collection dataSourceFunctionResolver, - FunctionName functionName, - List expressions) { + /** Compile FunctionExpression within {@link StorageEngine} provided {@link FunctionResolver}. */ + public FunctionImplementation compile( + FunctionProperties functionProperties, + Collection dataSourceFunctionResolver, + FunctionName functionName, + List expressions) { FunctionBuilder resolvedFunctionBuilder = resolve( dataSourceFunctionResolver, @@ -134,8 +129,9 @@ public FunctionImplementation compile(FunctionProperties functionProperties, public FunctionBuilder resolve( Collection dataSourceFunctionResolver, FunctionSignature functionSignature) { - Map dataSourceFunctionMap = dataSourceFunctionResolver.stream() - .collect(Collectors.toMap(FunctionResolver::getFunctionName, t -> t)); + Map dataSourceFunctionMap = + dataSourceFunctionResolver.stream() + .collect(Collectors.toMap(FunctionResolver::getFunctionName, t -> t)); // first, resolve in datasource provide function resolver. // second, resolve in builtin function resolver. @@ -171,14 +167,13 @@ private Optional resolve( } /** - * Wrap resolved function builder's arguments by cast function to cast input expression value - * to value of target type at runtime. For example, suppose unresolved signature is - * equal(BOOL,STRING) and its resolved function builder is F with signature equal(BOOL,BOOL). 
- * In this case, wrap F and return equal(BOOL, cast_to_bool(STRING)). + * Wrap resolved function builder's arguments by cast function to cast input expression value to + * value of target type at runtime. For example, suppose unresolved signature is + * equal(BOOL,STRING) and its resolved function builder is F with signature equal(BOOL,BOOL). In + * this case, wrap F and return equal(BOOL, cast_to_bool(STRING)). */ - private FunctionBuilder castArguments(List sourceTypes, - List targetTypes, - FunctionBuilder funcBuilder) { + private FunctionBuilder castArguments( + List sourceTypes, List targetTypes, FunctionBuilder funcBuilder) { return (fp, arguments) -> { List argsCasted = new ArrayList<>(); for (int i = 0; i < arguments.size(); i++) { @@ -208,10 +203,10 @@ private boolean isCastRequired(ExprType sourceType, ExprType targetType) { private Function cast(Expression arg, ExprType targetType) { FunctionName castFunctionName = getCastFunctionName(targetType); if (castFunctionName == null) { - throw new ExpressionEvaluationException(StringUtils.format( - "Type conversion to type %s is not supported", targetType)); + throw new ExpressionEvaluationException( + StringUtils.format("Type conversion to type %s is not supported", targetType)); } - return functionProperties -> (Expression) compile(functionProperties, - castFunctionName, List.of(arg)); + return functionProperties -> + (Expression) compile(functionProperties, castFunctionName, List.of(arg)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java b/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java index a28fa7e0ad..5d0f31594b 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java @@ -18,52 +18,53 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; /** - * The Function 
Resolver hold the overload {@link FunctionBuilder} implementation. - * is composed by {@link FunctionName} which identified the function name - * and a map of {@link FunctionSignature} and {@link FunctionBuilder} - * to represent the overloaded implementation + * The Function Resolver hold the overload {@link FunctionBuilder} implementation. is composed by + * {@link FunctionName} which identified the function name and a map of {@link FunctionSignature} + * and {@link FunctionBuilder} to represent the overloaded implementation */ @Builder @RequiredArgsConstructor public class DefaultFunctionResolver implements FunctionResolver { - @Getter - private final FunctionName functionName; + @Getter private final FunctionName functionName; + @Singular("functionBundle") private final Map functionBundle; /** - * Resolve the {@link FunctionBuilder} by using input {@link FunctionSignature}. - * If the {@link FunctionBuilder} exactly match the input {@link FunctionSignature}, return it. - * If applying the widening rule, found the most match one, return it. - * If nothing found, throw {@link ExpressionEvaluationException} + * Resolve the {@link FunctionBuilder} by using input {@link FunctionSignature}. If the {@link + * FunctionBuilder} exactly match the input {@link FunctionSignature}, return it. If applying the + * widening rule, found the most match one, return it. 
If nothing found, throw {@link + * ExpressionEvaluationException} * * @return function signature and its builder */ @Override public Pair resolve(FunctionSignature unresolvedSignature) { - PriorityQueue> functionMatchQueue = new PriorityQueue<>( - Map.Entry.comparingByKey()); + PriorityQueue> functionMatchQueue = + new PriorityQueue<>(Map.Entry.comparingByKey()); for (FunctionSignature functionSignature : functionBundle.keySet()) { functionMatchQueue.add( - new AbstractMap.SimpleEntry<>(unresolvedSignature.match(functionSignature), - functionSignature)); + new AbstractMap.SimpleEntry<>( + unresolvedSignature.match(functionSignature), functionSignature)); } Map.Entry bestMatchEntry = functionMatchQueue.peek(); if (FunctionSignature.isVarArgFunction(bestMatchEntry.getValue().getParamTypeList()) - && (unresolvedSignature.getParamTypeList().isEmpty() + && (unresolvedSignature.getParamTypeList().isEmpty() || unresolvedSignature.getParamTypeList().size() > 9)) { throw new ExpressionEvaluationException( - String.format("%s function expected 1-9 arguments, but got %d", - functionName, unresolvedSignature.getParamTypeList().size())); + String.format( + "%s function expected 1-9 arguments, but got %d", + functionName, unresolvedSignature.getParamTypeList().size())); } if (FunctionSignature.NOT_MATCH.equals(bestMatchEntry.getKey()) - && !FunctionSignature.isVarArgFunction(bestMatchEntry.getValue().getParamTypeList())) { + && !FunctionSignature.isVarArgFunction(bestMatchEntry.getValue().getParamTypeList())) { throw new ExpressionEvaluationException( - String.format("%s function expected %s, but get %s", functionName, + String.format( + "%s function expected %s, but get %s", + functionName, formatFunctions(functionBundle.keySet()), - unresolvedSignature.formatTypes() - )); + unresolvedSignature.formatTypes())); } else { FunctionSignature resolvedSignature = bestMatchEntry.getValue(); return Pair.of(resolvedSignature, functionBundle.get(resolvedSignature)); @@ -71,7 +72,8 @@ 
public Pair resolve(FunctionSignature unreso } private String formatFunctions(Set functionSignatures) { - return functionSignatures.stream().map(FunctionSignature::formatTypes) + return functionSignatures.stream() + .map(FunctionSignature::formatTypes) .collect(Collectors.joining(",", "{", "}")); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java index b6e32a1d27..a529885c16 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java @@ -3,15 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.util.List; import org.opensearch.sql.expression.Expression; /** - * The definition of function which create {@link FunctionImplementation} - * from input {@link Expression} list. + * The definition of function which create {@link FunctionImplementation} from input {@link + * Expression} list. */ public interface FunctionBuilder { @@ -19,7 +18,7 @@ public interface FunctionBuilder { * Create {@link FunctionImplementation} from input {@link Expression} list. * * @param functionProperties context for function execution. - * @param arguments {@link Expression} list. + * @param arguments {@link Expression} list. 
* @return {@link FunctionImplementation} */ FunctionImplementation apply(FunctionProperties functionProperties, List arguments); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java index c57d96caea..8ebbfd3a3c 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.util.Arrays; @@ -21,21 +20,19 @@ import org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.DefaultFunctionResolver.DefaultFunctionResolverBuilder; -/** - * Function Define Utility. - */ +/** Function Define Utility. */ @UtilityClass public class FunctionDSL { /** * Define overloaded function with implementation. * * @param functionName function name. - * @param functions a list of function implementation. + * @param functions a list of function implementation. * @return FunctionResolver. */ - public static DefaultFunctionResolver define(FunctionName functionName, - SerializableFunction>... functions) { + public static DefaultFunctionResolver define( + FunctionName functionName, + SerializableFunction>... functions) { return define(functionName, List.of(functions)); } @@ -43,11 +40,13 @@ public static DefaultFunctionResolver define(FunctionName functionName, * Define overloaded function with implementation. * * @param functionName function name. - * @param functions a list of function implementation. + * @param functions a list of function implementation. * @return FunctionResolver. 
*/ - public static DefaultFunctionResolver define(FunctionName functionName, List< - SerializableFunction>> functions) { + public static DefaultFunctionResolver define( + FunctionName functionName, + List>> + functions) { DefaultFunctionResolverBuilder builder = DefaultFunctionResolver.builder(); builder.functionName(functionName); @@ -58,7 +57,6 @@ public static DefaultFunctionResolver define(FunctionName functionName, List< return builder.build(); } - /** * Implementation of no args function that uses FunctionProperties. * @@ -67,8 +65,8 @@ public static DefaultFunctionResolver define(FunctionName functionName, List< * @return no args function implementation. */ public static SerializableFunction> - implWithProperties(SerializableFunction function, - ExprType returnType) { + implWithProperties( + SerializableFunction function, ExprType returnType) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Collections.emptyList()); @@ -95,53 +93,54 @@ public String toString() { } /** - * Implementation of a function that takes one argument, returns a value, and - * requires FunctionProperties to complete. + * Implementation of a function that takes one argument, returns a value, and requires + * FunctionProperties to complete. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. * @param argsType argument type. * @return Unary Function Implementation. 
*/ public static SerializableFunction> implWithProperties( - SerializableBiFunction function, - ExprType returnType, - ExprType argsType) { + SerializableBiFunction function, + ExprType returnType, + ExprType argsType) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Collections.singletonList(argsType)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue value = arguments.get(0).valueOf(valueEnv); - return function.apply(functionProperties, value); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s)", functionName, - arguments.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue value = arguments.get(0).valueOf(valueEnv); + return function.apply(functionProperties, value); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s)", + functionName, + arguments.stream().map(Object::toString).collect(Collectors.joining(", "))); + } + }; return Pair.of(functionSignature, functionBuilder); }; } /** - * Implementation of a function that takes two arguments, returns a value, and - * requires FunctionProperties to complete. + * Implementation of a function that takes two arguments, returns a value, and requires + * FunctionProperties to complete. * - * @param function {@link ExprValue} based Binary function. + * @param function {@link ExprValue} based Binary function. * @param returnType return type. * @param args1Type first argument type. * @param args2Type second argument type. 
@@ -149,45 +148,46 @@ public String toString() { */ public static SerializableFunction> implWithProperties( - SerializableTriFunction function, - ExprType returnType, - ExprType args1Type, - ExprType args2Type) { + SerializableTriFunction function, + ExprType returnType, + ExprType args1Type, + ExprType args2Type) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Arrays.asList(args1Type, args2Type)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - return function.apply(functionProperties, arg1, arg2); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s)", functionName, - arguments.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + return function.apply(functionProperties, arg1, arg2); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s)", + functionName, + arguments.stream().map(Object::toString).collect(Collectors.joining(", "))); + } + }; return Pair.of(functionSignature, functionBuilder); }; } /** - * Implementation of a function that takes three arguments, returns a value, and - * requires FunctionProperties to complete. + * Implementation of a function that takes three arguments, returns a value, and requires + * FunctionProperties to complete. 
* - * @param function {@link ExprValue} based Binary function. + * @param function {@link ExprValue} based Binary function. * @param returnType return type. * @param args1Type first argument type. * @param args2Type second argument type. @@ -196,43 +196,40 @@ public String toString() { */ public static SerializableFunction> implWithProperties( - SerializableQuadFunction< - FunctionProperties, - ExprValue, - ExprValue, - ExprValue, - ExprValue> function, - ExprType returnType, - ExprType args1Type, - ExprType args2Type, - ExprType args3Type) { + SerializableQuadFunction + function, + ExprType returnType, + ExprType args1Type, + ExprType args2Type, + ExprType args3Type) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Arrays.asList(args1Type, args2Type, args3Type)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - ExprValue arg3 = arguments.get(2).valueOf(valueEnv); - return function.apply(functionProperties, arg1, arg2, arg3); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s)", functionName, - arguments.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + ExprValue arg3 = arguments.get(2).valueOf(valueEnv); + return function.apply(functionProperties, arg1, arg2, arg3); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return 
String.format( + "%s(%s)", + functionName, + arguments.stream().map(Object::toString).collect(Collectors.joining(", "))); + } + }; return Pair.of(functionSignature, functionBuilder); }; } @@ -240,28 +237,25 @@ public String toString() { /** * No Arg Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. * @return Unary Function Implementation. */ public static SerializableFunction> impl( - SerializableNoArgFunction function, - ExprType returnType) { + SerializableNoArgFunction function, ExprType returnType) { return implWithProperties(fp -> function.get(), returnType); } /** * Unary Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. - * @param argsType argument type. + * @param argsType argument type. * @return Unary Function Implementation. */ public static SerializableFunction> impl( - SerializableFunction function, - ExprType returnType, - ExprType argsType) { + SerializableFunction function, ExprType returnType, ExprType argsType) { return implWithProperties((fp, arg) -> function.apply(arg), returnType, argsType); } @@ -269,10 +263,10 @@ public static SerializableFunction> impl( @@ -281,17 +275,17 @@ public static SerializableFunction - function.apply(arg1, arg2), returnType, args1Type, args2Type); + return implWithProperties( + (fp, arg1, arg2) -> function.apply(arg1, arg2), returnType, args1Type, args2Type); } /** * Triple Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. - * @param args1Type argument type. - * @param args2Type argument type. + * @param args1Type argument type. + * @param args2Type argument type. * @return Binary Function Implementation. 
*/ public static SerializableFunction> impl( @@ -305,26 +299,31 @@ public static SerializableFunction new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - ExprValue arg3 = arguments.get(2).valueOf(valueEnv); - return function.apply(arg1, arg2, arg3); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s, %s, %s)", functionName, arguments.get(0).toString(), - arguments.get(1).toString(), arguments.get(2).toString()); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + ExprValue arg3 = arguments.get(2).valueOf(valueEnv); + return function.apply(arg1, arg2, arg3); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s, %s, %s)", + functionName, + arguments.get(0).toString(), + arguments.get(1).toString(), + arguments.get(2).toString()); + } + }; return Pair.of(functionSignature, functionBuilder); }; } @@ -332,11 +331,11 @@ public String toString() { /** * Quadruple Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. - * @param args1Type argument type. - * @param args2Type argument type. - * @param args3Type argument type. + * @param args1Type argument type. + * @param args2Type argument type. + * @param args3Type argument type. * @return Quadruple Function Implementation. 
*/ public static SerializableFunction> impl( @@ -349,42 +348,41 @@ public static SerializableFunction { FunctionSignature functionSignature = - new FunctionSignature(functionName, Arrays.asList( - args1Type, - args2Type, - args3Type, - args4Type)); + new FunctionSignature( + functionName, Arrays.asList(args1Type, args2Type, args3Type, args4Type)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - ExprValue arg3 = arguments.get(2).valueOf(valueEnv); - ExprValue arg4 = arguments.get(3).valueOf(valueEnv); - return function.apply(arg1, arg2, arg3, arg4); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s, %s, %s, %s)", functionName, arguments.get(0).toString(), - arguments.get(1).toString(), - arguments.get(2).toString(), - arguments.get(3).toString()); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + ExprValue arg3 = arguments.get(2).valueOf(valueEnv); + ExprValue arg4 = arguments.get(3).valueOf(valueEnv); + return function.apply(arg1, arg2, arg3, arg4); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s, %s, %s, %s)", + functionName, + arguments.get(0).toString(), + arguments.get(1).toString(), + arguments.get(2).toString(), + arguments.get(3).toString()); + } + }; return Pair.of(functionSignature, functionBuilder); }; } - /** - * Wrapper the unary ExprValue function with default NULL and MISSING handling. 
- */ + /** Wrapper the unary ExprValue function with default NULL and MISSING handling. */ public static SerializableFunction nullMissingHandling( SerializableFunction function) { return value -> { @@ -398,9 +396,7 @@ public static SerializableFunction nullMissingHandling( }; } - /** - * Wrapper the binary ExprValue function with default NULL and MISSING handling. - */ + /** Wrapper the binary ExprValue function with default NULL and MISSING handling. */ public static SerializableBiFunction nullMissingHandling( SerializableBiFunction function) { return (v1, v2) -> { @@ -414,9 +410,7 @@ public static SerializableBiFunction nullMissin }; } - /** - * Wrapper the triple ExprValue function with default NULL and MISSING handling. - */ + /** Wrapper the triple ExprValue function with default NULL and MISSING handling. */ public SerializableTriFunction nullMissingHandling( SerializableTriFunction function) { return (v1, v2, v3) -> { @@ -431,12 +425,12 @@ public SerializableTriFunction nullM } /** - * Wrapper the unary ExprValue function that is aware of FunctionProperties, - * with default NULL and MISSING handling. + * Wrapper the unary ExprValue function that is aware of FunctionProperties, with default NULL and + * MISSING handling. */ public static SerializableBiFunction - nullMissingHandlingWithProperties( - SerializableBiFunction implementation) { + nullMissingHandlingWithProperties( + SerializableBiFunction implementation) { return (functionProperties, v1) -> { if (v1.isMissing()) { return ExprValueUtils.missingValue(); @@ -453,8 +447,9 @@ public SerializableTriFunction nullM * with default NULL and MISSING handling. 
*/ public static SerializableTriFunction - nullMissingHandlingWithProperties( - SerializableTriFunction implementation) { + nullMissingHandlingWithProperties( + SerializableTriFunction + implementation) { return (functionProperties, v1, v2) -> { if (v1.isMissing() || v2.isMissing()) { return ExprValueUtils.missingValue(); @@ -471,18 +466,10 @@ public SerializableTriFunction nullM * with default NULL and MISSING handling. */ public static SerializableQuadFunction< - FunctionProperties, - ExprValue, - ExprValue, - ExprValue, - ExprValue> + FunctionProperties, ExprValue, ExprValue, ExprValue, ExprValue> nullMissingHandlingWithProperties( - SerializableQuadFunction< - FunctionProperties, - ExprValue, - ExprValue, - ExprValue, - ExprValue> implementation) { + SerializableQuadFunction + implementation) { return (functionProperties, v1, v2, v3) -> { if (v1.isMissing() || v2.isMissing() || v3.isMissing()) { return ExprValueUtils.missingValue(); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java index d829e01225..4fd265a890 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java @@ -3,24 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.util.List; import org.opensearch.sql.expression.Expression; -/** - * The definition of Function Implementation. - */ +/** The definition of Function Implementation. */ public interface FunctionImplementation { - /** - * Get Function Name. - */ + /** Get Function Name. */ FunctionName getFunctionName(); - /** - * Get Function Arguments. - */ + /** Get Function Arguments. 
*/ List getArguments(); } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java index cb3d5fab92..ae2987a164 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; @@ -11,14 +10,11 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * The definition of Function Name. - */ +/** The definition of Function Name. */ @EqualsAndHashCode @RequiredArgsConstructor public class FunctionName implements Serializable { - @Getter - private final String functionName; + @Getter private final String functionName; public static FunctionName of(String functionName) { return new FunctionName(functionName.toLowerCase()); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java index 4222748051..100c98bd38 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java @@ -19,9 +19,7 @@ public class FunctionProperties implements Serializable { private final Instant nowInstant; private final ZoneId currentZoneId; - /** - * By default, use current time and current timezone. - */ + /** By default, use current time and current timezone. */ public FunctionProperties() { nowInstant = Instant.now(); currentZoneId = ZoneId.systemDefault(); @@ -29,6 +27,7 @@ public FunctionProperties() { /** * Method to access current system clock. + * * @return a ticking clock that tells the time. 
*/ public Clock getSystemClock() { @@ -36,29 +35,28 @@ public Clock getSystemClock() { } /** - * Method to get time when query began execution. - * Clock class combines an instant Supplier and a time zone. - * @return a fixed clock that returns the time execution started at. + * Method to get time when query began execution. Clock class combines an instant Supplier and a + * time zone. * + * @return a fixed clock that returns the time execution started at. */ public Clock getQueryStartClock() { return Clock.fixed(nowInstant, currentZoneId); } - /** - * Use when compiling functions that do not rely on function properties. - */ - public static final FunctionProperties None = new FunctionProperties() { - @Override - public Clock getSystemClock() { - throw new UnexpectedCallException(); - } + /** Use when compiling functions that do not rely on function properties. */ + public static final FunctionProperties None = + new FunctionProperties() { + @Override + public Clock getSystemClock() { + throw new UnexpectedCallException(); + } - @Override - public Clock getQueryStartClock() { - throw new UnexpectedCallException(); - } - }; + @Override + public Clock getQueryStartClock() { + throw new UnexpectedCallException(); + } + }; class UnexpectedCallException extends RuntimeException { public UnexpectedCallException() { diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java index 1635b6f846..eaede1da7e 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java @@ -8,8 +8,8 @@ import org.apache.commons.lang3.tuple.Pair; /** - * An interface for any class that can provide a {@ref FunctionBuilder} - * given a {@ref FunctionSignature}. + * An interface for any class that can provide a {@ref FunctionBuilder} given a {@ref + * FunctionSignature}. 
*/ public interface FunctionResolver { Pair resolve(FunctionSignature unresolvedSignature); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java index 0c59d71c25..e1246cde28 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java @@ -15,9 +15,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.data.type.WideningTypeRule; -/** - * Function signature is composed by function name and arguments list. - */ +/** Function signature is composed by function name and arguments list. */ @Getter @RequiredArgsConstructor @EqualsAndHashCode @@ -31,9 +29,10 @@ public class FunctionSignature { /** * calculate the function signature match degree. * - * @return EXACTLY_MATCH: exactly match - * NOT_MATCH: not match - * By widening rule, the small number means better match + * @return
+ * EXACTLY_MATCH: exactly match
+ * NOT_MATCH: not match
+ * By widening rule, the small number means better match */ public int match(FunctionSignature functionSignature) { List functionTypeList = functionSignature.getParamTypeList(); @@ -60,18 +59,14 @@ public int match(FunctionSignature functionSignature) { return matchDegree; } - /** - * util function for formatted arguments list. - */ + /** util function for formatted arguments list. */ public String formatTypes() { return getParamTypeList().stream() .map(ExprType::typeName) .collect(Collectors.joining(",", "[", "]")); } - /** - * util function - returns true if function has variable arguments. - */ + /** util function - returns true if function has variable arguments. */ protected static boolean isVarArgFunction(List argTypes) { return argTypes.size() == 1 && argTypes.get(0) == ARRAY; } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java b/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java index c5fcb010f5..8d8928c16a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java @@ -22,9 +22,7 @@ @UtilityClass public class OpenSearchFunctions { - /** - * Add functions specific to OpenSearch to repository. - */ + /** Add functions specific to OpenSearch to repository. 
*/ public void register(BuiltinFunctionRepository repository) { repository.register(match_bool_prefix()); repository.register(multi_match(BuiltinFunctionName.MULTI_MATCH)); @@ -101,19 +99,20 @@ private static FunctionResolver nested() { @Override public Pair resolve( FunctionSignature unresolvedSignature) { - return Pair.of(unresolvedSignature, + return Pair.of( + unresolvedSignature, (functionProperties, arguments) -> - new FunctionExpression(BuiltinFunctionName.NESTED.getName(), arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - return valueEnv.resolve(getArguments().get(0)); - } - - @Override - public ExprType type() { - return getArguments().get(0).type(); - } - }); + new FunctionExpression(BuiltinFunctionName.NESTED.getName(), arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + return valueEnv.resolve(getArguments().get(0)); + } + + @Override + public ExprType type() { + return getArguments().get(0).type(); + } + }); } @Override @@ -123,9 +122,6 @@ public FunctionName getFunctionName() { }; } - - - private static FunctionResolver score(BuiltinFunctionName score) { FunctionName funcName = score.getName(); return new RelevanceFunctionResolver(funcName); @@ -135,12 +131,11 @@ public static class OpenSearchFunction extends FunctionExpression { private final FunctionName functionName; private final List arguments; - @Getter - @Setter - private boolean isScoreTracked; + @Getter @Setter private boolean isScoreTracked; /** * Required argument constructor. 
+ * * @param functionName name of the function * @param arguments a list of expressions */ @@ -153,9 +148,10 @@ public OpenSearchFunction(FunctionName functionName, List arguments) @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException(String.format( - "OpenSearch defined function [%s] is only supported in WHERE and HAVING clause.", - functionName)); + throw new UnsupportedOperationException( + String.format( + "OpenSearch defined function [%s] is only supported in WHERE and HAVING clause.", + functionName)); } @Override @@ -165,10 +161,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java b/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java index ef0ac9226c..ae882897d0 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java @@ -14,17 +14,18 @@ import org.opensearch.sql.exception.SemanticCheckException; @RequiredArgsConstructor -public class RelevanceFunctionResolver - implements FunctionResolver { +public class RelevanceFunctionResolver implements FunctionResolver { - @Getter - private final FunctionName functionName; + @Getter private final FunctionName functionName; @Override public Pair resolve(FunctionSignature unresolvedSignature) { 
if (!unresolvedSignature.getFunctionName().equals(functionName)) { - throw new SemanticCheckException(String.format("Expected '%s' but got '%s'", - functionName.getFunctionName(), unresolvedSignature.getFunctionName().getFunctionName())); + throw new SemanticCheckException( + String.format( + "Expected '%s' but got '%s'", + functionName.getFunctionName(), + unresolvedSignature.getFunctionName().getFunctionName())); } List paramTypes = unresolvedSignature.getParamTypeList(); // Check if all but the first parameter are of type STRING. @@ -36,13 +37,15 @@ public Pair resolve(FunctionSignature unreso } } - FunctionBuilder buildFunction = (functionProperties, args) - -> new OpenSearchFunctions.OpenSearchFunction(functionName, args); + FunctionBuilder buildFunction = + (functionProperties, args) -> + new OpenSearchFunctions.OpenSearchFunction(functionName, args); return Pair.of(unresolvedSignature, buildFunction); } - /** Returns a helpful error message when expected parameter type does not match the - * specified parameter type. + /** + * Returns a helpful error message when expected parameter type does not match the specified + * parameter type. * * @param i 0-based index of the parameter in a function signature. * @param paramType the type of the ith parameter at run-time. @@ -50,7 +53,8 @@ public Pair resolve(FunctionSignature unreso * @return A user-friendly error message that informs of the type difference. 
*/ private String getWrongParameterErrorMessage(int i, ExprType paramType, ExprType expectedType) { - return String.format("Expected type %s instead of %s for parameter #%d", + return String.format( + "Expected type %s instead of %s for parameter #%d", expectedType.typeName(), paramType.typeName(), i + 1); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java index 5b3aaf31f3..9f182e4c85 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java @@ -3,14 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; import java.util.function.BiFunction; -/** - * Serializable BiFunction. - */ -public interface SerializableBiFunction extends BiFunction, Serializable { -} +/** Serializable BiFunction. 
*/ +public interface SerializableBiFunction extends BiFunction, Serializable {} diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java index 467c034c39..fb3e2f2cfb 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java @@ -3,11 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; import java.util.function.Function; -public interface SerializableFunction extends Function, Serializable { -} +public interface SerializableFunction extends Function, Serializable {} diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java index e68d6084b4..6eaf699bf9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java @@ -3,14 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; import java.util.function.Supplier; -/** - * Serializable no argument function. - */ -public interface SerializableNoArgFunction extends Supplier, Serializable { -} +/** Serializable no argument function. 
*/ +public interface SerializableNoArgFunction extends Supplier, Serializable {} diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java index 056a17d5b3..7285d9a32f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java index 911012fcdb..e980b1c82a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; diff --git a/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java b/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java index f35ffe4898..b8b90bf5e3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java @@ -9,11 +9,8 @@ import org.opensearch.sql.storage.Table; -/** - * Interface for table function which returns Table when executed. - */ +/** Interface for table function which returns Table when executed. 
*/ public interface TableFunctionImplementation extends FunctionImplementation { Table applyArguments(); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java index e25c5cda20..82b91e1d34 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.arthmetic; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; @@ -30,11 +29,11 @@ import org.opensearch.sql.expression.function.FunctionName; /** - * The definition of arithmetic function - * add, Accepts two numbers and produces a number. - * subtract, Accepts two numbers and produces a number. - * multiply, Accepts two numbers and produces a number. - * divide, Accepts two numbers and produces a number. + * The definition of arithmetic function
+ * add, Accepts two numbers and produces a number.
+ * subtract, Accepts two numbers and produces a number.
+ * multiply, Accepts two numbers and produces a number.
+ * divide, Accepts two numbers and produces a number.
* module, Accepts two numbers and produces a number. */ @UtilityClass @@ -59,33 +58,49 @@ public static void register(BuiltinFunctionRepository repository) { } /** - * Definition of add(x, y) function. - * Returns the number x plus number y - * The supported signature of add function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of add(x, y) function.
+ * Returns the number x plus number y
+ * The supported signature of add function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver addBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> new ExprByteValue(v1.byteValue() + v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> new ExprShortValue(v1.shortValue() + v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprIntegerValue(Math.addExact(v1.integerValue(), v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> new ExprLongValue(Math.addExact(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> new ExprFloatValue(v1.floatValue() + v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(v1.doubleValue() + v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + return define( + functionName, + impl( + nullMissingHandling((v1, v2) -> new ExprByteValue(v1.byteValue() + v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling((v1, v2) -> new ExprShortValue(v1.shortValue() + v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + new ExprIntegerValue(Math.addExact(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> new ExprLongValue(Math.addExact(v1.longValue(), v2.longValue()))), + LONG, + LONG, + LONG), + impl( + nullMissingHandling((v1, v2) -> new ExprFloatValue(v1.floatValue() + v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(v1.doubleValue() + v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver add() { @@ -97,39 +112,69 @@ private static DefaultFunctionResolver addFunction() { } /** - * Definition of divide(x, y) function. 
- * Returns the number x divided by number y - * The supported signature of divide function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of divide(x, y) function.
+ * Returns the number x divided by number y
+ * The supported signature of divide function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver divideBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> v2.byteValue() == 0 ? ExprNullValue.of() : - new ExprByteValue(v1.byteValue() / v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprShortValue(v1.shortValue() / v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> v2.integerValue() == 0 ? ExprNullValue.of() : - new ExprIntegerValue(v1.integerValue() / v2.integerValue())), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> v2.longValue() == 0 ? ExprNullValue.of() : - new ExprLongValue(v1.longValue() / v2.longValue())), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> v2.floatValue() == 0 ? ExprNullValue.of() : - new ExprFloatValue(v1.floatValue() / v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> v2.doubleValue() == 0 ? ExprNullValue.of() : - new ExprDoubleValue(v1.doubleValue() / v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + return define( + functionName, + impl( + nullMissingHandling( + (v1, v2) -> + v2.byteValue() == 0 + ? ExprNullValue.of() + : new ExprByteValue(v1.byteValue() / v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprShortValue(v1.shortValue() / v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.integerValue() == 0 + ? ExprNullValue.of() + : new ExprIntegerValue(v1.integerValue() / v2.integerValue())), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> + v2.longValue() == 0 + ? ExprNullValue.of() + : new ExprLongValue(v1.longValue() / v2.longValue())), + LONG, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v2.floatValue() == 0 + ? 
ExprNullValue.of() + : new ExprFloatValue(v1.floatValue() / v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.doubleValue() == 0 + ? ExprNullValue.of() + : new ExprDoubleValue(v1.doubleValue() / v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver divide() { @@ -141,39 +186,69 @@ private static DefaultFunctionResolver divideFunction() { } /** - * Definition of modulus(x, y) function. - * Returns the number x modulo by number y - * The supported signature of modulo function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of modulus(x, y) function.
+ * Returns the number x modulo by number y
+ * The supported signature of modulo function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver modulusBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> v2.byteValue() == 0 ? ExprNullValue.of() : - new ExprByteValue(v1.byteValue() % v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprShortValue(v1.shortValue() % v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> v2.integerValue() == 0 ? ExprNullValue.of() : - new ExprIntegerValue(v1.integerValue() % v2.integerValue())), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> v2.longValue() == 0 ? ExprNullValue.of() : - new ExprLongValue(v1.longValue() % v2.longValue())), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> v2.floatValue() == 0 ? ExprNullValue.of() : - new ExprFloatValue(v1.floatValue() % v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> v2.doubleValue() == 0 ? ExprNullValue.of() : - new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + return define( + functionName, + impl( + nullMissingHandling( + (v1, v2) -> + v2.byteValue() == 0 + ? ExprNullValue.of() + : new ExprByteValue(v1.byteValue() % v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprShortValue(v1.shortValue() % v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.integerValue() == 0 + ? ExprNullValue.of() + : new ExprIntegerValue(v1.integerValue() % v2.integerValue())), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> + v2.longValue() == 0 + ? ExprNullValue.of() + : new ExprLongValue(v1.longValue() % v2.longValue())), + LONG, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v2.floatValue() == 0 + ? 
ExprNullValue.of() + : new ExprFloatValue(v1.floatValue() % v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.doubleValue() == 0 + ? ExprNullValue.of() + : new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver mod() { @@ -189,34 +264,49 @@ private static DefaultFunctionResolver modulusFunction() { } /** - * Definition of multiply(x, y) function. - * Returns the number x multiplied by number y - * The supported signature of multiply function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) - * -> wider type between types of x and y + * Definition of multiply(x, y) function.
+ * Returns the number x multiplied by number y
+ * The supported signature of multiply function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
+ * -> wider type between types of x and y */ private static DefaultFunctionResolver multiplyBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> new ExprByteValue(v1.byteValue() * v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> new ExprShortValue(v1.shortValue() * v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprIntegerValue(Math.multiplyExact(v1.integerValue(), - v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( + return define( + functionName, + impl( + nullMissingHandling((v1, v2) -> new ExprByteValue(v1.byteValue() * v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling((v1, v2) -> new ExprShortValue(v1.shortValue() * v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + new ExprIntegerValue(Math.multiplyExact(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( (v1, v2) -> new ExprLongValue(Math.multiplyExact(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> new ExprFloatValue(v1.floatValue() * v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( + LONG, + LONG, + LONG), + impl( + nullMissingHandling((v1, v2) -> new ExprFloatValue(v1.floatValue() * v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( (v1, v2) -> new ExprDoubleValue(v1.doubleValue() * v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver multiply() { @@ -228,34 +318,49 @@ private static DefaultFunctionResolver multiplyFunction() { } /** - * Definition of subtract(x, y) function. 
- * Returns the number x minus number y - * The supported signature of subtract function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of subtract(x, y) function.
+ * Returns the number x minus number y
+ * The supported signature of subtract function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver subtractBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> new ExprByteValue(v1.byteValue() - v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> new ExprShortValue(v1.shortValue() - v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprIntegerValue(Math.subtractExact(v1.integerValue(), - v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( + return define( + functionName, + impl( + nullMissingHandling((v1, v2) -> new ExprByteValue(v1.byteValue() - v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling((v1, v2) -> new ExprShortValue(v1.shortValue() - v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + new ExprIntegerValue(Math.subtractExact(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( (v1, v2) -> new ExprLongValue(Math.subtractExact(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> new ExprFloatValue(v1.floatValue() - v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( + LONG, + LONG, + LONG), + impl( + nullMissingHandling((v1, v2) -> new ExprFloatValue(v1.floatValue() - v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( (v1, v2) -> new ExprDoubleValue(v1.doubleValue() - v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver subtract() { diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java index 810d292ca2..22f4b76573 100644 --- 
a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.arthmetic; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; @@ -102,126 +101,155 @@ public static void register(BuiltinFunctionRepository repository) { * @return DefaultFunctionResolver for math functions. */ private static DefaultFunctionResolver baseMathFunction( - FunctionName functionName, SerializableFunction formula, ExprCoreType returnType) { - return define(functionName, ExprCoreType.numberTypes().stream().map(type -> - impl(nullMissingHandling(formula), returnType, type)).collect(Collectors.toList())); + FunctionName functionName, + SerializableFunction formula, + ExprCoreType returnType) { + return define( + functionName, + ExprCoreType.numberTypes().stream() + .map(type -> impl(nullMissingHandling(formula), returnType, type)) + .collect(Collectors.toList())); } /** - * Definition of abs() function. The supported signature of abs() function are INT -> INT LONG -> - * LONG FLOAT -> FLOAT DOUBLE -> DOUBLE + * Definition of abs() function.<\b>
+ * The supported signature of abs() function are
+ * INT/LONG/FLOAT/DOUBLE -> INT/LONG/FLOAT/DOUBLE */ private static DefaultFunctionResolver abs() { - return define(BuiltinFunctionName.ABS.getName(), - impl(nullMissingHandling(v -> new ExprByteValue(Math.abs(v.byteValue()))), - BYTE, BYTE), - impl(nullMissingHandling(v -> new ExprShortValue(Math.abs(v.shortValue()))), - SHORT, SHORT), - impl(nullMissingHandling(v -> new ExprIntegerValue(Math.abs(v.integerValue()))), - INTEGER, INTEGER), - impl(nullMissingHandling(v -> new ExprLongValue(Math.abs(v.longValue()))), - LONG, LONG), - impl(nullMissingHandling(v -> new ExprFloatValue(Math.abs(v.floatValue()))), - FLOAT, FLOAT), - impl(nullMissingHandling(v -> new ExprDoubleValue(Math.abs(v.doubleValue()))), - DOUBLE, DOUBLE) - ); - } - - /** - * Definition of ceil(x)/ceiling(x) function. Calculate the next highest integer that x rounds up - * to The supported signature of ceil/ceiling function is DOUBLE -> INTEGER + return define( + BuiltinFunctionName.ABS.getName(), + impl(nullMissingHandling(v -> new ExprByteValue(Math.abs(v.byteValue()))), BYTE, BYTE), + impl(nullMissingHandling(v -> new ExprShortValue(Math.abs(v.shortValue()))), SHORT, SHORT), + impl( + nullMissingHandling(v -> new ExprIntegerValue(Math.abs(v.integerValue()))), + INTEGER, + INTEGER), + impl(nullMissingHandling(v -> new ExprLongValue(Math.abs(v.longValue()))), LONG, LONG), + impl(nullMissingHandling(v -> new ExprFloatValue(Math.abs(v.floatValue()))), FLOAT, FLOAT), + impl( + nullMissingHandling(v -> new ExprDoubleValue(Math.abs(v.doubleValue()))), + DOUBLE, + DOUBLE)); + } + + /** + * Definition of ceil(x)/ceiling(x) function.<\b>
+ * Calculate the next highest integer that x rounds up to The supported signature of ceil/ceiling + * function is DOUBLE -> INTEGER */ private static DefaultFunctionResolver ceil() { - return define(BuiltinFunctionName.CEIL.getName(), - impl(nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), - LONG, DOUBLE) - ); + return define( + BuiltinFunctionName.CEIL.getName(), + impl( + nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), LONG, DOUBLE)); } private static DefaultFunctionResolver ceiling() { - return define(BuiltinFunctionName.CEILING.getName(), - impl(nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), - LONG, DOUBLE) - ); + return define( + BuiltinFunctionName.CEILING.getName(), + impl( + nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), LONG, DOUBLE)); } /** - * Definition of conv(x, a, b) function. - * Convert number x from base a to base b - * The supported signature of floor function is - * (STRING, INTEGER, INTEGER) -> STRING + * Definition of conv(x, a, b) function.<\b>
+ * Convert number x from base a to base b
+ * The supported signature of floor function is
+ * (STRING, INTEGER, INTEGER) -> STRING
* (INTEGER, INTEGER, INTEGER) -> STRING */ private static DefaultFunctionResolver conv() { - return define(BuiltinFunctionName.CONV.getName(), - impl(nullMissingHandling((x, a, b) -> new ExprStringValue( - Integer.toString(Integer.parseInt(x.stringValue(), a.integerValue()), - b.integerValue()))), - STRING, STRING, INTEGER, INTEGER), - impl(nullMissingHandling((x, a, b) -> new ExprStringValue( - Integer.toString(Integer.parseInt(x.integerValue().toString(), a.integerValue()), - b.integerValue()))), - STRING, INTEGER, INTEGER, INTEGER) - ); - } - - /** - * Definition of crc32(x) function. - * Calculate a cyclic redundancy check value and returns a 32-bit unsigned value - * The supported signature of crc32 function is + return define( + BuiltinFunctionName.CONV.getName(), + impl( + nullMissingHandling( + (x, a, b) -> + new ExprStringValue( + Integer.toString( + Integer.parseInt(x.stringValue(), a.integerValue()), + b.integerValue()))), + STRING, + STRING, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (x, a, b) -> + new ExprStringValue( + Integer.toString( + Integer.parseInt(x.integerValue().toString(), a.integerValue()), + b.integerValue()))), + STRING, + INTEGER, + INTEGER, + INTEGER)); + } + + /** + * Definition of crc32(x) function.<\b>
+ * Calculate a cyclic redundancy check value and returns a 32-bit unsigned value
+ * The supported signature of crc32 function is
* STRING -> LONG */ private static DefaultFunctionResolver crc32() { - return define(BuiltinFunctionName.CRC32.getName(), - impl(nullMissingHandling(v -> { - CRC32 crc = new CRC32(); - crc.update(v.stringValue().getBytes()); - return new ExprLongValue(crc.getValue()); - }), - LONG, STRING) - ); + return define( + BuiltinFunctionName.CRC32.getName(), + impl( + nullMissingHandling( + v -> { + CRC32 crc = new CRC32(); + crc.update(v.stringValue().getBytes()); + return new ExprLongValue(crc.getValue()); + }), + LONG, + STRING)); } /** - * Definition of e() function. - * Get the Euler's number. - * () -> DOUBLE + * Definition of e() function.
+ * Get the Euler's number. () -> DOUBLE */ private static DefaultFunctionResolver euler() { - return define(BuiltinFunctionName.E.getName(), - impl(() -> new ExprDoubleValue(Math.E), DOUBLE) - ); + return define(BuiltinFunctionName.E.getName(), impl(() -> new ExprDoubleValue(Math.E), DOUBLE)); } /** - * Definition of exp(x) function. Calculate exponent function e to the x - * The supported signature of exp function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + * Definition of exp(x) function.
+ * Calculate exponent function e to the x The supported signature of exp function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver exp() { - return baseMathFunction(BuiltinFunctionName.EXP.getName(), - v -> new ExprDoubleValue(Math.exp(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.EXP.getName(), + v -> new ExprDoubleValue(Math.exp(v.doubleValue())), + DOUBLE); } /** - * Definition of expm1(x) function. Calculate exponent function e to the x, minus 1 - * The supported signature of exp function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + * Definition of expm1(x) function.
+ * Calculate exponent function e to the x, minus 1 The supported signature of exp function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver expm1() { - return baseMathFunction(BuiltinFunctionName.EXPM1.getName(), - v -> new ExprDoubleValue(Math.expm1(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.EXPM1.getName(), + v -> new ExprDoubleValue(Math.expm1(v.doubleValue())), + DOUBLE); } /** - * Definition of floor(x) function. Calculate the next nearest whole integer that x rounds down to - * The supported signature of floor function is DOUBLE -> INTEGER + * Definition of floor(x) function.
+ * Calculate the next nearest whole integer that x rounds down to The supported signature of floor + * function is DOUBLE -> INTEGER */ private static DefaultFunctionResolver floor() { - return define(BuiltinFunctionName.FLOOR.getName(), - impl(nullMissingHandling(v -> new ExprLongValue(Math.floor(v.doubleValue()))), - LONG, DOUBLE) - ); + return define( + BuiltinFunctionName.FLOOR.getName(), + impl( + nullMissingHandling(v -> new ExprLongValue(Math.floor(v.doubleValue()))), + LONG, + DOUBLE)); } /** @@ -229,108 +257,171 @@ private static DefaultFunctionResolver floor() { * ln function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver ln() { - return baseMathFunction(BuiltinFunctionName.LN.getName(), - v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.LN.getName(), + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log(v.doubleValue())), + DOUBLE); } /** - * Definition of log(b, x) function. Calculate the logarithm of x using b as the base The - * supported signature of log function is (b: INTEGER/LONG/FLOAT/DOUBLE, x: - * INTEGER/LONG/FLOAT/DOUBLE]) -> DOUBLE + * Definition of log(b, x) function.
+ * Calculate the logarithm of x using b as the base The supported signature of log function is
+ * (b: INTEGER/LONG/FLOAT/DOUBLE, x: INTEGER/LONG/FLOAT/DOUBLE]) -> DOUBLE */ private static DefaultFunctionResolver log() { - ImmutableList.Builder>> builder = new ImmutableList.Builder<>(); + ImmutableList.Builder< + SerializableFunction>> + builder = new ImmutableList.Builder<>(); // build unary log(x), SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE for (ExprType type : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling(v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log(v.doubleValue()))), - DOUBLE, type)); + builder.add( + impl( + nullMissingHandling( + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log(v.doubleValue()))), + DOUBLE, + type)); } // build binary function log(b, x) for (ExprType baseType : ExprCoreType.numberTypes()) { for (ExprType numberType : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling((b, x) -> b.doubleValue() <= 0 || x.doubleValue() <= 0 - ? ExprNullValue.of() : new ExprDoubleValue( - Math.log(x.doubleValue()) / Math.log(b.doubleValue()))), - DOUBLE, baseType, numberType)); + builder.add( + impl( + nullMissingHandling( + (b, x) -> + b.doubleValue() <= 0 || x.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue( + Math.log(x.doubleValue()) / Math.log(b.doubleValue()))), + DOUBLE, + baseType, + numberType)); } } return define(BuiltinFunctionName.LOG.getName(), builder.build()); } - /** - * Definition of log10(x) function. Calculate base-10 logarithm of x The supported signature of + * Definition of log10(x) function.
+ * Calculate base-10 logarithm of x The supported signature of
* log function is SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver log10() { - return baseMathFunction(BuiltinFunctionName.LOG10.getName(), - v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log10(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.LOG10.getName(), + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log10(v.doubleValue())), + DOUBLE); } /** - * Definition of log2(x) function. Calculate base-2 logarithm of x The supported signature of log + * Definition of log2(x) function.
+ * Calculate base-2 logarithm of x The supported signature of log
* function is SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver log2() { - return baseMathFunction(BuiltinFunctionName.LOG2.getName(), - v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log(v.doubleValue()) / Math.log(2)), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.LOG2.getName(), + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log(v.doubleValue()) / Math.log(2)), + DOUBLE); } /** - * Definition of mod(x, y) function. - * Calculate the remainder of x divided by y - * The supported signature of mod function is - * (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) + * Definition of mod(x, y) function.
+ * Calculate the remainder of x divided by y
+ * The supported signature of mod function is
+ * (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver mod() { - return define(BuiltinFunctionName.MOD.getName(), - impl(nullMissingHandling((v1, v2) -> v2.byteValue() == 0 ? ExprNullValue.of() : - new ExprByteValue(v1.byteValue() % v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprShortValue(v1.shortValue() % v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprIntegerValue(Math.floorMod(v1.integerValue(), v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprLongValue(Math.floorMod(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprFloatValue(v1.floatValue() % v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); - } - - /** - * Definition of pi() function. - * Get the value of pi. + return define( + BuiltinFunctionName.MOD.getName(), + impl( + nullMissingHandling( + (v1, v2) -> + v2.byteValue() == 0 + ? ExprNullValue.of() + : new ExprByteValue(v1.byteValue() % v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprShortValue(v1.shortValue() % v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprIntegerValue( + Math.floorMod(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? 
ExprNullValue.of() + : new ExprLongValue(Math.floorMod(v1.longValue(), v2.longValue()))), + LONG, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprFloatValue(v1.floatValue() % v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); + } + + /** + * Definition of pi() function.
+ * Get the value of pi.
* () -> DOUBLE */ private static DefaultFunctionResolver pi() { - return define(BuiltinFunctionName.PI.getName(), - impl(() -> new ExprDoubleValue(Math.PI), DOUBLE) - ); + return define( + BuiltinFunctionName.PI.getName(), impl(() -> new ExprDoubleValue(Math.PI), DOUBLE)); } /** - * Definition of pow(x, y)/power(x, y) function. - * Calculate the value of x raised to the power of y - * The supported signature of pow/power function is - * (INTEGER, INTEGER) -> DOUBLE - * (LONG, LONG) -> DOUBLE - * (FLOAT, FLOAT) -> DOUBLE + * Definition of pow(x, y)/power(x, y) function.
+ * Calculate the value of x raised to the power of y
+ * The supported signature of pow/power function is
+ * (INTEGER, INTEGER) -> DOUBLE
+ * (LONG, LONG) -> DOUBLE
+ * (FLOAT, FLOAT) -> DOUBLE
* (DOUBLE, DOUBLE) -> DOUBLE */ private static DefaultFunctionResolver pow() { @@ -341,336 +432,485 @@ private static DefaultFunctionResolver power() { return define(BuiltinFunctionName.POWER.getName(), powerFunctionImpl()); } - private List>> powerFunctionImpl() { + private List>> + powerFunctionImpl() { return Arrays.asList( - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(Math.pow(v1.shortValue(), v2.shortValue()))), - DOUBLE, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(Math.pow(v1.integerValue(), v2.integerValue()))), - DOUBLE, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(Math.pow(v1.longValue(), v2.longValue()))), - DOUBLE, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> v1.floatValue() <= 0 && v2.floatValue() != Math.floor(v2.floatValue()) - ? ExprNullValue.of() : - new ExprDoubleValue(Math.pow(v1.floatValue(), v2.floatValue()))), - DOUBLE, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> v1.doubleValue() <= 0 && v2.doubleValue() != Math.floor(v2.doubleValue()) - ? ExprNullValue.of() : - new ExprDoubleValue(Math.pow(v1.doubleValue(), v2.doubleValue()))), - DOUBLE, DOUBLE, DOUBLE)); - } - - /** - * Definition of rand() and rand(N) function. - * rand() returns a random floating-point value in the range 0 <= value < 1.0 - * If integer N is specified, the seed is initialized prior to execution. + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(Math.pow(v1.shortValue(), v2.shortValue()))), + DOUBLE, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(Math.pow(v1.integerValue(), v2.integerValue()))), + DOUBLE, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(Math.pow(v1.longValue(), v2.longValue()))), + DOUBLE, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v1.floatValue() <= 0 && v2.floatValue() != Math.floor(v2.floatValue()) + ? 
ExprNullValue.of() + : new ExprDoubleValue(Math.pow(v1.floatValue(), v2.floatValue()))), + DOUBLE, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v1.doubleValue() <= 0 && v2.doubleValue() != Math.floor(v2.doubleValue()) + ? ExprNullValue.of() + : new ExprDoubleValue(Math.pow(v1.doubleValue(), v2.doubleValue()))), + DOUBLE, + DOUBLE, + DOUBLE)); + } + + /** + * Definition of rand() and rand(N) function.
+ * rand() returns a random floating-point value in the range 0 <= value < 1.0
+ * If integer N is specified, the seed is initialized prior to execution.
* One implication of this behavior is with identical argument N,rand(N) returns the same value - * each time, and thus produces a repeatable sequence of column values. - * The supported signature of rand function is - * ([INTEGER]) -> FLOAT + *
+ * each time, and thus produces a repeatable sequence of column values. The supported signature of + *
+ * rand function is ([INTEGER]) -> FLOAT */ private static DefaultFunctionResolver rand() { - return define(BuiltinFunctionName.RAND.getName(), + return define( + BuiltinFunctionName.RAND.getName(), impl(() -> new ExprFloatValue(new Random().nextFloat()), FLOAT), - impl(nullMissingHandling( - v -> new ExprFloatValue(new Random(v.integerValue()).nextFloat())), FLOAT, INTEGER) - ); + impl( + nullMissingHandling(v -> new ExprFloatValue(new Random(v.integerValue()).nextFloat())), + FLOAT, + INTEGER)); } /** - * Definition of rint(x) function. - * Returns the closest whole integer value to x - * The supported signature is + * Definition of rint(x) function.
+ * Returns the closest whole integer value to x
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver rint() { - return baseMathFunction(BuiltinFunctionName.RINT.getName(), - v -> new ExprDoubleValue(Math.rint(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.RINT.getName(), + v -> new ExprDoubleValue(Math.rint(v.doubleValue())), + DOUBLE); } /** - * Definition of round(x)/round(x, d) function. - * Rounds the argument x to d decimal places, d defaults to 0 if not specified. - * The supported signature of round function is - * (x: INTEGER [, y: INTEGER]) -> INTEGER - * (x: LONG [, y: INTEGER]) -> LONG - * (x: FLOAT [, y: INTEGER]) -> FLOAT + * Definition of round(x)/round(x, d) function.
+ * Rounds the argument x to d decimal places, d defaults to 0 if not specified.
+ * The supported signature of round function is
+ * (x: INTEGER [, y: INTEGER]) -> INTEGER
+ * (x: LONG [, y: INTEGER]) -> LONG
+ * (x: FLOAT [, y: INTEGER]) -> FLOAT
* (x: DOUBLE [, y: INTEGER]) -> DOUBLE */ private static DefaultFunctionResolver round() { - return define(BuiltinFunctionName.ROUND.getName(), + return define( + BuiltinFunctionName.ROUND.getName(), // rand(x) - impl(nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.integerValue()))), - LONG, INTEGER), - impl(nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.longValue()))), - LONG, LONG), - impl(nullMissingHandling(v -> new ExprDoubleValue((double) Math.round(v.floatValue()))), - DOUBLE, FLOAT), - impl(nullMissingHandling(v -> new ExprDoubleValue(new BigDecimal(v.doubleValue()) - .setScale(0, RoundingMode.HALF_UP).doubleValue())), - DOUBLE, DOUBLE), + impl( + nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.integerValue()))), + LONG, + INTEGER), + impl( + nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.longValue()))), + LONG, + LONG), + impl( + nullMissingHandling(v -> new ExprDoubleValue((double) Math.round(v.floatValue()))), + DOUBLE, + FLOAT), + impl( + nullMissingHandling( + v -> + new ExprDoubleValue( + new BigDecimal(v.doubleValue()) + .setScale(0, RoundingMode.HALF_UP) + .doubleValue())), + DOUBLE, + DOUBLE), // rand(x, d) - impl(nullMissingHandling((x, d) -> new ExprLongValue(new BigDecimal(x.integerValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).longValue())), - LONG, INTEGER, INTEGER), - impl(nullMissingHandling((x, d) -> new ExprLongValue(new BigDecimal(x.longValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).longValue())), - LONG, LONG, INTEGER), - impl(nullMissingHandling((x, d) -> new ExprDoubleValue(new BigDecimal(x.floatValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).doubleValue())), - DOUBLE, FLOAT, INTEGER), - impl(nullMissingHandling((x, d) -> new ExprDoubleValue(new BigDecimal(x.doubleValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).doubleValue())), - DOUBLE, DOUBLE, INTEGER)); - } - - /** - * Definition of sign(x) function. 
- * Returns the sign of the argument as -1, 0, or 1 - * depending on whether x is negative, zero, or positive - * The supported signature is + impl( + nullMissingHandling( + (x, d) -> + new ExprLongValue( + new BigDecimal(x.integerValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .longValue())), + LONG, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (x, d) -> + new ExprLongValue( + new BigDecimal(x.longValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .longValue())), + LONG, + LONG, + INTEGER), + impl( + nullMissingHandling( + (x, d) -> + new ExprDoubleValue( + new BigDecimal(x.floatValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .doubleValue())), + DOUBLE, + FLOAT, + INTEGER), + impl( + nullMissingHandling( + (x, d) -> + new ExprDoubleValue( + new BigDecimal(x.doubleValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .doubleValue())), + DOUBLE, + DOUBLE, + INTEGER)); + } + + /** + * Definition of sign(x) function.
+ * Returns the sign of the argument as -1, 0, or 1
+ * depending on whether x is negative, zero, or positive
+ * The supported signature is
* SHORT/INTEGER/LONG/FLOAT/DOUBLE -> INTEGER */ private static DefaultFunctionResolver sign() { - return baseMathFunction(BuiltinFunctionName.SIGN.getName(), - v -> new ExprIntegerValue(Math.signum(v.doubleValue())), INTEGER); + return baseMathFunction( + BuiltinFunctionName.SIGN.getName(), + v -> new ExprIntegerValue(Math.signum(v.doubleValue())), + INTEGER); } /** - * Definition of signum(x) function. - * Returns the sign of the argument as -1.0, 0, or 1.0 - * depending on whether x is negative, zero, or positive - * The supported signature is + * Definition of signum(x) function.
+ * Returns the sign of the argument as -1.0, 0, or 1.0
+ * depending on whether x is negative, zero, or positive
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> INTEGER */ private static DefaultFunctionResolver signum() { - return baseMathFunction(BuiltinFunctionName.SIGNUM.getName(), - v -> new ExprIntegerValue(Math.signum(v.doubleValue())), INTEGER); + return baseMathFunction( + BuiltinFunctionName.SIGNUM.getName(), + v -> new ExprIntegerValue(Math.signum(v.doubleValue())), + INTEGER); } /** - * Definition of sinh(x) function. - * Returns the hyperbolix sine of x, defined as (((e^x) - (e^(-x))) / 2) - * The supported signature is + * Definition of sinh(x) function.
+ * Returns the hyperbolix sine of x, defined as (((e^x) - (e^(-x))) / 2)
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver sinh() { - return baseMathFunction(BuiltinFunctionName.SINH.getName(), - v -> new ExprDoubleValue(Math.sinh(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.SINH.getName(), + v -> new ExprDoubleValue(Math.sinh(v.doubleValue())), + DOUBLE); } /** - * Definition of sqrt(x) function. - * Calculate the square root of a non-negative number x - * The supported signature is + * Definition of sqrt(x) function.
+ * Calculate the square root of a non-negative number x
+ * The supported signature is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver sqrt() { - return baseMathFunction(BuiltinFunctionName.SQRT.getName(), - v -> v.doubleValue() < 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.sqrt(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.SQRT.getName(), + v -> + v.doubleValue() < 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.sqrt(v.doubleValue())), + DOUBLE); } /** - * Definition of cbrt(x) function. - * Calculate the cube root of a number x - * The supported signature is + * Definition of cbrt(x) function.
+ * Calculate the cube root of a number x
+ * The supported signature is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cbrt() { - return baseMathFunction(BuiltinFunctionName.CBRT.getName(), - v -> new ExprDoubleValue(Math.cbrt(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.CBRT.getName(), + v -> new ExprDoubleValue(Math.cbrt(v.doubleValue())), + DOUBLE); } /** - * Definition of truncate(x, d) function. - * Returns the number x, truncated to d decimal places - * The supported signature of round function is - * (x: INTEGER, y: INTEGER) -> LONG - * (x: LONG, y: INTEGER) -> LONG - * (x: FLOAT, y: INTEGER) -> DOUBLE + * Definition of truncate(x, d) function.
+ * Returns the number x, truncated to d decimal places
+ * The supported signature of round function is
+ * (x: INTEGER, y: INTEGER) -> LONG
+ * (x: LONG, y: INTEGER) -> LONG
+ * (x: FLOAT, y: INTEGER) -> DOUBLE
* (x: DOUBLE, y: INTEGER) -> DOUBLE */ private static DefaultFunctionResolver truncate() { - return define(BuiltinFunctionName.TRUNCATE.getName(), - impl(nullMissingHandling((x, y) -> new ExprLongValue(BigDecimal.valueOf(x.integerValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).longValue())), - LONG, INTEGER, INTEGER), - impl(nullMissingHandling((x, y) -> new ExprLongValue(BigDecimal.valueOf(x.longValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).longValue())), - LONG, LONG, INTEGER), - impl(nullMissingHandling((x, y) -> new ExprDoubleValue(BigDecimal.valueOf(x.floatValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).doubleValue())), - DOUBLE, FLOAT, INTEGER), - impl(nullMissingHandling((x, y) -> new ExprDoubleValue(BigDecimal.valueOf(x.doubleValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).doubleValue())), - DOUBLE, DOUBLE, INTEGER)); - } - - /** - * Definition of acos(x) function. - * Calculates the arc cosine of x, that is, the value whose cosine is x. - * Returns NULL if x is not in the range -1 to 1. 
- * The supported signature of acos function is + return define( + BuiltinFunctionName.TRUNCATE.getName(), + impl( + nullMissingHandling( + (x, y) -> + new ExprLongValue( + BigDecimal.valueOf(x.integerValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .longValue())), + LONG, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (x, y) -> + new ExprLongValue( + BigDecimal.valueOf(x.longValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .longValue())), + LONG, + LONG, + INTEGER), + impl( + nullMissingHandling( + (x, y) -> + new ExprDoubleValue( + BigDecimal.valueOf(x.floatValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .doubleValue())), + DOUBLE, + FLOAT, + INTEGER), + impl( + nullMissingHandling( + (x, y) -> + new ExprDoubleValue( + BigDecimal.valueOf(x.doubleValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .doubleValue())), + DOUBLE, + DOUBLE, + INTEGER)); + } + + /** + * Definition of acos(x) function.
+ * Calculates the arc cosine of x, that is, the value whose cosine is x.
+ * Returns NULL if x is not in the range -1 to 1.
+ * The supported signature of acos function is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver acos() { - return define(BuiltinFunctionName.ACOS.getName(), + return define( + BuiltinFunctionName.ACOS.getName(), ExprCoreType.numberTypes().stream() - .map(type -> impl(nullMissingHandling( - v -> v.doubleValue() < -1 || v.doubleValue() > 1 ? ExprNullValue.of() : - new ExprDoubleValue(Math.acos(v.doubleValue()))), - DOUBLE, type)).collect(Collectors.toList())); - } - - /** - * Definition of asin(x) function. - * Calculates the arc sine of x, that is, the value whose sine is x. - * Returns NULL if x is not in the range -1 to 1. - * The supported signature of asin function is - * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + .map( + type -> + impl( + nullMissingHandling( + v -> + v.doubleValue() < -1 || v.doubleValue() > 1 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.acos(v.doubleValue()))), + DOUBLE, + type)) + .collect(Collectors.toList())); + } + + /** + * Definition of asin(x) function.
+ * Calculates the arc sine of x, that is, the value whose sine is x.
+ * Returns NULL if x is not in the range -1 to 1.
+ * The supported signature of asin function is
+ * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE
*/ private static DefaultFunctionResolver asin() { - return define(BuiltinFunctionName.ASIN.getName(), + return define( + BuiltinFunctionName.ASIN.getName(), ExprCoreType.numberTypes().stream() - .map(type -> impl(nullMissingHandling( - v -> v.doubleValue() < -1 || v.doubleValue() > 1 ? ExprNullValue.of() : - new ExprDoubleValue(Math.asin(v.doubleValue()))), - DOUBLE, type)).collect(Collectors.toList())); - } - - /** - * Definition of atan(x) and atan(y, x) function. - * atan(x) calculates the arc tangent of x, that is, the value whose tangent is x. - * atan(y, x) calculates the arc tangent of y / x, except that the signs of both arguments - * are used to determine the quadrant of the result. - * The supported signature of atan function is + .map( + type -> + impl( + nullMissingHandling( + v -> + v.doubleValue() < -1 || v.doubleValue() > 1 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.asin(v.doubleValue()))), + DOUBLE, + type)) + .collect(Collectors.toList())); + } + + /** + * Definition of atan(x) and atan(y, x) function.
+ * atan(x) calculates the arc tangent of x, that is, the value whose tangent is x.
+ * atan(y, x) calculates the arc tangent of y / x, except that the signs of both arguments
+ * are used to determine the quadrant of the result.
+ * The supported signature of atan function is
* (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) -> DOUBLE */ private static DefaultFunctionResolver atan() { - ImmutableList.Builder>> builder = new ImmutableList.Builder<>(); + ImmutableList.Builder< + SerializableFunction>> + builder = new ImmutableList.Builder<>(); for (ExprType type : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling(x -> new ExprDoubleValue(Math.atan(x.doubleValue()))), - type, DOUBLE)); - builder.add(impl(nullMissingHandling((y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), - x.doubleValue()))), - DOUBLE, type, type)); + builder.add( + impl( + nullMissingHandling(x -> new ExprDoubleValue(Math.atan(x.doubleValue()))), + type, + DOUBLE)); + builder.add( + impl( + nullMissingHandling( + (y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), x.doubleValue()))), + DOUBLE, + type, + type)); } return define(BuiltinFunctionName.ATAN.getName(), builder.build()); } /** - * Definition of atan2(y, x) function. - * Calculates the arc tangent of y / x, except that the signs of both arguments - * are used to determine the quadrant of the result. - * The supported signature of atan2 function is + * Definition of atan2(y, x) function.
+ * Calculates the arc tangent of y / x, except that the signs of both arguments are used to + * determine the quadrant of the result.
+ * The supported signature of atan2 function is
* (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) -> DOUBLE */ private static DefaultFunctionResolver atan2() { - ImmutableList.Builder>> builder = new ImmutableList.Builder<>(); + ImmutableList.Builder< + SerializableFunction>> + builder = new ImmutableList.Builder<>(); for (ExprType type : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling((y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), - x.doubleValue()))), DOUBLE, type, type)); + builder.add( + impl( + nullMissingHandling( + (y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), x.doubleValue()))), + DOUBLE, + type, + type)); } return define(BuiltinFunctionName.ATAN2.getName(), builder.build()); } /** - * Definition of cos(x) function. - * Calculates the cosine of X, where X is given in radians - * The supported signature of cos function is + * Definition of cos(x) function.
+ * Calculates the cosine of X, where X is given in radians
+ * The supported signature of cos function is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cos() { - return baseMathFunction(BuiltinFunctionName.COS.getName(), - v -> new ExprDoubleValue(Math.cos(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.COS.getName(), + v -> new ExprDoubleValue(Math.cos(v.doubleValue())), + DOUBLE); } /** - * Definition of cosh(x) function. - * Returns the hyperbolic cosine of x, defined as (((e^x) + (e^(-x))) / 2) - * The supported signature is + * Definition of cosh(x) function.
+ * Returns the hyperbolic cosine of x, defined as (((e^x) + (e^(-x))) / 2)
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cosh() { - return baseMathFunction(BuiltinFunctionName.COSH.getName(), - v -> new ExprDoubleValue(Math.cosh(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.COSH.getName(), + v -> new ExprDoubleValue(Math.cosh(v.doubleValue())), + DOUBLE); } /** - * Definition of cot(x) function. - * Calculates the cotangent of x - * The supported signature of cot function is + * Definition of cot(x) function.<\b>
+ * Calculates the cotangent of x The supported signature of cot function is * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cot() { - return define(BuiltinFunctionName.COT.getName(), + return define( + BuiltinFunctionName.COT.getName(), ExprCoreType.numberTypes().stream() - .map(type -> impl(nullMissingHandling( - v -> { - Double value = v.doubleValue(); - if (value == 0) { - throw new ArithmeticException( - String.format("Out of range value for cot(%s)", value)); - } - return new ExprDoubleValue(1 / Math.tan(value)); - }), - DOUBLE, type)).collect(Collectors.toList())); - } - - /** - * Definition of degrees(x) function. - * Converts x from radians to degrees - * The supported signature of degrees function is + .map( + type -> + impl( + nullMissingHandling( + v -> { + Double value = v.doubleValue(); + if (value == 0) { + throw new ArithmeticException( + String.format("Out of range value for cot(%s)", value)); + } + return new ExprDoubleValue(1 / Math.tan(value)); + }), + DOUBLE, + type)) + .collect(Collectors.toList())); + } + + /** + * Definition of degrees(x) function.
+ * Converts x from radians to degrees The supported signature of degrees function is * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver degrees() { - return baseMathFunction(BuiltinFunctionName.DEGREES.getName(), - v -> new ExprDoubleValue(Math.toDegrees(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.DEGREES.getName(), + v -> new ExprDoubleValue(Math.toDegrees(v.doubleValue())), + DOUBLE); } /** - * Definition of radians(x) function. - * Converts x from degrees to radians - * The supported signature of radians function is + * Definition of radians(x) function.
+ * Converts x from degrees to radians The supported signature of radians function is * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver radians() { - return baseMathFunction(BuiltinFunctionName.RADIANS.getName(), - v -> new ExprDoubleValue(Math.toRadians(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.RADIANS.getName(), + v -> new ExprDoubleValue(Math.toRadians(v.doubleValue())), + DOUBLE); } /** - * Definition of sin(x) function. - * Calculates the sine of x, where x is given in radians - * The supported signature of sin function is + * Definition of sin(x) function.
+ * Calculates the sine of x, where x is given in radians The supported signature of sin function + * is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver sin() { - return baseMathFunction(BuiltinFunctionName.SIN.getName(), - v -> new ExprDoubleValue(Math.sin(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.SIN.getName(), + v -> new ExprDoubleValue(Math.sin(v.doubleValue())), + DOUBLE); } /** - * Definition of tan(x) function. - * Calculates the tangent of x, where x is given in radians - * The supported signature of tan function is + * Definition of tan(x) function.
+ * Calculates the tangent of x, where x is given in radians The supported signature of tan + * function is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver tan() { - return baseMathFunction(BuiltinFunctionName.TAN.getName(), - v -> new ExprDoubleValue(Math.tan(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.TAN.getName(), + v -> new ExprDoubleValue(Math.tan(v.doubleValue())), + DOUBLE); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java index d3295a53f0..7c3565f69c 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.convert; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; @@ -46,9 +45,7 @@ @UtilityClass public class TypeCastOperator { - /** - * Register Type Cast Operator. - */ + /** Register Type Cast Operator. 
*/ public static void register(BuiltinFunctionRepository repository) { repository.register(castToString()); repository.register(castToByte()); @@ -64,148 +61,175 @@ public static void register(BuiltinFunctionRepository repository) { repository.register(castToDatetime()); } - private static DefaultFunctionResolver castToString() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_STRING.getName(), + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_STRING.getName(), Stream.concat( - Arrays.asList(BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, - TIMESTAMP, DATETIME).stream() - .map(type -> impl( - nullMissingHandling((v) -> new ExprStringValue(v.value().toString())), - STRING, type)), - Stream.of(impl(nullMissingHandling((v) -> v), STRING, STRING))) - .collect(Collectors.toList()) - ); + Arrays.asList( + BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, TIMESTAMP, + DATETIME) + .stream() + .map( + type -> + impl( + nullMissingHandling( + (v) -> new ExprStringValue(v.value().toString())), + STRING, + type)), + Stream.of(impl(nullMissingHandling((v) -> v), STRING, STRING))) + .collect(Collectors.toList())); } private static DefaultFunctionResolver castToByte() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_BYTE.getName(), - impl(nullMissingHandling( - (v) -> new ExprByteValue(Byte.valueOf(v.stringValue()))), BYTE, STRING), - impl(nullMissingHandling( - (v) -> new ExprByteValue(v.byteValue())), BYTE, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprByteValue(v.booleanValue() ? 1 : 0)), BYTE, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_BYTE.getName(), + impl( + nullMissingHandling((v) -> new ExprByteValue(Byte.valueOf(v.stringValue()))), + BYTE, + STRING), + impl(nullMissingHandling((v) -> new ExprByteValue(v.byteValue())), BYTE, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprByteValue(v.booleanValue() ? 
1 : 0)), + BYTE, + BOOLEAN)); } private static DefaultFunctionResolver castToShort() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_SHORT.getName(), - impl(nullMissingHandling( - (v) -> new ExprShortValue(Short.valueOf(v.stringValue()))), SHORT, STRING), - impl(nullMissingHandling( - (v) -> new ExprShortValue(v.shortValue())), SHORT, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprShortValue(v.booleanValue() ? 1 : 0)), SHORT, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_SHORT.getName(), + impl( + nullMissingHandling((v) -> new ExprShortValue(Short.valueOf(v.stringValue()))), + SHORT, + STRING), + impl(nullMissingHandling((v) -> new ExprShortValue(v.shortValue())), SHORT, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprShortValue(v.booleanValue() ? 1 : 0)), + SHORT, + BOOLEAN)); } private static DefaultFunctionResolver castToInt() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_INT.getName(), - impl(nullMissingHandling( - (v) -> new ExprIntegerValue(Integer.valueOf(v.stringValue()))), INTEGER, STRING), - impl(nullMissingHandling( - (v) -> new ExprIntegerValue(v.integerValue())), INTEGER, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprIntegerValue(v.booleanValue() ? 1 : 0)), INTEGER, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_INT.getName(), + impl( + nullMissingHandling((v) -> new ExprIntegerValue(Integer.valueOf(v.stringValue()))), + INTEGER, + STRING), + impl(nullMissingHandling((v) -> new ExprIntegerValue(v.integerValue())), INTEGER, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprIntegerValue(v.booleanValue() ? 
1 : 0)), + INTEGER, + BOOLEAN)); } private static DefaultFunctionResolver castToLong() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_LONG.getName(), - impl(nullMissingHandling( - (v) -> new ExprLongValue(Long.valueOf(v.stringValue()))), LONG, STRING), - impl(nullMissingHandling( - (v) -> new ExprLongValue(v.longValue())), LONG, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprLongValue(v.booleanValue() ? 1L : 0L)), LONG, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_LONG.getName(), + impl( + nullMissingHandling((v) -> new ExprLongValue(Long.valueOf(v.stringValue()))), + LONG, + STRING), + impl(nullMissingHandling((v) -> new ExprLongValue(v.longValue())), LONG, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprLongValue(v.booleanValue() ? 1L : 0L)), + LONG, + BOOLEAN)); } private static DefaultFunctionResolver castToFloat() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_FLOAT.getName(), - impl(nullMissingHandling( - (v) -> new ExprFloatValue(Float.valueOf(v.stringValue()))), FLOAT, STRING), - impl(nullMissingHandling( - (v) -> new ExprFloatValue(v.floatValue())), FLOAT, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprFloatValue(v.booleanValue() ? 1f : 0f)), FLOAT, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_FLOAT.getName(), + impl( + nullMissingHandling((v) -> new ExprFloatValue(Float.valueOf(v.stringValue()))), + FLOAT, + STRING), + impl(nullMissingHandling((v) -> new ExprFloatValue(v.floatValue())), FLOAT, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprFloatValue(v.booleanValue() ? 
1f : 0f)), + FLOAT, + BOOLEAN)); } private static DefaultFunctionResolver castToDouble() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_DOUBLE.getName(), - impl(nullMissingHandling( - (v) -> new ExprDoubleValue(Double.valueOf(v.stringValue()))), DOUBLE, STRING), - impl(nullMissingHandling( - (v) -> new ExprDoubleValue(v.doubleValue())), DOUBLE, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprDoubleValue(v.booleanValue() ? 1D : 0D)), DOUBLE, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_DOUBLE.getName(), + impl( + nullMissingHandling((v) -> new ExprDoubleValue(Double.valueOf(v.stringValue()))), + DOUBLE, + STRING), + impl(nullMissingHandling((v) -> new ExprDoubleValue(v.doubleValue())), DOUBLE, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprDoubleValue(v.booleanValue() ? 1D : 0D)), + DOUBLE, + BOOLEAN)); } private static DefaultFunctionResolver castToBoolean() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), - impl(nullMissingHandling( - (v) -> ExprBooleanValue.of(Boolean.valueOf(v.stringValue()))), BOOLEAN, STRING), - impl(nullMissingHandling( - (v) -> ExprBooleanValue.of(v.doubleValue() != 0)), BOOLEAN, DOUBLE), - impl(nullMissingHandling((v) -> v), BOOLEAN, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), + impl( + nullMissingHandling((v) -> ExprBooleanValue.of(Boolean.valueOf(v.stringValue()))), + BOOLEAN, + STRING), + impl( + nullMissingHandling((v) -> ExprBooleanValue.of(v.doubleValue() != 0)), BOOLEAN, DOUBLE), + impl(nullMissingHandling((v) -> v), BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver castToDate() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_DATE.getName(), - impl(nullMissingHandling( - (v) -> new ExprDateValue(v.stringValue())), DATE, STRING), - impl(nullMissingHandling( - (v) -> new ExprDateValue(v.dateValue())), DATE, DATETIME), - impl(nullMissingHandling( - (v) -> new ExprDateValue(v.dateValue())), 
DATE, TIMESTAMP), - impl(nullMissingHandling((v) -> v), DATE, DATE) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_DATE.getName(), + impl(nullMissingHandling((v) -> new ExprDateValue(v.stringValue())), DATE, STRING), + impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, DATETIME), + impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, TIMESTAMP), + impl(nullMissingHandling((v) -> v), DATE, DATE)); } private static DefaultFunctionResolver castToTime() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_TIME.getName(), - impl(nullMissingHandling( - (v) -> new ExprTimeValue(v.stringValue())), TIME, STRING), - impl(nullMissingHandling( - (v) -> new ExprTimeValue(v.timeValue())), TIME, DATETIME), - impl(nullMissingHandling( - (v) -> new ExprTimeValue(v.timeValue())), TIME, TIMESTAMP), - impl(nullMissingHandling((v) -> v), TIME, TIME) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_TIME.getName(), + impl(nullMissingHandling((v) -> new ExprTimeValue(v.stringValue())), TIME, STRING), + impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, DATETIME), + impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, TIMESTAMP), + impl(nullMissingHandling((v) -> v), TIME, TIME)); } // `DATE`/`TIME`/`DATETIME` -> `DATETIME`/TIMESTAMP` cast tested in BinaryPredicateOperatorTest private static DefaultFunctionResolver castToTimestamp() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), - impl(nullMissingHandling( - (v) -> new ExprTimestampValue(v.stringValue())), TIMESTAMP, STRING), - impl(nullMissingHandling( - (v) -> new ExprTimestampValue(v.timestampValue())), TIMESTAMP, DATETIME), - impl(nullMissingHandling( - (v) -> new ExprTimestampValue(v.timestampValue())), TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties( - (fp, v) -> new ExprTimestampValue(((ExprTimeValue)v).timestampValue(fp))), - TIMESTAMP, TIME), - 
impl(nullMissingHandling((v) -> v), TIMESTAMP, TIMESTAMP) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), + impl( + nullMissingHandling((v) -> new ExprTimestampValue(v.stringValue())), TIMESTAMP, STRING), + impl( + nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), + TIMESTAMP, + DATETIME), + impl( + nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), + TIMESTAMP, + DATE), + implWithProperties( + nullMissingHandlingWithProperties( + (fp, v) -> new ExprTimestampValue(((ExprTimeValue) v).timestampValue(fp))), + TIMESTAMP, + TIME), + impl(nullMissingHandling((v) -> v), TIMESTAMP, TIMESTAMP)); } private static DefaultFunctionResolver castToDatetime() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_DATETIME.getName(), - impl(nullMissingHandling( - (v) -> new ExprDatetimeValue(v.stringValue())), DATETIME, STRING), - impl(nullMissingHandling( - (v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, TIMESTAMP), - impl(nullMissingHandling( - (v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties( - (fp, v) -> new ExprDatetimeValue(((ExprTimeValue)v).datetimeValue(fp))), - DATETIME, TIME), - impl(nullMissingHandling((v) -> v), DATETIME, DATETIME) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_DATETIME.getName(), + impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.stringValue())), DATETIME, STRING), + impl( + nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), + DATETIME, + TIMESTAMP), + impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties( + (fp, v) -> new ExprDatetimeValue(((ExprTimeValue) v).datetimeValue(fp))), + DATETIME, + TIME), + impl(nullMissingHandling((v) -> v), DATETIME, DATETIME)); } } diff --git 
a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java index cc5b47bde1..bf6b3c22f5 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.predicate; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; @@ -30,10 +29,10 @@ import org.opensearch.sql.utils.OperatorUtils; /** - * The definition of binary predicate function - * and, Accepts two Boolean values and produces a Boolean. - * or, Accepts two Boolean values and produces a Boolean. - * xor, Accepts two Boolean values and produces a Boolean. + * The definition of binary predicate function
+ * and, Accepts two Boolean values and produces a Boolean.
+ * or, Accepts two Boolean values and produces a Boolean.
+ * xor, Accepts two Boolean values and produces a Boolean.
* equalTo, Compare the left expression and right expression and produces a Boolean. */ @UtilityClass @@ -60,17 +59,64 @@ public static void register(BuiltinFunctionRepository repository) { /** * The and logic. - * A B A AND B - * TRUE TRUE TRUE - * TRUE FALSE FALSE - * TRUE NULL NULL - * TRUE MISSING MISSING - * FALSE FALSE FALSE - * FALSE NULL FALSE - * FALSE MISSING FALSE - * NULL NULL NULL - * NULL MISSING MISSING - * MISSING MISSING MISSING + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
ABA AND B
TRUETRUETRUE
TRUEFALSEFALSE
TRUENULLNULL
TRUEMISSINGMISSING
FALSEFALSEFALSE
FALSENULLFALSE
FALSEMISSINGFALSE
NULLNULLNULL
NULLMISSINGMISSING
MISSINGMISSINGMISSING
*/ private static Table andTable = new ImmutableTable.Builder() @@ -88,17 +134,64 @@ public static void register(BuiltinFunctionRepository repository) { /** * The or logic. - * A B A AND B - * TRUE TRUE TRUE - * TRUE FALSE TRUE - * TRUE NULL TRUE - * TRUE MISSING TRUE - * FALSE FALSE FALSE - * FALSE NULL NULL - * FALSE MISSING MISSING - * NULL NULL NULL - * NULL MISSING NULL - * MISSING MISSING MISSING + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
ABA OR B
TRUETRUETRUE
TRUEFALSETRUE
TRUENULLTRUE
TRUEMISSINGTRUE
FALSEFALSEFALSE
FALSENULLNULL
FALSEMISSINGMISSING
NULLNULLNULL
NULLMISSINGNULL
MISSINGMISSINGMISSING
*/ private static Table orTable = new ImmutableTable.Builder() @@ -116,17 +209,64 @@ public static void register(BuiltinFunctionRepository repository) { /** * The xor logic. - * A B A AND B - * TRUE TRUE FALSE - * TRUE FALSE TRUE - * TRUE NULL TRUE - * TRUE MISSING TRUE - * FALSE FALSE FALSE - * FALSE NULL NULL - * FALSE MISSING MISSING - * NULL NULL NULL - * NULL MISSING NULL - * MISSING MISSING MISSING + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
ABA XOR B
TRUETRUEFALSE
TRUEFALSETRUE
TRUENULLTRUE
TRUEMISSINGTRUE
FALSEFALSEFALSE
FALSENULLNULL
FALSEMISSINGMISSING
NULLNULLNULL
NULLMISSINGNULL
MISSINGMISSINGMISSING
*/ private static Table xorTable = new ImmutableTable.Builder() @@ -143,87 +283,132 @@ public static void register(BuiltinFunctionRepository repository) { .build(); private static DefaultFunctionResolver and() { - return define(BuiltinFunctionName.AND.getName(), + return define( + BuiltinFunctionName.AND.getName(), impl((v1, v2) -> lookupTableFunction(v1, v2, andTable), BOOLEAN, BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver or() { - return define(BuiltinFunctionName.OR.getName(), + return define( + BuiltinFunctionName.OR.getName(), impl((v1, v2) -> lookupTableFunction(v1, v2, orTable), BOOLEAN, BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver xor() { - return define(BuiltinFunctionName.XOR.getName(), + return define( + BuiltinFunctionName.XOR.getName(), impl((v1, v2) -> lookupTableFunction(v1, v2, xorTable), BOOLEAN, BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver equal() { - return define(BuiltinFunctionName.EQUAL.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.equals(v2))), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.EQUAL.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.equals(v2))), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver notEqual() { - return define(BuiltinFunctionName.NOTEQUAL.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(!v1.equals(v2))), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.NOTEQUAL.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(!v1.equals(v2))), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static 
DefaultFunctionResolver less() { - return define(BuiltinFunctionName.LESS.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) < 0)), - BOOLEAN,type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.LESS.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) < 0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver lte() { - return define(BuiltinFunctionName.LTE.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) <= 0)), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.LTE.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) <= 0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver greater() { - return define(BuiltinFunctionName.GREATER.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) > 0)), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.GREATER.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) > 0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver gte() { - return define(BuiltinFunctionName.GTE.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) >= 0)), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.GTE.getName(), + 
ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) >= 0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver like() { - return define(BuiltinFunctionName.LIKE.getName(), + return define( + BuiltinFunctionName.LIKE.getName(), impl(nullMissingHandling(OperatorUtils::matches), BOOLEAN, STRING, STRING)); } private static DefaultFunctionResolver regexp() { - return define(BuiltinFunctionName.REGEXP.getName(), + return define( + BuiltinFunctionName.REGEXP.getName(), impl(nullMissingHandling(OperatorUtils::matchesRegexp), INTEGER, STRING, STRING)); } private static DefaultFunctionResolver notLike() { - return define(BuiltinFunctionName.NOT_LIKE.getName(), - impl(nullMissingHandling( - (v1, v2) -> UnaryPredicateOperator.not(OperatorUtils.matches(v1, v2))), - BOOLEAN, STRING, STRING)); + return define( + BuiltinFunctionName.NOT_LIKE.getName(), + impl( + nullMissingHandling( + (v1, v2) -> UnaryPredicateOperator.not(OperatorUtils.matches(v1, v2))), + BOOLEAN, + STRING, + STRING)); } - private static ExprValue lookupTableFunction(ExprValue arg1, ExprValue arg2, - Table table) { + private static ExprValue lookupTableFunction( + ExprValue arg1, ExprValue arg2, Table table) { if (table.contains(arg1, arg2)) { return table.get(arg1, arg2); } else { diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java index 7d79d9d923..ad9d9ac934 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.predicate; import static 
org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; @@ -28,14 +27,11 @@ import org.opensearch.sql.expression.function.SerializableFunction; /** - * The definition of unary predicate function - * not, Accepts one Boolean value and produces a Boolean. + * The definition of unary predicate function not, Accepts one Boolean value and produces a Boolean. */ @UtilityClass public class UnaryPredicateOperator { - /** - * Register Unary Predicate Function. - */ + /** Register Unary Predicate Function. */ public static void register(BuiltinFunctionRepository repository) { repository.register(not()); repository.register(isNotNull()); @@ -47,17 +43,36 @@ public static void register(BuiltinFunctionRepository repository) { } private static DefaultFunctionResolver not() { - return FunctionDSL.define(BuiltinFunctionName.NOT.getName(), FunctionDSL - .impl(UnaryPredicateOperator::not, BOOLEAN, BOOLEAN)); + return FunctionDSL.define( + BuiltinFunctionName.NOT.getName(), + FunctionDSL.impl(UnaryPredicateOperator::not, BOOLEAN, BOOLEAN)); } /** * The not logic. - * A NOT A - * TRUE FALSE - * FALSE TRUE - * NULL NULL - * MISSING MISSING + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
ANOT A
TRUEFALSE
FALSETRUE
NULLNULL
MISSINGMISSING
*/ public ExprValue not(ExprValue v) { if (v.isMissing() || v.isNull()) { @@ -68,31 +83,33 @@ public ExprValue not(ExprValue v) { } private static DefaultFunctionResolver isNull(BuiltinFunctionName funcName) { - return FunctionDSL - .define(funcName.getName(), Arrays.stream(ExprCoreType.values()) - .map(type -> FunctionDSL - .impl((v) -> ExprBooleanValue.of(v.isNull()), BOOLEAN, type)) - .collect( - Collectors.toList())); + return FunctionDSL.define( + funcName.getName(), + Arrays.stream(ExprCoreType.values()) + .map(type -> FunctionDSL.impl((v) -> ExprBooleanValue.of(v.isNull()), BOOLEAN, type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver isNotNull() { - return FunctionDSL - .define(BuiltinFunctionName.IS_NOT_NULL.getName(), Arrays.stream(ExprCoreType.values()) - .map(type -> FunctionDSL - .impl((v) -> ExprBooleanValue.of(!v.isNull()), BOOLEAN, type)) - .collect( - Collectors.toList())); + return FunctionDSL.define( + BuiltinFunctionName.IS_NOT_NULL.getName(), + Arrays.stream(ExprCoreType.values()) + .map(type -> FunctionDSL.impl((v) -> ExprBooleanValue.of(!v.isNull()), BOOLEAN, type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver ifFunction() { FunctionName functionName = BuiltinFunctionName.IF.getName(); List typeList = ExprCoreType.coreTypes(); - List>> functionsOne = typeList.stream().map(v -> - impl((UnaryPredicateOperator::exprIf), v, BOOLEAN, v, v)) - .collect(Collectors.toList()); + List< + SerializableFunction< + FunctionName, + org.apache.commons.lang3.tuple.Pair>> + functionsOne = + typeList.stream() + .map(v -> impl((UnaryPredicateOperator::exprIf), v, BOOLEAN, v, v)) + .collect(Collectors.toList()); DefaultFunctionResolver functionResolver = FunctionDSL.define(functionName, functionsOne); return functionResolver; @@ -102,10 +119,14 @@ private static DefaultFunctionResolver ifNull() { FunctionName functionName = BuiltinFunctionName.IFNULL.getName(); List typeList = ExprCoreType.coreTypes(); - 
List>> functionsOne = typeList.stream().map(v -> - impl((UnaryPredicateOperator::exprIfNull), v, v, v)) - .collect(Collectors.toList()); + List< + SerializableFunction< + FunctionName, + org.apache.commons.lang3.tuple.Pair>> + functionsOne = + typeList.stream() + .map(v -> impl((UnaryPredicateOperator::exprIfNull), v, v, v)) + .collect(Collectors.toList()); DefaultFunctionResolver functionResolver = FunctionDSL.define(functionName, functionsOne); return functionResolver; @@ -116,14 +137,16 @@ private static DefaultFunctionResolver nullIf() { List typeList = ExprCoreType.coreTypes(); DefaultFunctionResolver functionResolver = - FunctionDSL.define(functionName, - typeList.stream().map(v -> - impl((UnaryPredicateOperator::exprNullIf), v, v, v)) - .collect(Collectors.toList())); + FunctionDSL.define( + functionName, + typeList.stream() + .map(v -> impl((UnaryPredicateOperator::exprNullIf), v, v, v)) + .collect(Collectors.toList())); return functionResolver; } - /** v2 if v1 is null. + /** + * v2 if v1 is null. * * @param v1 varable 1 * @param v2 varable 2 @@ -133,7 +156,8 @@ public static ExprValue exprIfNull(ExprValue v1, ExprValue v2) { return (v1.isNull() || v1.isMissing()) ? v2 : v1; } - /** return null if v1 equls to v2. + /** + * return null if v1 equls to v2. * * @param v1 varable 1 * @param v2 varable 2 @@ -146,5 +170,4 @@ public static ExprValue exprNullIf(ExprValue v1, ExprValue v2) { public static ExprValue exprIf(ExprValue v1, ExprValue v2, ExprValue v3) { return !v1.isNull() && !v1.isMissing() && LITERAL_TRUE.equals(v1) ? 
v2 : v3; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java index 9797832f07..748ce5f559 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java @@ -20,9 +20,7 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.expression.Expression; -/** - * GrokExpression with grok patterns. - */ +/** GrokExpression with grok patterns. */ @EqualsAndHashCode(callSuper = true) @ToString public class GrokExpression extends ParseExpression { @@ -33,15 +31,14 @@ public class GrokExpression extends ParseExpression { grokCompiler.registerDefaultPatterns(); } - @EqualsAndHashCode.Exclude - private final Grok grok; + @EqualsAndHashCode.Exclude private final Grok grok; /** * GrokExpression. * * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field */ public GrokExpression(Expression sourceField, Expression pattern, Expression identifier) { super("grok", sourceField, pattern, identifier); @@ -69,7 +66,9 @@ ExprValue parseValue(ExprValue value) throws ExpressionEvaluationException { */ public static List getNamedGroupCandidates(String pattern) { Grok grok = grokCompiler.compile(pattern); - return grok.namedGroups.stream().map(grok::getNamedRegexCollectionById) - .filter(group -> !group.equals("UNWANTED")).collect(Collectors.toUnmodifiableList()); + return grok.namedGroups.stream() + .map(grok::getNamedRegexCollectionById) + .filter(group -> !group.equals("UNWANTED")) + .collect(Collectors.toUnmodifiableList()); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java 
index 8d1ebcce08..6e2456ecc2 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java @@ -21,29 +21,25 @@ import org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.FunctionName; -/** - * ParseExpression. - */ +/** ParseExpression. */ @EqualsAndHashCode(callSuper = false) @ToString public abstract class ParseExpression extends FunctionExpression { - @Getter - protected final Expression sourceField; + @Getter protected final Expression sourceField; protected final Expression pattern; - @Getter - protected final Expression identifier; + @Getter protected final Expression identifier; protected final String identifierStr; /** * ParseExpression. * * @param functionName name of function expression - * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param sourceField source text field + * @param pattern pattern used for parsing + * @param identifier derived field */ - public ParseExpression(String functionName, Expression sourceField, Expression pattern, - Expression identifier) { + public ParseExpression( + String functionName, Expression sourceField, Expression pattern, Expression identifier) { super(FunctionName.of(functionName), ImmutableList.of(sourceField, pattern, identifier)); this.sourceField = sourceField; this.pattern = pattern; diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java index 67160dad58..5b92779c35 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java @@ -17,30 +17,28 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.expression.Expression; 
-/** - * PatternsExpression with regex filter. - */ +/** PatternsExpression with regex filter. */ @EqualsAndHashCode(callSuper = true) @ToString public class PatternsExpression extends ParseExpression { - /** - * Default name of the derived field. - */ + /** Default name of the derived field. */ public static final String DEFAULT_NEW_FIELD = "patterns_field"; - private static final ImmutableSet DEFAULT_IGNORED_CHARS = ImmutableSet.copyOf( - "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789".chars() - .mapToObj(c -> (char) c).toArray(Character[]::new)); + private static final ImmutableSet DEFAULT_IGNORED_CHARS = + ImmutableSet.copyOf( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + .chars() + .mapToObj(c -> (char) c) + .toArray(Character[]::new)); private final boolean useCustomPattern; - @EqualsAndHashCode.Exclude - private Pattern pattern; + @EqualsAndHashCode.Exclude private Pattern pattern; /** * PatternsExpression. * * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field */ public PatternsExpression(Expression sourceField, Expression pattern, Expression identifier) { super("patterns", sourceField, pattern, identifier); diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java index f3a3ff0b66..7514c9df69 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java @@ -19,24 +19,20 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.expression.Expression; -/** - * RegexExpression with regex and named capture group. - */ +/** RegexExpression with regex and named capture group. 
*/ @EqualsAndHashCode(callSuper = true) @ToString public class RegexExpression extends ParseExpression { private static final Logger log = LogManager.getLogger(RegexExpression.class); private static final Pattern GROUP_PATTERN = Pattern.compile("\\(\\?<([a-zA-Z][a-zA-Z0-9]*)>"); - @Getter - @EqualsAndHashCode.Exclude - private final Pattern regexPattern; + @Getter @EqualsAndHashCode.Exclude private final Pattern regexPattern; /** * RegexExpression. * * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field */ public RegexExpression(Expression sourceField, Expression pattern, Expression identifier) { super("regex", sourceField, pattern, identifier); diff --git a/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java b/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java index aff114145e..949ed52e7f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java @@ -24,9 +24,7 @@ public class SpanExpression implements Expression { private final Expression value; private final SpanUnit unit; - /** - * Construct a span expression by field and span interval expression. - */ + /** Construct a span expression by field and span interval expression. 
*/ public SpanExpression(Expression field, Expression value, SpanUnit unit) { this.field = field; this.value = value; @@ -35,18 +33,46 @@ public SpanExpression(Expression field, Expression value, SpanUnit unit) { @Override public ExprValue valueOf(Environment valueEnv) { - Rounding rounding = Rounding.createRounding(this); //TODO: will integrate with WindowAssigner + Rounding rounding = + Rounding.createRounding(this); // TODO: will integrate with WindowAssigner return rounding.round(field.valueOf(valueEnv)); } /** * Return type follows the following table. - * FIELD VALUE RETURN_TYPE - * int/long integer int/long (field type) - * int/long double double - * float/double integer float/double (field type) - * float/double double float/double (field type) - * other any field type + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
FIELDVALUERETURN_TYPE
int/longintegerint/long (field type)
int/longdoubledouble
float/doubleintegerfloat/double (field type)
float/doubledoublefloat/double (field type)
otheranyfield type
*/ @Override public ExprType type() { diff --git a/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java b/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java index e12bcd0a58..cf071c4f31 100644 --- a/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java +++ b/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java @@ -24,9 +24,7 @@ @UtilityClass public class SystemFunctions { - /** - * Register TypeOf Operator. - */ + /** Register TypeOf Operator. */ public static void register(BuiltinFunctionRepository repository) { repository.register(typeof()); } @@ -37,19 +35,20 @@ private static FunctionResolver typeof() { @Override public Pair resolve( FunctionSignature unresolvedSignature) { - return Pair.of(unresolvedSignature, + return Pair.of( + unresolvedSignature, (functionProperties, arguments) -> new FunctionExpression(BuiltinFunctionName.TYPEOF.getName(), arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - return new ExprStringValue(getArguments().get(0).type().legacyTypeName()); - } + @Override + public ExprValue valueOf(Environment valueEnv) { + return new ExprStringValue(getArguments().get(0).type().legacyTypeName()); + } - @Override - public ExprType type() { - return STRING; - } - }); + @Override + public ExprType type() { + return STRING; + } + }); } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java index 0bbfb65154..1cf7f64867 100644 --- a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.text; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -35,9 +34,8 @@ import 
org.opensearch.sql.expression.function.SerializableTriFunction; /** - * The definition of text functions. - * 1) have the clear interface for function define. - * 2) the implementation should rely on ExprValue. + * The definition of text functions. 1) have the clear interface for function define. 2) the + * implementation should rely on ExprValue. */ @UtilityClass public class TextFunction { @@ -70,17 +68,21 @@ public void register(BuiltinFunctionRepository repository) { } /** - * Gets substring starting at given point, for optional given length. - * Form of this function using keywords instead of comma delimited variables is not supported. - * Supports following signatures: + * Gets substring starting at given point, for optional given length.
+ * Form of this function using keywords instead of comma delimited variables is not supported.
+ * Supports following signatures:
* (STRING, INTEGER)/(STRING, INTEGER, INTEGER) -> STRING */ private DefaultFunctionResolver substringSubstr(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling(TextFunction::exprSubstrStart), - STRING, STRING, INTEGER), - impl(nullMissingHandling(TextFunction::exprSubstrStartLength), - STRING, STRING, INTEGER, INTEGER)); + return define( + functionName, + impl(nullMissingHandling(TextFunction::exprSubstrStart), STRING, STRING, INTEGER), + impl( + nullMissingHandling(TextFunction::exprSubstrStartLength), + STRING, + STRING, + INTEGER, + INTEGER)); } private DefaultFunctionResolver substring() { @@ -92,222 +94,277 @@ private DefaultFunctionResolver substr() { } /** - * Removes leading whitespace from string. - * Supports following signatures: + * Removes leading whitespace from string.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver ltrim() { - return define(BuiltinFunctionName.LTRIM.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripLeading())), - STRING, STRING)); + return define( + BuiltinFunctionName.LTRIM.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripLeading())), + STRING, + STRING)); } /** - * Removes trailing whitespace from string. - * Supports following signatures: + * Removes trailing whitespace from string.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver rtrim() { - return define(BuiltinFunctionName.RTRIM.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripTrailing())), - STRING, STRING)); + return define( + BuiltinFunctionName.RTRIM.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripTrailing())), + STRING, + STRING)); } /** - * Removes leading and trailing whitespace from string. - * Has option to specify a String to trim instead of whitespace but this is not yet supported. - * Supporting String specification requires finding keywords inside TRIM command. - * Supports following signatures: + * Removes leading and trailing whitespace from string.
+ * Has option to specify a String to trim instead of whitespace but this is not yet supported.
+ * Supporting String specification requires finding keywords inside TRIM command.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver trim() { - return define(BuiltinFunctionName.TRIM.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue(v.stringValue().trim())), - STRING, STRING)); + return define( + BuiltinFunctionName.TRIM.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue(v.stringValue().trim())), + STRING, + STRING)); } /** - * Converts String to lowercase. - * Supports following signatures: + * Converts String to lowercase.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver lower() { - return define(BuiltinFunctionName.LOWER.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toLowerCase()))), - STRING, STRING) - ); + return define( + BuiltinFunctionName.LOWER.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toLowerCase()))), + STRING, + STRING)); } /** - * Converts String to uppercase. - * Supports following signatures: + * Converts String to uppercase.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver upper() { - return define(BuiltinFunctionName.UPPER.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toUpperCase()))), - STRING, STRING) - ); + return define( + BuiltinFunctionName.UPPER.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toUpperCase()))), + STRING, + STRING)); } /** - * Concatenates a list of Strings. - * Supports following signatures: + * Concatenates a list of Strings.
+ * Supports following signatures:
* (STRING, STRING, ...., STRING) -> STRING */ private DefaultFunctionResolver concat() { FunctionName concatFuncName = BuiltinFunctionName.CONCAT.getName(); - return define(concatFuncName, funcName -> + return define( + concatFuncName, + funcName -> Pair.of( - new FunctionSignature(concatFuncName, Collections.singletonList(ARRAY)), - (funcProp, args) -> new FunctionExpression(funcName, args) { + new FunctionSignature(concatFuncName, Collections.singletonList(ARRAY)), + (funcProp, args) -> + new FunctionExpression(funcName, args) { @Override public ExprValue valueOf(Environment valueEnv) { - List exprValues = args.stream() - .map(arg -> arg.valueOf(valueEnv)).collect(Collectors.toList()); - if (exprValues.stream().anyMatch(ExprValue::isMissing)) { - return ExprValueUtils.missingValue(); - } - if (exprValues.stream().anyMatch(ExprValue::isNull)) { - return ExprValueUtils.nullValue(); - } - return new ExprStringValue(exprValues.stream() - .map(ExprValue::stringValue) - .collect(Collectors.joining())); + List exprValues = + args.stream() + .map(arg -> arg.valueOf(valueEnv)) + .collect(Collectors.toList()); + if (exprValues.stream().anyMatch(ExprValue::isMissing)) { + return ExprValueUtils.missingValue(); + } + if (exprValues.stream().anyMatch(ExprValue::isNull)) { + return ExprValueUtils.nullValue(); + } + return new ExprStringValue( + exprValues.stream() + .map(ExprValue::stringValue) + .collect(Collectors.joining())); } @Override public ExprType type() { return STRING; } - } - )); + })); } /** - * TODO: https://github.com/opendistro-for-elasticsearch/sql/issues/710 - * Extend to accept variable argument amounts. - * Concatenates a list of Strings with a separator string. - * Supports following signatures: + * TODO: https://github.com/opendistro-for-elasticsearch/sql/issues/710
+ * Extend to accept variable argument amounts.
+ *
+ * Concatenates a list of Strings with a separator string. Supports following
+ * signatures:
* (STRING, STRING, STRING) -> STRING */ private DefaultFunctionResolver concat_ws() { - return define(BuiltinFunctionName.CONCAT_WS.getName(), - impl(nullMissingHandling((sep, str1, str2) -> - new ExprStringValue(str1.stringValue() + sep.stringValue() + str2.stringValue())), - STRING, STRING, STRING, STRING)); + return define( + BuiltinFunctionName.CONCAT_WS.getName(), + impl( + nullMissingHandling( + (sep, str1, str2) -> + new ExprStringValue( + str1.stringValue() + sep.stringValue() + str2.stringValue())), + STRING, + STRING, + STRING, + STRING)); } /** - * Calculates length of String in bytes. - * Supports following signatures: + * Calculates length of String in bytes.
+ * Supports following signatures:
* STRING -> INTEGER */ private DefaultFunctionResolver length() { - return define(BuiltinFunctionName.LENGTH.getName(), - impl(nullMissingHandling((str) -> - new ExprIntegerValue(str.stringValue().getBytes().length)), INTEGER, STRING)); + return define( + BuiltinFunctionName.LENGTH.getName(), + impl( + nullMissingHandling((str) -> new ExprIntegerValue(str.stringValue().getBytes().length)), + INTEGER, + STRING)); } /** - * Does String comparison of two Strings and returns Integer value. - * Supports following signatures: + * Does String comparison of two Strings and returns Integer value.
+ * Supports following signatures:
* (STRING, STRING) -> INTEGER */ private DefaultFunctionResolver strcmp() { - return define(BuiltinFunctionName.STRCMP.getName(), - impl(nullMissingHandling((str1, str2) -> - new ExprIntegerValue(Integer.compare( - str1.stringValue().compareTo(str2.stringValue()), 0))), - INTEGER, STRING, STRING)); + return define( + BuiltinFunctionName.STRCMP.getName(), + impl( + nullMissingHandling( + (str1, str2) -> + new ExprIntegerValue( + Integer.compare(str1.stringValue().compareTo(str2.stringValue()), 0))), + INTEGER, + STRING, + STRING)); } /** - * Returns the rightmost len characters from the string str, or NULL if any argument is NULL. - * Supports following signatures: + * Returns the rightmost len characters from the string str, or NULL if any argument is + * NULL.
+ * Supports following signatures:
* (STRING, INTEGER) -> STRING */ private DefaultFunctionResolver right() { - return define(BuiltinFunctionName.RIGHT.getName(), - impl(nullMissingHandling(TextFunction::exprRight), STRING, STRING, INTEGER)); + return define( + BuiltinFunctionName.RIGHT.getName(), + impl(nullMissingHandling(TextFunction::exprRight), STRING, STRING, INTEGER)); } /** - * Returns the leftmost len characters from the string str, or NULL if any argument is NULL. - * Supports following signature: + * Returns the leftmost len characters from the string str, or NULL if any argument is + * NULL.
+ * Supports following signature:
* (STRING, INTEGER) -> STRING */ private DefaultFunctionResolver left() { - return define(BuiltinFunctionName.LEFT.getName(), + return define( + BuiltinFunctionName.LEFT.getName(), impl(nullMissingHandling(TextFunction::exprLeft), STRING, STRING, INTEGER)); } /** - * Returns the numeric value of the leftmost character of the string str. - * Returns 0 if str is the empty string. Returns NULL if str is NULL. - * ASCII() works for 8-bit characters. - * Supports following signature: + * Returns the numeric value of the leftmost character of the string str.
+ * Returns 0 if str is the empty string. Returns NULL if str is NULL.
+ * ASCII() works for 8-bit characters.
+ * Supports following signature:
* STRING -> INTEGER */ private DefaultFunctionResolver ascii() { - return define(BuiltinFunctionName.ASCII.getName(), + return define( + BuiltinFunctionName.ASCII.getName(), impl(nullMissingHandling(TextFunction::exprAscii), INTEGER, STRING)); } /** - * LOCATE(substr, str) returns the position of the first occurrence of substring substr - * in string str. LOCATE(substr, str, pos) returns the position of the first occurrence - * of substring substr in string str, starting at position pos. - * Returns 0 if substr is not in str. - * Returns NULL if any argument is NULL. - * Supports following signature: - * (STRING, STRING) -> INTEGER + * LOCATE(substr, str) returns the position of the first occurrence of substring substr
+ * in string str. LOCATE(substr, str, pos) returns the position of the first occurrence
+ * of substring substr in string str, starting at position pos.
+ * Returns 0 if substr is not in str.
+ * Returns NULL if any argument is NULL.
+ * Supports following signature:
+ * (STRING, STRING) -> INTEGER
* (STRING, STRING, INTEGER) -> INTEGER */ private DefaultFunctionResolver locate() { - return define(BuiltinFunctionName.LOCATE.getName(), - impl(nullMissingHandling( - (SerializableBiFunction) - TextFunction::exprLocate), INTEGER, STRING, STRING), - impl(nullMissingHandling( - (SerializableTriFunction) - TextFunction::exprLocate), INTEGER, STRING, STRING, INTEGER)); + return define( + BuiltinFunctionName.LOCATE.getName(), + impl( + nullMissingHandling( + (SerializableBiFunction) TextFunction::exprLocate), + INTEGER, + STRING, + STRING), + impl( + nullMissingHandling( + (SerializableTriFunction) + TextFunction::exprLocate), + INTEGER, + STRING, + STRING, + INTEGER)); } /** - * Returns the position of the first occurrence of a substring in a string starting from 1. - * Returns 0 if substring is not in string. - * Returns NULL if any argument is NULL. - * Supports following signature: + * Returns the position of the first occurrence of a substring in a string starting from 1. + *
+ * Returns 0 if substring is not in string.
+ * Returns NULL if any argument is NULL.
+ * Supports following signature:
* (STRING IN STRING) -> INTEGER */ private DefaultFunctionResolver position() { - return define(BuiltinFunctionName.POSITION.getName(), - impl(nullMissingHandling( - (SerializableBiFunction) - TextFunction::exprLocate), INTEGER, STRING, STRING)); + return define( + BuiltinFunctionName.POSITION.getName(), + impl( + nullMissingHandling( + (SerializableBiFunction) TextFunction::exprLocate), + INTEGER, + STRING, + STRING)); } /** - * REPLACE(str, from_str, to_str) returns the string str with all occurrences of - * the string from_str replaced by the string to_str. - * REPLACE() performs a case-sensitive match when searching for from_str. - * Supports following signature: + * REPLACE(str, from_str, to_str) returns the string str with all occurrences of
+ * the string from_str replaced by the string to_str.

+ * REPLACE() performs a case-sensitive match when searching for from_str.
+ * Supports following signature:
* (STRING, STRING, STRING) -> STRING */ private DefaultFunctionResolver replace() { - return define(BuiltinFunctionName.REPLACE.getName(), + return define( + BuiltinFunctionName.REPLACE.getName(), impl(nullMissingHandling(TextFunction::exprReplace), STRING, STRING, STRING, STRING)); } /** - * REVERSE(str) returns reversed string of the string supplied as an argument - * Returns NULL if the argument is NULL. - * Supports the following signature: + * REVERSE(str) returns reversed string of the string supplied as an argument

+ * Returns NULL if the argument is NULL.
+ * Supports the following signature:
* (STRING) -> STRING */ private DefaultFunctionResolver reverse() { - return define(BuiltinFunctionName.REVERSE.getName(), + return define( + BuiltinFunctionName.REVERSE.getName(), impl(nullMissingHandling(TextFunction::exprReverse), STRING, STRING)); } @@ -321,7 +378,7 @@ private static ExprValue exprSubstrStart(ExprValue exprValue, ExprValue start) { } private static ExprValue exprSubstrStartLength( - ExprValue exprValue, ExprValue start, ExprValue length) { + ExprValue exprValue, ExprValue start, ExprValue length) { int startIdx = start.integerValue(); int len = length.integerValue(); if ((startIdx == 0) || (len == 0)) { diff --git a/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java b/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java index 24751633de..2030ce8062 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window; import static org.opensearch.sql.ast.tree.Sort.SortOption; @@ -16,9 +15,7 @@ import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.expression.Expression; -/** - * Window definition that consists of partition and sort by information for a window. - */ +/** Window definition that consists of partition and sort by information for a window. */ @Data public class WindowDefinition { @@ -27,7 +24,8 @@ public class WindowDefinition { /** * Return all items in partition by and sort list. 
- * @return all sort items + * + * @return all sort items */ public List> getAllSortItems() { List> allSorts = new ArrayList<>(); @@ -35,5 +33,4 @@ public List> getAllSortItems() { allSorts.addAll(sortList); return allSorts; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java index a15919bf03..73f0734953 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java @@ -3,27 +3,27 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window; import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.window.frame.WindowFrame; -/** - * Window function abstraction. - */ +/** Window function abstraction. */ public interface WindowFunctionExpression extends Expression { /** - * Create specific window frame based on window definition and what's current window function. - * For now two types of cumulative window frame is returned: - * 1. Ranking window functions: ignore frame definition and always operates on - * previous and current row. - * 2. Aggregate window functions: frame partition into peers and sliding window is not supported. + * Create specific window frame based on window definition and what's current window function. For + * now two types of cumulative window frame is returned: + * + *
    + *
  1. Ranking window functions: ignore frame definition and always operates on previous and + * current row. + *
  2. Aggregate window functions: frame partition into peers and sliding window is not + * supported. + *
* * @param definition window definition - * @return window frame + * @return window frame */ WindowFrame createWindowFrame(WindowDefinition definition); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java index 9a9e0c4c86..3df59c52c0 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window; import static java.util.Collections.emptyList; @@ -22,16 +21,14 @@ import org.opensearch.sql.expression.window.ranking.RankingWindowFunction; import org.opensearch.sql.expression.window.ranking.RowNumberFunction; -/** - * Window functions that register all window functions in function repository. - */ +/** Window functions that register all window functions in function repository. */ @UtilityClass public class WindowFunctions { /** * Register all window functions to function repository. 
* - * @param repository function repository + * @param repository function repository */ public void register(BuiltinFunctionRepository repository) { repository.register(rowNumber()); @@ -51,11 +48,11 @@ private DefaultFunctionResolver denseRank() { return rankingFunction(BuiltinFunctionName.DENSE_RANK.getName(), DenseRankFunction::new); } - private DefaultFunctionResolver rankingFunction(FunctionName functionName, - Supplier constructor) { + private DefaultFunctionResolver rankingFunction( + FunctionName functionName, Supplier constructor) { FunctionSignature functionSignature = new FunctionSignature(functionName, emptyList()); FunctionBuilder functionBuilder = (functionProperties, arguments) -> constructor.get(); - return new DefaultFunctionResolver(functionName, - ImmutableMap.of(functionSignature, functionBuilder)); + return new DefaultFunctionResolver( + functionName, ImmutableMap.of(functionSignature, functionBuilder)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java index 604f65e6ff..63922ac3fd 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.aggregation; import java.util.List; @@ -21,9 +20,7 @@ import org.opensearch.sql.expression.window.frame.PeerRowsWindowFrame; import org.opensearch.sql.expression.window.frame.WindowFrame; -/** - * Aggregate function adapter that adapts Aggregator for window operator use. - */ +/** Aggregate function adapter that adapts Aggregator for window operator use. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class AggregateWindowFunction implements WindowFunctionExpression { @@ -64,5 +61,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public String toString() { return aggregator.toString(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java b/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java index 06b19a1488..359486a4ef 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.frame; import com.google.common.collect.PeekingIterator; @@ -21,18 +20,17 @@ import org.opensearch.sql.expression.window.WindowDefinition; /** - * Conceptually, cumulative window frame should hold all seen rows till next partition. - * This class is actually an optimized version that only hold previous and current row. This is - * efficient and sufficient for ranking and aggregate window function support for now, though need - * to add "real" cumulative frame implementation in future as needed. + * Conceptually, cumulative window frame should hold all seen rows till next partition. This class + * is actually an optimized version that only hold previous and current row. This is efficient and + * sufficient for ranking and aggregate window function support for now, though need to add "real" + * cumulative frame implementation in future as needed. 
*/ @EqualsAndHashCode @RequiredArgsConstructor @ToString public class CurrentRowWindowFrame implements WindowFrame { - @Getter - private final WindowDefinition windowDefinition; + @Getter private final WindowDefinition windowDefinition; private ExprValue previous; private ExprValue current; @@ -67,14 +65,12 @@ public ExprValue previous() { private List resolve(List expressions, ExprValue row) { Environment valueEnv = row.bindingTuples(); - return expressions.stream() - .map(expr -> expr.valueOf(valueEnv)) - .collect(Collectors.toList()); + return expressions.stream().map(expr -> expr.valueOf(valueEnv)).collect(Collectors.toList()); } /** - * Current row window frame won't pre-fetch any row ahead. - * So always return false as nothing "cached" in frame. + * Current row window frame won't pre-fetch any row ahead. So always return false as nothing + * "cached" in frame. */ @Override public boolean hasNext() { @@ -85,5 +81,4 @@ public boolean hasNext() { public List next() { return Collections.emptyList(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java b/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java index a3e8de40c1..a98826d333 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.frame; import com.google.common.collect.PeekingIterator; @@ -19,9 +18,9 @@ import org.opensearch.sql.expression.window.WindowDefinition; /** - * Window frame that only keep peers (tuples with same value of fields specified in sort list - * in window definition). See PeerWindowFrameTest for details about how this window frame - * interacts with window operator and window function. 
+ * Window frame that only keep peers (tuples with same value of fields specified in sort list in + * window definition). See PeerWindowFrameTest for details about how this window frame interacts + * with window operator and window function. */ @RequiredArgsConstructor public class PeerRowsWindowFrame implements WindowFrame { @@ -29,34 +28,27 @@ public class PeerRowsWindowFrame implements WindowFrame { private final WindowDefinition windowDefinition; /** - * All peer rows (peer means rows in a partition that share same sort key - * based on sort list in window definition. + * All peer rows (peer means rows in a partition that share same sort key based on sort list in + * window definition. */ private final List peers = new ArrayList<>(); - /** - * Which row in the peer is currently being enriched by window function. - */ + /** Which row in the peer is currently being enriched by window function. */ private int position; - /** - * Does row at current position represents a new partition. - */ + /** Does row at current position represents a new partition. */ private boolean isNewPartition = true; - /** - * If any more pre-fetched rows not returned to window operator yet. - */ + /** If any more pre-fetched rows not returned to window operator yet. */ @Override public boolean hasNext() { return position < peers.size(); } /** - * Move position and clear new partition flag. - * Note that because all peer rows have same result from window function, - * this is only returned at first time to change window function state. - * Afterwards, empty list is returned to avoid changes until next peer loaded. + * Move position and clear new partition flag. Note that because all peer rows have same result + * from window function, this is only returned at first time to change window function state. + * Afterward, empty list is returned to avoid changes until next peer loaded. 
* * @return all rows for the peer */ @@ -70,8 +62,9 @@ public List next() { } /** - * Current row at the position. Because rows are pre-fetched here, - * window operator needs to get them from here too. + * Current row at the position. Because rows are pre-fetched here, window operator needs to get + * them from here too. + * * @return row at current position that being enriched by window function */ @Override @@ -82,11 +75,16 @@ public ExprValue current() { /** * Preload all peer rows if last peer rows done. Note that when no more data in peeking iterator, * there must be rows in frame (hasNext()=true), so no need to check it.hasNext() in this method. - * Load until: - * 1. Different peer found (row with different sort key) - * 2. Or new partition (row with different partition key) - * 3. Or no more rows - * @param it rows iterator + *
+ * Load until:
+ * + *
    + *
  1. Different peer found (row with different sort key) + *
  2. Or new partition (row with different partition key) + *
  3. Or no more rows + *
+ * + * @param it rows iterator */ @Override public void load(PeekingIterator it) { @@ -118,10 +116,7 @@ public boolean isNewPartition() { private boolean isPeer(ExprValue next) { List sortFields = - windowDefinition.getSortList() - .stream() - .map(Pair::getRight) - .collect(Collectors.toList()); + windowDefinition.getSortList().stream().map(Pair::getRight).collect(Collectors.toList()); ExprValue last = peers.get(peers.size() - 1); return resolve(sortFields, last).equals(resolve(sortFields, next)); @@ -139,9 +134,6 @@ private boolean isSamePartition(ExprValue next) { private List resolve(List expressions, ExprValue row) { Environment valueEnv = row.bindingTuples(); - return expressions.stream() - .map(expr -> expr.valueOf(valueEnv)) - .collect(Collectors.toList()); + return expressions.stream().map(expr -> expr.valueOf(valueEnv)).collect(Collectors.toList()); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java b/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java index 323656547f..657f63e4c9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.frame; import com.google.common.collect.PeekingIterator; @@ -14,13 +13,17 @@ import org.opensearch.sql.expression.env.Environment; /** - * Window frame that represents a subset of a window which is all data accessible to - * the window function when calculation. 
Basically there are 3 types of window frame: - * 1) Entire window frame that holds all data of the window - * 2) Cumulative window frame that accumulates one row by another - * 3) Sliding window frame that maintains a sliding window of fixed size - * Note that which type of window frame is used is determined by both window function itself - * and frame definition in a window definition. + * Window frame that represents a subset of a window which is all data accessible to the window + * function when calculation. Basically there are 3 types of window frame: + * + *
    + *
  1. Entire window frame that holds all data of the window + *
  2. Cumulative window frame that accumulates one row by another + *
  3. Sliding window frame that maintains a sliding window of fixed size + *
+ * + * Note that which type of window frame is used is determined by both window function itself and + * frame definition in a window definition. */ public interface WindowFrame extends Environment, Iterator> { @@ -31,20 +34,22 @@ default ExprValue resolve(Expression var) { /** * Check is current row the beginning of a new partition according to window definition. - * @return true if a new partition begins here, otherwise false. + * + * @return true if a new partition begins here, otherwise false. */ boolean isNewPartition(); /** * Load one or more rows as window function calculation needed. - * @param iterator peeking iterator that can peek next element without moving iterator + * + * @param iterator peeking iterator that can peek next element without moving iterator */ void load(PeekingIterator iterator); /** * Get current data row for giving window operator chance to get rows preloaded into frame. + * * @return data row */ ExprValue current(); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java index ba6e88d98d..87506ef63e 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java @@ -3,15 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.window.frame.CurrentRowWindowFrame; /** - * Dense rank window function that assigns a rank number to each row similarly as - * rank function. The difference is there is no gap between rank number assigned. + * Dense rank window function that assigns a rank number to each row similarly as rank function. The + * difference is there is no gap between rank number assigned. 
*/ public class DenseRankFunction extends RankingWindowFunction { @@ -30,5 +29,4 @@ protected int rank(CurrentRowWindowFrame frame) { } return rank; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java index c1f33e6137..f72a28cd9a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java @@ -3,22 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.window.frame.CurrentRowWindowFrame; /** - * Rank window function that assigns a rank number to each row based on sort items - * defined in window definition. Use same rank number if sort item values same on - * previous and current row. + * Rank window function that assigns a rank number to each row based on sort items defined in window + * definition. Use same rank number if sort item values same on previous and current row. */ public class RankFunction extends RankingWindowFunction { - /** - * Total number of rows have seen in current partition. - */ + /** Total number of rows have seen in current partition. 
*/ private int total; public RankFunction() { @@ -38,5 +34,4 @@ protected int rank(CurrentRowWindowFrame frame) { } return rank; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java index 07a4b42dbd..c119629cda 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import static java.util.Collections.emptyList; @@ -30,11 +29,9 @@ * such as same return type (integer), same argument list (no arg). */ public abstract class RankingWindowFunction extends FunctionExpression - implements WindowFunctionExpression { + implements WindowFunctionExpression { - /** - * Current rank number assigned. - */ + /** Current rank number assigned. */ protected int rank; public RankingWindowFunction(FunctionName functionName) { @@ -58,26 +55,27 @@ public ExprValue valueOf(Environment valueEnv) { /** * Rank logic that sub-class needs to implement. - * @param frame window frame - * @return rank number + * + * @param frame window frame + * @return rank number */ protected abstract int rank(CurrentRowWindowFrame frame); /** * Check sort field to see if current value is different from previous. 
- * @param frame window frame - * @return true if different, false if same or no sort list defined + * + * @param frame window frame + * @return true if different, false if same or no sort list defined */ protected boolean isSortFieldValueDifferent(CurrentRowWindowFrame frame) { if (isSortItemsNotDefined(frame)) { return false; } - List sortItems = frame.getWindowDefinition() - .getSortList() - .stream() - .map(Pair::getRight) - .collect(Collectors.toList()); + List sortItems = + frame.getWindowDefinition().getSortList().stream() + .map(Pair::getRight) + .collect(Collectors.toList()); List previous = resolve(frame, sortItems, frame.previous()); List current = resolve(frame, sortItems, frame.current()); @@ -90,9 +88,7 @@ private boolean isSortItemsNotDefined(CurrentRowWindowFrame frame) { private List resolve(WindowFrame frame, List expressions, ExprValue row) { BindingTuple valueEnv = row.bindingTuples(); - return expressions.stream() - .map(expr -> expr.valueOf(valueEnv)) - .collect(Collectors.toList()); + return expressions.stream().map(expr -> expr.valueOf(valueEnv)).collect(Collectors.toList()); } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java index 067dfa569d..90bb2ed8ff 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import org.opensearch.sql.expression.function.BuiltinFunctionName; @@ -25,5 +24,4 @@ protected int rank(CurrentRowWindowFrame frame) { } return rank++; } - } diff --git a/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java b/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java index 94bb8d6936..84cec4c9c7 
100644 --- a/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java +++ b/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java @@ -3,19 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.monitor; -/** - * Always healthy resource monitor. - */ +/** Always healthy resource monitor. */ public class AlwaysHealthyMonitor extends ResourceMonitor { - public static final ResourceMonitor ALWAYS_HEALTHY_MONITOR = - new AlwaysHealthyMonitor(); + public static final ResourceMonitor ALWAYS_HEALTHY_MONITOR = new AlwaysHealthyMonitor(); - /** - * always healthy. - */ + /** always healthy. */ @Override public boolean isHealthy() { return true; diff --git a/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java b/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java index ce76a3f982..bbd1c67a62 100644 --- a/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java +++ b/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java @@ -3,12 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.monitor; /** - * The abstract interface of ResourceMonitor. - * When an fault is detected, the circuit breaker is open. + * The abstract interface of ResourceMonitor. When an fault is detected, the circuit breaker is + * open. */ public abstract class ResourceMonitor { /** From 991458063364399582651540e4d08325172504f2 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 9 Aug 2023 14:02:28 -0700 Subject: [PATCH 10/42] [Spotless] Applying Google Code Format for core/src/main files #3 (#1932) * Applying Google Java code format changes to core/src/main/java/org/opensearch/sql/planner core/src/main/java/org/opensearch/sql/storage core/src/main/java/org/opensearch/sql/utils Signed-off-by: Mitchell Gale * Ignore on failure for checkstyle in core and added the three spotless fixes files. 
Signed-off-by: Mitchell Gale * Fixed javadoc formatting in LogicalPlanOptimizer.java LogicalValues.java OperatorUtils.java Planner.java Signed-off-by: Mitchell Gale * spotless apply, fixing include for spotless build.gradle and adding pre tag. Signed-off-by: Mitchell Gale * addressed PR comments. Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- build.gradle | 5 +- .../sql/planner/DefaultImplementor.java | 27 ++-- .../opensearch/sql/planner/PlanContext.java | 7 +- .../org/opensearch/sql/planner/PlanNode.java | 5 +- .../org/opensearch/sql/planner/Planner.java | 48 +++--- .../sql/planner/SerializablePlan.java | 35 ++--- .../sql/planner/logical/LogicalAD.java | 1 + .../planner/logical/LogicalAggregation.java | 19 +-- .../planner/logical/LogicalCloseCursor.java | 4 +- .../sql/planner/logical/LogicalDedupe.java | 13 +- .../sql/planner/logical/LogicalEval.java | 12 +- .../planner/logical/LogicalFetchCursor.java | 14 +- .../sql/planner/logical/LogicalFilter.java | 12 +- .../sql/planner/logical/LogicalHighlight.java | 8 +- .../sql/planner/logical/LogicalLimit.java | 5 +- .../sql/planner/logical/LogicalML.java | 7 +- .../sql/planner/logical/LogicalMLCommons.java | 10 +- .../sql/planner/logical/LogicalNested.java | 12 +- .../sql/planner/logical/LogicalPaginate.java | 7 +- .../sql/planner/logical/LogicalPlan.java | 10 +- .../sql/planner/logical/LogicalPlanDSL.java | 39 ++--- .../logical/LogicalPlanNodeVisitor.java | 1 - .../sql/planner/logical/LogicalProject.java | 15 +- .../sql/planner/logical/LogicalRareTopN.java | 12 +- .../sql/planner/logical/LogicalRelation.java | 15 +- .../sql/planner/logical/LogicalRemove.java | 16 +- .../sql/planner/logical/LogicalRename.java | 17 +-- .../sql/planner/logical/LogicalSort.java | 13 +- .../sql/planner/logical/LogicalValues.java | 36 +++-- .../sql/planner/logical/LogicalWindow.java | 10 +- .../sql/planner/logical/LogicalWrite.java | 8 +- .../optimizer/LogicalPlanOptimizer.java | 68 
++++----- .../planner/optimizer/PushDownPageSize.java | 9 +- .../sql/planner/optimizer/Rule.java | 7 +- .../planner/optimizer/pattern/Patterns.java | 94 +++++------- .../optimizer/rule/MergeFilterAndFilter.java | 21 +-- .../optimizer/rule/PushFilterUnderSort.java | 20 +-- .../rule/read/CreateTableScanBuilder.java | 13 +- .../rule/read/TableScanPushDown.java | 59 +++----- .../rule/write/CreateTableWriteBuilder.java | 7 +- .../planner/physical/AggregationOperator.java | 32 ++-- .../planner/physical/CursorCloseOperator.java | 10 +- .../sql/planner/physical/DedupeOperator.java | 29 ++-- .../sql/planner/physical/EvalOperator.java | 12 +- .../sql/planner/physical/FilterOperator.java | 16 +- .../sql/planner/physical/LimitOperator.java | 18 +-- .../sql/planner/physical/NestedOperator.java | 97 +++++------- .../sql/planner/physical/PhysicalPlan.java | 15 +- .../sql/planner/physical/PhysicalPlanDSL.java | 43 +++--- .../physical/PhysicalPlanNodeVisitor.java | 1 - .../sql/planner/physical/ProjectOperator.java | 39 +++-- .../planner/physical/RareTopNOperator.java | 142 +++++++++--------- .../sql/planner/physical/RemoveOperator.java | 18 +-- .../sql/planner/physical/RenameOperator.java | 28 ++-- .../sql/planner/physical/SortOperator.java | 28 ++-- .../sql/planner/physical/ValuesOperator.java | 22 +-- .../sql/planner/physical/WindowOperator.java | 39 ++--- .../physical/collector/BucketCollector.java | 27 ++-- .../planner/physical/collector/Collector.java | 16 +- .../physical/collector/MetricCollector.java | 4 +- .../planner/physical/collector/Rounding.java | 74 ++++----- .../physical/datasource/DataSourceTable.java | 11 +- .../datasource/DataSourceTableScan.java | 24 ++- .../datasource/DataSourceTableSchema.java | 19 +-- .../sql/planner/streaming/StreamContext.java | 4 +- .../BoundedOutOfOrderWatermarkGenerator.java | 4 +- .../watermark/WatermarkGenerator.java | 5 +- .../planner/streaming/windowing/Window.java | 8 +- .../assigner/SlidingWindowAssigner.java | 14 +- 
.../assigner/TumblingWindowAssigner.java | 8 +- .../windowing/assigner/WindowAssigner.java | 6 +- .../trigger/AfterWatermarkWindowTrigger.java | 4 +- .../windowing/trigger/TriggerResult.java | 4 +- .../sql/storage/DataSourceFactory.java | 9 +- .../opensearch/sql/storage/StorageEngine.java | 9 +- .../org/opensearch/sql/storage/Table.java | 25 +-- .../sql/storage/TableScanOperator.java | 5 +- .../storage/bindingtuple/BindingTuple.java | 26 ++-- .../bindingtuple/LazyBindingTuple.java | 5 +- .../sql/storage/read/TableScanBuilder.java | 40 +++-- .../opensearch/sql/storage/split/Split.java | 5 +- .../sql/storage/write/TableWriteBuilder.java | 6 +- .../sql/utils/DateTimeFormatters.java | 47 ++---- .../opensearch/sql/utils/DateTimeUtils.java | 70 ++++----- .../opensearch/sql/utils/ExpressionUtils.java | 9 +- .../opensearch/sql/utils/OperatorUtils.java | 11 +- .../org/opensearch/sql/utils/ParseUtils.java | 38 +++-- .../sql/utils/SystemIndexUtils.java | 41 ++--- 88 files changed, 761 insertions(+), 1147 deletions(-) diff --git a/build.gradle b/build.gradle index f6f390505d..3e75433d83 100644 --- a/build.gradle +++ b/build.gradle @@ -84,7 +84,10 @@ repositories { spotless { java { target fileTree('.') { - include 'core/src/main/java/org/opensearch/sql/monitor/**/*.java', + include 'core/src/main/java/org/opensearch/sql/planner/**/*.java', + 'core/src/main/java/org/opensearch/sql/storage/**/*.java', + 'core/src/main/java/org/opensearch/sql/utils/**/*.java', + 'core/src/main/java/org/opensearch/sql/monitor/**/*.java', 'core/src/main/java/org/opensearch/sql/expression/**/*.java', 'core/src/main/java/org/opensearch/sql/executor/**/*.java', 'core/src/main/java/org/opensearch/sql/exception/**/*.java', diff --git a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java index 699d0ec76a..b53d17b38f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java +++ 
b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java @@ -45,13 +45,12 @@ /** * Default implementor for implementing logical to physical translation. "Default" here means all - * logical operator will be translated to correspondent physical operator to pipeline operations - * in post-processing style in memory. - * Different storage can override methods here to optimize default pipelining operator, for example - * a storage has the flexibility to override visitFilter and visitRelation to push down filtering - * operation and return a single physical index scan operator. + * logical operator will be translated to correspondent physical operator to pipeline operations in + * post-processing style in memory. Different storage can override methods here to optimize default + * pipelining operator, for example a storage has the flexibility to override visitFilter and + * visitRelation to push down filtering operation and return a single physical index scan operator. * - * @param context type + * @param context type */ public class DefaultImplementor extends LogicalPlanNodeVisitor { @@ -62,8 +61,7 @@ public PhysicalPlan visitRareTopN(LogicalRareTopN node, C context) { node.getCommandType(), node.getNoOfResults(), node.getFieldList(), - node.getGroupByList() - ); + node.getGroupByList()); } @Override @@ -78,16 +76,14 @@ public PhysicalPlan visitDedupe(LogicalDedupe node, C context) { @Override public PhysicalPlan visitProject(LogicalProject node, C context) { - return new ProjectOperator(visitChild(node, context), node.getProjectList(), - node.getNamedParseExpressions()); + return new ProjectOperator( + visitChild(node, context), node.getProjectList(), node.getNamedParseExpressions()); } @Override public PhysicalPlan visitWindow(LogicalWindow node, C context) { return new WindowOperator( - visitChild(node, context), - node.getWindowFunction(), - node.getWindowDefinition()); + visitChild(node, context), node.getWindowFunction(), 
node.getWindowDefinition()); } @Override @@ -148,8 +144,9 @@ public PhysicalPlan visitTableWriteBuilder(TableWriteBuilder plan, C context) { @Override public PhysicalPlan visitRelation(LogicalRelation node, C context) { - throw new UnsupportedOperationException("Storage engine is responsible for " - + "implementing and optimizing logical plan with relation involved"); + throw new UnsupportedOperationException( + "Storage engine is responsible for " + + "implementing and optimizing logical plan with relation involved"); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/PlanContext.java b/core/src/main/java/org/opensearch/sql/planner/PlanContext.java index 3d43c02d61..38f2bde244 100644 --- a/core/src/main/java/org/opensearch/sql/planner/PlanContext.java +++ b/core/src/main/java/org/opensearch/sql/planner/PlanContext.java @@ -9,13 +9,10 @@ import lombok.Getter; import org.opensearch.sql.storage.split.Split; -/** - * Plan context hold planning related information. - */ +/** Plan context hold planning related information. */ public class PlanContext { - @Getter - private final Optional split; + @Getter private final Optional split; public PlanContext(Split split) { this.split = Optional.of(split); diff --git a/core/src/main/java/org/opensearch/sql/planner/PlanNode.java b/core/src/main/java/org/opensearch/sql/planner/PlanNode.java index 8cd6e088e5..a79997cd7f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/PlanNode.java +++ b/core/src/main/java/org/opensearch/sql/planner/PlanNode.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner; import java.util.List; -/** - * The definition of Plan Node. - */ +/** The definition of Plan Node. 
*/ public interface PlanNode { /** diff --git a/core/src/main/java/org/opensearch/sql/planner/Planner.java b/core/src/main/java/org/opensearch/sql/planner/Planner.java index 8333425091..1397fa8a18 100644 --- a/core/src/main/java/org/opensearch/sql/planner/Planner.java +++ b/core/src/main/java/org/opensearch/sql/planner/Planner.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner; - import java.util.List; import lombok.RequiredArgsConstructor; import org.opensearch.sql.planner.logical.LogicalPlan; @@ -16,17 +14,15 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.Table; -/** - * Planner that plans and chooses the optimal physical plan. - */ +/** Planner that plans and chooses the optimal physical plan. */ @RequiredArgsConstructor public class Planner { private final LogicalPlanOptimizer logicalOptimizer; /** - * Generate optimal physical plan for logical plan. If no table involved, - * translate logical plan to physical by default implementor. + * Generate optimal physical plan for logical plan. If no table involved, translate logical plan + * to physical by default implementor.
* TODO: for now just delegate entire logical plan to storage engine. * * @param plan logical plan @@ -37,28 +33,28 @@ public PhysicalPlan plan(LogicalPlan plan) { if (table == null) { return plan.accept(new DefaultImplementor<>(), null); } - return table.implement( - table.optimize(optimize(plan))); + return table.implement(table.optimize(optimize(plan))); } private Table findTable(LogicalPlan plan) { - return plan.accept(new LogicalPlanNodeVisitor() { - - @Override - public Table visitNode(LogicalPlan node, Object context) { - List children = node.getChild(); - if (children.isEmpty()) { - return null; - } - return children.get(0).accept(this, context); - } - - @Override - public Table visitRelation(LogicalRelation node, Object context) { - return node.getTable(); - } - - }, null); + return plan.accept( + new LogicalPlanNodeVisitor() { + + @Override + public Table visitNode(LogicalPlan node, Object context) { + List children = node.getChild(); + if (children.isEmpty()) { + return null; + } + return children.get(0).accept(this, context); + } + + @Override + public Table visitRelation(LogicalRelation node, Object context) { + return node.getTable(); + } + }, + null); } private LogicalPlan optimize(LogicalPlan plan) { diff --git a/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java b/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java index ab195da5bf..1503946abc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java @@ -10,36 +10,37 @@ /** * All subtypes of PhysicalPlan which needs to be serialized (in cursor, for pagination feature) * should follow one of the following options. + * *
    *
  • Both: - *
      - *
    • Override both methods from {@link Externalizable}.
    • - *
    • Define a public no-arg constructor.
    • - *
    - *
  • - *
  • - * Overwrite {@link #getPlanForSerialization} to return - * another instance of {@link SerializablePlan}. - *
  • + *
      + *
    • Override both methods from {@link Externalizable}. + *
    • Define a public no-arg constructor. + *
    + *
  • Overwrite {@link #getPlanForSerialization} to return another instance of {@link + * SerializablePlan}. *
*/ public interface SerializablePlan extends Externalizable { /** - * Override to return child or delegated plan, so parent plan should skip this one - * for serialization, but it should try to serialize grandchild plan. - * Imagine plan structure like this + * Override to return child or delegated plan, so parent plan should skip this one for + * serialization, but it should try to serialize grandchild plan. Imagine plan structure like this + * *
    *    A         -> this
    *    `- B      -> child
    *      `- C    -> this
    * 
- * In that case only plans A and C should be attempted to serialize. - * It is needed to skip a `ResourceMonitorPlan` instance only, actually. * - *
{@code
-   *    * A.writeObject(B.getPlanForSerialization());
-   *  }
+ * In that case only plans A and C should be attempted to serialize. It is needed to skip a + * `ResourceMonitorPlan` instance only, actually. + * + *
{@code
+   * * A.writeObject(B.getPlanForSerialization());
+   *
+   * }
+ * * @return Next plan for serialization. */ default SerializablePlan getPlanForSerialization() { diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java index c8c04b1817..25dbd14f1a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java @@ -18,6 +18,7 @@ public class LogicalAD extends LogicalPlan { /** * Constructor of LogicalAD. + * * @param child child logical plan * @param arguments arguments of the algorithm */ diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java index ebca01cdf8..ecbcece623 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -14,26 +13,18 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; -/** - * Logical Aggregation. - */ +/** Logical Aggregation. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalAggregation extends LogicalPlan { - @Getter - private final List aggregatorList; + @Getter private final List aggregatorList; - @Getter - private final List groupByList; + @Getter private final List groupByList; - /** - * Constructor of LogicalAggregation. - */ + /** Constructor of LogicalAggregation. 
*/ public LogicalAggregation( - LogicalPlan child, - List aggregatorList, - List groupByList) { + LogicalPlan child, List aggregatorList, List groupByList) { super(Collections.singletonList(child)); this.aggregatorList = aggregatorList; this.groupByList = groupByList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java index e5c30a4f4f..d1b98df8ed 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java @@ -10,8 +10,8 @@ import lombok.ToString; /** - * A logical plan node which wraps {@link org.opensearch.sql.planner.LogicalCursor} - * and represent a cursor close operation. + * A logical plan node which wraps {@link org.opensearch.sql.planner.LogicalCursor} and represent a + * cursor close operation. */ @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java index 020352287d..92734440f7 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Arrays; @@ -13,9 +12,7 @@ import lombok.ToString; import org.opensearch.sql.expression.Expression; -/** - * Logical Dedupe Plan. - */ +/** Logical Dedupe Plan. */ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -26,12 +23,12 @@ public class LogicalDedupe extends LogicalPlan { private final Boolean keepEmpty; private final Boolean consecutive; - /** - * Constructor of LogicalDedupe. - */ + /** Constructor of LogicalDedupe. 
*/ public LogicalDedupe( LogicalPlan child, - List dedupeList, Integer allowedDuplication, Boolean keepEmpty, + List dedupeList, + Integer allowedDuplication, + Boolean keepEmpty, Boolean consecutive) { super(Arrays.asList(child)); this.dedupeList = dedupeList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java index 8ec0b84dad..e7b8f353bc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -24,15 +23,10 @@ @EqualsAndHashCode(callSuper = true) public class LogicalEval extends LogicalPlan { - @Getter - private final List> expressions; + @Getter private final List> expressions; - /** - * Constructor of LogicalEval. - */ - public LogicalEval( - LogicalPlan child, - List> expressions) { + /** Constructor of LogicalEval. */ + public LogicalEval(LogicalPlan child, List> expressions) { super(Collections.singletonList(child)); this.expressions = expressions; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java index 4fc96f3ec1..ca16b41597 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java @@ -11,21 +11,15 @@ import lombok.ToString; import org.opensearch.sql.storage.StorageEngine; -/** - * A plan node which represents operation of fetching a next page from the cursor. - */ +/** A plan node which represents operation of fetching a next page from the cursor. 
*/ @EqualsAndHashCode(callSuper = false) @ToString public class LogicalFetchCursor extends LogicalPlan { - @Getter - private final String cursor; + @Getter private final String cursor; - @Getter - private final StorageEngine engine; + @Getter private final StorageEngine engine; - /** - * LogicalCursor constructor. Does not have child plans. - */ + /** LogicalCursor constructor. Does not have child plans. */ public LogicalFetchCursor(String cursor, StorageEngine engine) { super(List.of()); this.cursor = cursor; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java index 78887ad448..49280e8709 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -12,19 +11,14 @@ import lombok.ToString; import org.opensearch.sql.expression.Expression; -/** - * Logical Filter represent the filter relation. - */ +/** Logical Filter represent the filter relation. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalFilter extends LogicalPlan { - @Getter - private final Expression condition; + @Getter private final Expression condition; - /** - * Constructor of LogicalFilter. - */ + /** Constructor of LogicalFilter. 
*/ public LogicalFilter(LogicalPlan child, Expression condition) { super(Collections.singletonList(child)); this.condition = condition; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java index c1e873a00d..41fcd48f81 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java @@ -20,11 +20,9 @@ public class LogicalHighlight extends LogicalPlan { private final Expression highlightField; private final Map arguments; - /** - * Constructor of LogicalHighlight. - */ - public LogicalHighlight(LogicalPlan childPlan, Expression highlightField, - Map arguments) { + /** Constructor of LogicalHighlight. */ + public LogicalHighlight( + LogicalPlan childPlan, Expression highlightField, Map arguments) { super(Collections.singletonList(childPlan)); this.highlightField = highlightField; this.arguments = arguments; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java index e6253cb2cc..bec77d9b6f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -18,9 +17,7 @@ public class LogicalLimit extends LogicalPlan { private final Integer limit; private final Integer offset; - /** - * Constructor of LogicalLimit. - */ + /** Constructor of LogicalLimit. 
*/ public LogicalLimit(LogicalPlan input, Integer limit, Integer offset) { super(Collections.singletonList(input)); this.limit = limit; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java index c54ee92e08..780e0bba94 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java @@ -7,17 +7,16 @@ import lombok.ToString; import org.opensearch.sql.ast.expression.Literal; -/** - * ML logical plan. - */ +/** ML logical plan. */ @Getter @ToString @EqualsAndHashCode(callSuper = true) public class LogicalML extends LogicalPlan { - private final Map arguments; + private final Map arguments; /** * Constructor of LogicalML. + * * @param child child logical plan * @param arguments arguments of the algorithm */ diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java index 22771b42de..cfc313a68d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java @@ -7,25 +7,23 @@ import lombok.ToString; import org.opensearch.sql.ast.expression.Literal; -/** - * ml-commons logical plan. - */ +/** ml-commons logical plan. */ @Getter @ToString @EqualsAndHashCode(callSuper = true) public class LogicalMLCommons extends LogicalPlan { private final String algorithm; - private final Map arguments; + private final Map arguments; /** * Constructor of LogicalMLCommons. 
+ * * @param child child logical plan * @param algorithm algorithm name * @param arguments arguments of the algorithm */ - public LogicalMLCommons(LogicalPlan child, String algorithm, - Map arguments) { + public LogicalMLCommons(LogicalPlan child, String algorithm, Map arguments) { super(Collections.singletonList(child)); this.algorithm = algorithm; this.arguments = arguments; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java index 3e0e167cf3..e791a1fad1 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java @@ -14,9 +14,7 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Logical Nested plan. - */ +/** Logical Nested plan. */ @EqualsAndHashCode(callSuper = true) @Getter @ToString @@ -24,15 +22,11 @@ public class LogicalNested extends LogicalPlan { private List> fields; private final List projectList; - /** - * Constructor of LogicalNested. - * - */ + /** Constructor of LogicalNested. */ public LogicalNested( LogicalPlan childPlan, List> fields, - List projectList - ) { + List projectList) { super(Collections.singletonList(childPlan)); this.fields = fields; this.projectList = projectList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java index 372f9dcf0b..bd9f20e055 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java @@ -10,14 +10,11 @@ import lombok.Getter; import lombok.ToString; -/** - * LogicalPaginate represents pagination operation for underlying plan. - */ +/** LogicalPaginate represents pagination operation for underlying plan. 
*/ @ToString @EqualsAndHashCode(callSuper = false) public class LogicalPaginate extends LogicalPlan { - @Getter - private final int pageSize; + @Getter private final int pageSize; public LogicalPaginate(int pageSize, List childPlans) { super(childPlans); diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java index ad4a0b3794..2bc1a8756f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.List; import lombok.EqualsAndHashCode; import org.opensearch.sql.planner.PlanNode; -/** - * The abstract base class for all the Logical Plan node. - */ +/** The abstract base class for all the Logical Plan node. */ @EqualsAndHashCode(callSuper = false) public abstract class LogicalPlan implements PlanNode { @@ -27,8 +24,8 @@ public LogicalPlan(List childPlans) { * * @param visitor visitor. * @param context visitor context. - * @param returned object type. - * @param context type. + * @param returned object type. + * @param context type. * @return returned object. 
*/ public abstract R accept(LogicalPlanNodeVisitor visitor, C context); @@ -38,7 +35,6 @@ public LogicalPlan replaceChildPlans(List childPlans) { return this; } - @Override public List getChild() { return childPlans; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java index c0e253ca50..2a886ba0ca 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -25,9 +24,7 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** - * Logical Plan DSL. - */ +/** Logical Plan DSL. */ @UtilityClass public class LogicalPlanDSL { @@ -57,7 +54,7 @@ public static LogicalPlan rename( return new LogicalRename(input, renameMap); } - public static LogicalPlan paginate(LogicalPlan input, int fetchSize) { + public static LogicalPlan paginate(LogicalPlan input, int fetchSize) { return new LogicalPaginate(fetchSize, List.of(input)); } @@ -65,23 +62,23 @@ public static LogicalPlan project(LogicalPlan input, NamedExpression... 
fields) return new LogicalProject(input, Arrays.asList(fields), ImmutableList.of()); } - public static LogicalPlan project(LogicalPlan input, List fields, - List namedParseExpressions) { + public static LogicalPlan project( + LogicalPlan input, + List fields, + List namedParseExpressions) { return new LogicalProject(input, fields, namedParseExpressions); } - public LogicalPlan window(LogicalPlan input, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + public LogicalPlan window( + LogicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { return new LogicalWindow(input, windowFunction, windowDefinition); } - public LogicalPlan highlight(LogicalPlan input, Expression field, - Map arguments) { + public LogicalPlan highlight( + LogicalPlan input, Expression field, Map arguments) { return new LogicalHighlight(input, field, arguments); } - public static LogicalPlan nested( LogicalPlan input, List> nestedArgs, @@ -116,13 +113,20 @@ public static LogicalPlan dedupe( input, Arrays.asList(fields), allowedDuplication, keepEmpty, consecutive); } - public static LogicalPlan rareTopN(LogicalPlan input, CommandType commandType, - List groupByList, Expression... fields) { + public static LogicalPlan rareTopN( + LogicalPlan input, + CommandType commandType, + List groupByList, + Expression... fields) { return rareTopN(input, commandType, 10, groupByList, fields); } - public static LogicalPlan rareTopN(LogicalPlan input, CommandType commandType, int noOfResults, - List groupByList, Expression... fields) { + public static LogicalPlan rareTopN( + LogicalPlan input, + CommandType commandType, + int noOfResults, + List groupByList, + Expression... fields) { return new LogicalRareTopN(input, commandType, noOfResults, Arrays.asList(fields), groupByList); } @@ -134,5 +138,4 @@ public LogicalPlan values(List... 
values) { public static LogicalPlan limit(LogicalPlan input, Integer limit, Integer offset) { return new LogicalLimit(input, limit, offset); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java index dbe21d38e0..156db35306 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import org.opensearch.sql.storage.read.TableScanBuilder; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java index 427ccffc62..5978620480 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,21 +12,15 @@ import lombok.ToString; import org.opensearch.sql.expression.NamedExpression; -/** - * Project field specified by the {@link LogicalProject#projectList}. - */ +/** Project field specified by the {@link LogicalProject#projectList}. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalProject extends LogicalPlan { - @Getter - private final List projectList; - @Getter - private final List namedParseExpressions; + @Getter private final List projectList; + @Getter private final List namedParseExpressions; - /** - * Constructor of LogicalProject. - */ + /** Constructor of LogicalProject. 
*/ public LogicalProject( LogicalPlan child, List projectList, diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java index 4744bc590f..2c387eca9c 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -14,9 +13,7 @@ import org.opensearch.sql.ast.tree.RareTopN.CommandType; import org.opensearch.sql.expression.Expression; -/** - * Logical Rare and TopN Plan. - */ +/** Logical Rare and TopN Plan. */ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -27,12 +24,11 @@ public class LogicalRareTopN extends LogicalPlan { private final List fieldList; private final List groupByList; - /** - * Constructor of LogicalRareTopN. - */ + /** Constructor of LogicalRareTopN. */ public LogicalRareTopN( LogicalPlan child, - CommandType commandType, Integer noOfResults, + CommandType commandType, + Integer noOfResults, List fieldList, List groupByList) { super(Collections.singletonList(child)); diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java index a49c3d5cbe..d50e286e1d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -12,22 +11,16 @@ import lombok.ToString; import org.opensearch.sql.storage.Table; -/** - * Logical Relation represent the data source. - */ +/** Logical Relation represent the data source. 
*/ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRelation extends LogicalPlan { - @Getter - private final String relationName; + @Getter private final String relationName; - @Getter - private final Table table; + @Getter private final Table table; - /** - * Constructor of LogicalRelation. - */ + /** Constructor of LogicalRelation. */ public LogicalRelation(String relationName, Table table) { super(ImmutableList.of()); this.relationName = relationName; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java index cda7282c40..c1aeda22c7 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,22 +12,15 @@ import lombok.ToString; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Remove field specified by the {@link LogicalRemove#removeList}. - */ +/** Remove field specified by the {@link LogicalRemove#removeList}. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRemove extends LogicalPlan { - @Getter - private final Set removeList; + @Getter private final Set removeList; - /** - * Constructor of LogicalRemove. - */ - public LogicalRemove( - LogicalPlan child, - Set removeList) { + /** Constructor of LogicalRemove. 
*/ + public LogicalRemove(LogicalPlan child, Set removeList) { super(Collections.singletonList(child)); this.removeList = removeList; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java index 007a0a6fca..25ee645932 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,23 +12,15 @@ import lombok.ToString; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Rename Operator. - * renameList is list of mapping of source and target. - */ +/** Rename Operator. renameList is list of mapping of source and target. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRename extends LogicalPlan { - @Getter - private final Map renameMap; + @Getter private final Map renameMap; - /** - * Constructor of LogicalRename. - */ - public LogicalRename( - LogicalPlan child, - Map renameMap) { + /** Constructor of LogicalRename. */ + public LogicalRename(LogicalPlan child, Map renameMap) { super(Collections.singletonList(child)); this.renameMap = renameMap; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java index 947411518f..569ca7e309 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -15,9 +14,7 @@ import org.opensearch.sql.ast.tree.Sort.SortOption; import org.opensearch.sql.expression.Expression; -/** - * Sort Plan. - */ +/** Sort Plan. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -25,12 +22,8 @@ public class LogicalSort extends LogicalPlan { private final List> sortList; - /** - * Constructor of LogicalSort. - */ - public LogicalSort( - LogicalPlan child, - List> sortList) { + /** Constructor of LogicalSort. */ + public LogicalSort(LogicalPlan child, List> sortList) { super(Collections.singletonList(child)); this.sortList = sortList; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java index 29d2db54b2..325650db33 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -14,19 +13,22 @@ import org.opensearch.sql.expression.LiteralExpression; /** - * Logical operator which is a sequence of literal rows (like a relation). - * Basically, Values operator is used to create rows of constant literals - * "out of nothing" which is corresponding with VALUES clause in SQL. - * Mostly all rows must have the same number of literals and each column should - * have same type or can be converted implicitly. - * In particular, typical use cases include: - * 1. Project without relation involved. - * 2. Defining query or insertion without a relation. + * Logical operator which is a sequence of literal rows (like a relation).
+ * Basically, Values operator is used to create rows of constant literals
+ * "out of nothing" which is corresponding with VALUES clause in SQL.
+ * Mostly all rows must have the same number of literals and each column should have same type or + * can be converted implicitly. In particular, typical use cases include: + * + *
    + *
  1. Project without relation involved. + *
  2. Defining query or insertion without a relation. + *
+ * * Take the following logical plan for example: - *
- *  LogicalProject(expr=[log(2),true,1+2])
- *   |_ LogicalValues([[]])  #an empty row so that Project can evaluate its expressions in next()
- *  
+ * + *

LogicalProject(expr=[log(2),true,1+2])
+ *   |_ LogicalValues([[]]) #an empty row so that Project can evaluate its expressions in + * next() */ @ToString @Getter @@ -35,11 +37,8 @@ public class LogicalValues extends LogicalPlan { private final List> values; - /** - * Constructor of LogicalValues. - */ - public LogicalValues( - List> values) { + /** Constructor of LogicalValues. */ + public LogicalValues(List> values) { super(ImmutableList.of()); this.values = values; } @@ -48,5 +47,4 @@ public LogicalValues( public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitValues(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java index 022b284674..00c89410a7 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -25,13 +24,9 @@ public class LogicalWindow extends LogicalPlan { private final NamedExpression windowFunction; private final WindowDefinition windowDefinition; - /** - * Constructor of logical window. - */ + /** Constructor of logical window. 
*/ public LogicalWindow( - LogicalPlan child, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + LogicalPlan child, NamedExpression windowFunction, WindowDefinition windowDefinition) { super(Collections.singletonList(child)); this.windowFunction = windowFunction; this.windowDefinition = windowDefinition; @@ -41,5 +36,4 @@ public LogicalWindow( public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitWindow(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java index 496e6009e3..a253739a68 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java @@ -12,9 +12,7 @@ import lombok.ToString; import org.opensearch.sql.storage.Table; -/** - * Logical operator for insert statement. - */ +/** Logical operator for insert statement. */ @EqualsAndHashCode(callSuper = true) @Getter @ToString @@ -26,9 +24,7 @@ public class LogicalWrite extends LogicalPlan { /** Optional column name list specified in insert statement. */ private final List columns; - /** - * Construct a logical write with given child node, table and column name list. - */ + /** Construct a logical write with given child node, table and column name list. 
*/ public LogicalWrite(LogicalPlan child, Table table, List columns) { super(Collections.singletonList(child)); this.table = table; diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java index be1227c1da..5c115f0db8 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer; import static com.facebook.presto.matching.DefaultMatcher.DEFAULT_MATCHER; @@ -20,56 +19,53 @@ import org.opensearch.sql.planner.optimizer.rule.write.CreateTableWriteBuilder; /** - * {@link LogicalPlan} Optimizer. - * The Optimizer will run in the TopDown manner. - * 1> Optimize the current node with all the rules. - * 2> Optimize the all the child nodes with all the rules. - * 3) In case the child node could change, Optimize the current node again. + * {@link LogicalPlan} Optimizer.
+ * The Optimizer will run in the TopDown manner.
+ * + *

    + *
  1. Optimize the current node with all the rules. + *
  2. Optimize the all the child nodes with all the rules. + *
  3. In case the child node could change, Optimize the current node again. + *
*/ public class LogicalPlanOptimizer { private final List> rules; - /** - * Create {@link LogicalPlanOptimizer} with customized rules. - */ + /** Create {@link LogicalPlanOptimizer} with customized rules. */ public LogicalPlanOptimizer(List> rules) { this.rules = rules; } - /** - * Create {@link LogicalPlanOptimizer} with pre-defined rules. - */ + /** Create {@link LogicalPlanOptimizer} with pre-defined rules. */ public static LogicalPlanOptimizer create() { - return new LogicalPlanOptimizer(Arrays.asList( - /* - * Phase 1: Transformations that rely on relational algebra equivalence - */ - new MergeFilterAndFilter(), - new PushFilterUnderSort(), - /* - * Phase 2: Transformations that rely on data source push down capability - */ - new CreateTableScanBuilder(), - TableScanPushDown.PUSH_DOWN_FILTER, - TableScanPushDown.PUSH_DOWN_AGGREGATION, - TableScanPushDown.PUSH_DOWN_SORT, - TableScanPushDown.PUSH_DOWN_LIMIT, - new PushDownPageSize(), - TableScanPushDown.PUSH_DOWN_HIGHLIGHT, - TableScanPushDown.PUSH_DOWN_NESTED, - TableScanPushDown.PUSH_DOWN_PROJECT, - new CreateTableWriteBuilder())); + return new LogicalPlanOptimizer( + Arrays.asList( + /* + * Phase 1: Transformations that rely on relational algebra equivalence + */ + new MergeFilterAndFilter(), + new PushFilterUnderSort(), + /* + * Phase 2: Transformations that rely on data source push down capability + */ + new CreateTableScanBuilder(), + TableScanPushDown.PUSH_DOWN_FILTER, + TableScanPushDown.PUSH_DOWN_AGGREGATION, + TableScanPushDown.PUSH_DOWN_SORT, + TableScanPushDown.PUSH_DOWN_LIMIT, + new PushDownPageSize(), + TableScanPushDown.PUSH_DOWN_HIGHLIGHT, + TableScanPushDown.PUSH_DOWN_NESTED, + TableScanPushDown.PUSH_DOWN_PROJECT, + new CreateTableWriteBuilder())); } - /** - * Optimize {@link LogicalPlan}. - */ + /** Optimize {@link LogicalPlan}. 
*/ public LogicalPlan optimize(LogicalPlan plan) { LogicalPlan optimized = internalOptimize(plan); optimized.replaceChildPlans( - optimized.getChild().stream().map(this::optimize).collect( - Collectors.toList())); + optimized.getChild().stream().map(this::optimize).collect(Collectors.toList())); return internalOptimize(optimized); } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java index 8150de824d..5201c83c25 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java @@ -14,15 +14,12 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * A {@link LogicalPlanOptimizer} rule that pushes down page size - * to table scan builder. - */ +/** A {@link LogicalPlanOptimizer} rule that pushes down page size to table scan builder. 
*/ public class PushDownPageSize implements Rule { @Override public Pattern pattern() { return Pattern.typeOf(LogicalPaginate.class) - .matching(lp -> findTableScanBuilder(lp).isPresent()); + .matching(lp -> findTableScanBuilder(lp).isPresent()); } @Override @@ -44,7 +41,7 @@ private Optional findTableScanBuilder(LogicalPaginate logicalP if (children.stream().anyMatch(TableScanBuilder.class::isInstance)) { if (children.size() > 1) { throw new UnsupportedOperationException( - "Unsupported plan: relation operator cannot have siblings"); + "Unsupported plan: relation operator cannot have siblings"); } return Optional.of((TableScanBuilder) children.get(0)); } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java index 123754d3d0..b06ca3e968 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer; import com.facebook.presto.matching.Captures; @@ -12,17 +11,17 @@ /** * Optimization Rule. + * * @param LogicalPlan. */ public interface Rule { - /** - * Get the {@link Pattern}. - */ + /** Get the {@link Pattern}. */ Pattern pattern(); /** * Apply the Rule to the LogicalPlan. + * * @param plan LogicalPlan which match the Pattern. * @param captures A list of LogicalPlan which are captured by the Pattern. * @return the transfromed LogicalPlan. 
diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java index 8f5ac86580..ee4e9a20cc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer.pattern; import com.facebook.presto.matching.Capture; @@ -25,108 +24,89 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * Pattern helper class. - */ +/** Pattern helper class. */ @UtilityClass public class Patterns { - /** - * Logical filter with a given pattern on inner field. - */ + /** Logical filter with a given pattern on inner field. */ public static Pattern filter(Pattern pattern) { return Pattern.typeOf(LogicalFilter.class).with(source(pattern)); } - /** - * Logical aggregate operator with a given pattern on inner field. - */ + /** Logical aggregate operator with a given pattern on inner field. */ public static Pattern aggregate(Pattern pattern) { return Pattern.typeOf(LogicalAggregation.class).with(source(pattern)); } - /** - * Logical sort operator with a given pattern on inner field. - */ + /** Logical sort operator with a given pattern on inner field. */ public static Pattern sort(Pattern pattern) { return Pattern.typeOf(LogicalSort.class).with(source(pattern)); } - /** - * Logical limit operator with a given pattern on inner field. - */ + /** Logical limit operator with a given pattern on inner field. */ public static Pattern limit(Pattern pattern) { return Pattern.typeOf(LogicalLimit.class).with(source(pattern)); } - /** - * Logical highlight operator with a given pattern on inner field. - */ + /** Logical highlight operator with a given pattern on inner field. 
*/ public static Pattern highlight(Pattern pattern) { return Pattern.typeOf(LogicalHighlight.class).with(source(pattern)); } - /** - * Logical nested operator with a given pattern on inner field. - */ + /** Logical nested operator with a given pattern on inner field. */ public static Pattern nested(Pattern pattern) { return Pattern.typeOf(LogicalNested.class).with(source(pattern)); } - /** - * Logical project operator with a given pattern on inner field. - */ + /** Logical project operator with a given pattern on inner field. */ public static Pattern project(Pattern pattern) { return Pattern.typeOf(LogicalProject.class).with(source(pattern)); } - /** - * Pattern for {@link TableScanBuilder} and capture it meanwhile. - */ + /** Pattern for {@link TableScanBuilder} and capture it meanwhile. */ public static Pattern scanBuilder() { return Pattern.typeOf(TableScanBuilder.class).capturedAs(Capture.newCapture()); } - /** - * LogicalPlan source {@link Property}. - */ + /** LogicalPlan source {@link Property}. */ public static Property source() { - return Property.optionalProperty("source", plan -> plan.getChild().size() == 1 - ? Optional.of(plan.getChild().get(0)) - : Optional.empty()); + return Property.optionalProperty( + "source", + plan -> + plan.getChild().size() == 1 ? Optional.of(plan.getChild().get(0)) : Optional.empty()); } - /** - * Source (children field) with a given pattern. - */ + /** Source (children field) with a given pattern. */ @SuppressWarnings("unchecked") - public static - PropertyPattern source(Pattern pattern) { - Property property = Property.optionalProperty("source", - plan -> plan.getChild().size() == 1 - ? Optional.of((T) plan.getChild().get(0)) - : Optional.empty()); + public static PropertyPattern source(Pattern pattern) { + Property property = + Property.optionalProperty( + "source", + plan -> + plan.getChild().size() == 1 + ? 
Optional.of((T) plan.getChild().get(0)) + : Optional.empty()); return property.matching(pattern); } - /** - * Logical relation with table field. - */ + /** Logical relation with table field. */ public static Property table() { - return Property.optionalProperty("table", - plan -> plan instanceof LogicalRelation - ? Optional.of(((LogicalRelation) plan).getTable()) - : Optional.empty()); + return Property.optionalProperty( + "table", + plan -> + plan instanceof LogicalRelation + ? Optional.of(((LogicalRelation) plan).getTable()) + : Optional.empty()); } - /** - * Logical write with table field. - */ + /** Logical write with table field. */ public static Property writeTable() { - return Property.optionalProperty("table", - plan -> plan instanceof LogicalWrite - ? Optional.of(((LogicalWrite) plan).getTable()) - : Optional.empty()); + return Property.optionalProperty( + "table", + plan -> + plan instanceof LogicalWrite + ? Optional.of(((LogicalWrite) plan).getTable()) + : Optional.empty()); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java index 57763728d5..6270eee131 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer.rule; import static com.facebook.presto.matching.Pattern.typeOf; @@ -19,9 +18,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.optimizer.Rule; -/** - * Merge Filter --> Filter to the single Filter condition. - */ +/** Merge Filter --> Filter to the single Filter condition. 
*/ public class MergeFilterAndFilter implements Rule { private final Capture capture; @@ -30,22 +27,18 @@ public class MergeFilterAndFilter implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeFilterAndFilter. - */ + /** Constructor of MergeFilterAndFilter. */ public MergeFilterAndFilter() { this.capture = Capture.newCapture(); - this.pattern = typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalFilter.class).capturedAs(capture))); + this.pattern = + typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalFilter.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalFilter filter, - Captures captures) { + public LogicalPlan apply(LogicalFilter filter, Captures captures) { LogicalFilter childFilter = captures.get(capture); return new LogicalFilter( - childFilter.getChild().get(0), - DSL.and(filter.getCondition(), childFilter.getCondition()) - ); + childFilter.getChild().get(0), DSL.and(filter.getCondition(), childFilter.getCondition())); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java index e3347b402b..b5cd312e64 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer.rule; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,7 +19,7 @@ import org.opensearch.sql.planner.optimizer.Rule; /** - * Push Filter under Sort. + * Push Filter under Sort.
* Filter - Sort - Child --> Sort - Filter - Child */ public class PushFilterUnderSort implements Rule { @@ -31,22 +30,17 @@ public class PushFilterUnderSort implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of PushFilterUnderSort. - */ + /** Constructor of PushFilterUnderSort. */ public PushFilterUnderSort() { this.capture = Capture.newCapture(); - this.pattern = typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalSort.class).capturedAs(capture))); + this.pattern = + typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalSort.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalFilter filter, - Captures captures) { + public LogicalPlan apply(LogicalFilter filter, Captures captures) { LogicalSort sort = captures.get(capture); - return new LogicalSort( - filter.replaceChildPlans(sort.getChild()), - sort.getSortList() - ); + return new LogicalSort(filter.replaceChildPlans(sort.getChild()), sort.getSortList()); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java index dbe61ca8c3..6ed8e1faeb 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java @@ -19,9 +19,9 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Rule that replace logical relation operator to {@link TableScanBuilder} for later - * push down optimization. All push down optimization rules that depends on table scan - * builder needs to run after this. + * Rule that replace logical relation operator to {@link TableScanBuilder} for later push down + * optimization. All push down optimization rules that depends on table scan builder needs to run + * after this. 
*/ public class CreateTableScanBuilder implements Rule { @@ -33,13 +33,10 @@ public class CreateTableScanBuilder implements Rule { @Getter private final Pattern pattern; - /** - * Construct create table scan builder rule. - */ + /** Construct create table scan builder rule. */ public CreateTableScanBuilder() { this.capture = Capture.newCapture(); - this.pattern = Pattern.typeOf(LogicalRelation.class) - .with(table().capturedAs(capture)); + this.pattern = Pattern.typeOf(LogicalRelation.class).with(table().capturedAs(capture)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java index de2b47d403..b83155d90f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java @@ -26,10 +26,10 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Rule template for all table scan push down rules. Because all push down optimization rules - * have similar workflow in common, such as a pattern that match an operator on top of table scan - * builder, and action that eliminates the original operator if pushed down, this class helps - * remove redundant code and improve readability. + * Rule template for all table scan push down rules. Because all push down optimization rules have + * similar workflow in common, such as a pattern that match an operator on top of table scan + * builder, and action that eliminates the original operator if pushed down, this class helps remove + * redundant code and improve readability. * * @param logical plan node type */ @@ -37,48 +37,32 @@ public class TableScanPushDown implements Rule { /** Push down optimize rule for filtering condition. 
*/ public static final Rule PUSH_DOWN_FILTER = - match( - filter( - scanBuilder())) - .apply((filter, scanBuilder) -> scanBuilder.pushDownFilter(filter)); + match(filter(scanBuilder())) + .apply((filter, scanBuilder) -> scanBuilder.pushDownFilter(filter)); /** Push down optimize rule for aggregate operator. */ public static final Rule PUSH_DOWN_AGGREGATION = - match( - aggregate( - scanBuilder())) - .apply((agg, scanBuilder) -> scanBuilder.pushDownAggregation(agg)); + match(aggregate(scanBuilder())) + .apply((agg, scanBuilder) -> scanBuilder.pushDownAggregation(agg)); /** Push down optimize rule for sort operator. */ public static final Rule PUSH_DOWN_SORT = - match( - sort( - scanBuilder())) - .apply((sort, scanBuilder) -> scanBuilder.pushDownSort(sort)); + match(sort(scanBuilder())).apply((sort, scanBuilder) -> scanBuilder.pushDownSort(sort)); /** Push down optimize rule for limit operator. */ public static final Rule PUSH_DOWN_LIMIT = - match( - limit( - scanBuilder())) - .apply((limit, scanBuilder) -> scanBuilder.pushDownLimit(limit)); + match(limit(scanBuilder())).apply((limit, scanBuilder) -> scanBuilder.pushDownLimit(limit)); public static final Rule PUSH_DOWN_PROJECT = - match( - project( - scanBuilder())) - .apply((project, scanBuilder) -> scanBuilder.pushDownProject(project)); + match(project(scanBuilder())) + .apply((project, scanBuilder) -> scanBuilder.pushDownProject(project)); public static final Rule PUSH_DOWN_HIGHLIGHT = - match( - highlight( - scanBuilder())) + match(highlight(scanBuilder())) .apply((highlight, scanBuilder) -> scanBuilder.pushDownHighlight(highlight)); public static final Rule PUSH_DOWN_NESTED = - match( - nested( - scanBuilder())) + match(nested(scanBuilder())) .apply((nested, scanBuilder) -> scanBuilder.pushDownNested(nested)); /** Pattern that matches a plan node. */ @@ -90,10 +74,9 @@ public class TableScanPushDown implements Rule { /** Push down function applied to the plan node and captured table scan builder. 
*/ private final BiFunction pushDownFunction; - @SuppressWarnings("unchecked") - private TableScanPushDown(WithPattern pattern, - BiFunction pushDownFunction) { + private TableScanPushDown( + WithPattern pattern, BiFunction pushDownFunction) { this.pattern = pattern; this.capture = ((CapturePattern) pattern.getPattern()).capture(); this.pushDownFunction = pushDownFunction; @@ -113,22 +96,18 @@ public LogicalPlan apply(T plan, Captures captures) { return plan; } - /** - * Custom builder class other than generated by Lombok to provide more readable code. - */ + /** Custom builder class other than generated by Lombok to provide more readable code. */ static class TableScanPushDownBuilder { private WithPattern pattern; - public static - TableScanPushDownBuilder match(Pattern pattern) { + public static TableScanPushDownBuilder match(Pattern pattern) { TableScanPushDownBuilder builder = new TableScanPushDownBuilder<>(); builder.pattern = (WithPattern) pattern; return builder; } - public TableScanPushDown apply( - BiFunction pushDownFunction) { + public TableScanPushDown apply(BiFunction pushDownFunction) { return new TableScanPushDown<>(pattern, pushDownFunction); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java index 4fbf676862..0a4045d404 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java @@ -32,13 +32,10 @@ public class CreateTableWriteBuilder implements Rule { @Getter private final Pattern pattern; - /** - * Construct create table write builder rule. - */ + /** Construct create table write builder rule. 
*/ public CreateTableWriteBuilder() { this.capture = Capture.newCapture(); - this.pattern = Pattern.typeOf(LogicalWrite.class) - .with(writeTable().capturedAs(capture)); + this.pattern = Pattern.typeOf(LogicalWrite.class).with(writeTable().capturedAs(capture)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java index 2c643c986d..cc1c047c31 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import java.util.Collections; @@ -27,30 +26,26 @@ @EqualsAndHashCode(callSuper = false) @ToString public class AggregationOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final List aggregatorList; - @Getter - private final List groupByExprList; + @Getter private final PhysicalPlan input; + @Getter private final List aggregatorList; + @Getter private final List groupByExprList; - /** - * {@link BindingTuple} Collector. - */ - @EqualsAndHashCode.Exclude - private final Collector collector; - @EqualsAndHashCode.Exclude - private Iterator iterator; + /** {@link BindingTuple} Collector. */ + @EqualsAndHashCode.Exclude private final Collector collector; + + @EqualsAndHashCode.Exclude private Iterator iterator; /** * AggregationOperator Constructor. 
* - * @param input Input {@link PhysicalPlan} - * @param aggregatorList List of {@link Aggregator} + * @param input Input {@link PhysicalPlan} + * @param aggregatorList List of {@link Aggregator} * @param groupByExprList List of group by {@link Expression} */ - public AggregationOperator(PhysicalPlan input, List aggregatorList, - List groupByExprList) { + public AggregationOperator( + PhysicalPlan input, + List aggregatorList, + List groupByExprList) { this.input = input; this.aggregatorList = aggregatorList; this.groupByExprList = groupByExprList; @@ -67,7 +62,6 @@ public List getChild() { return Collections.singletonList(input); } - @Override public boolean hasNext() { return iterator.hasNext(); diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java index 7921d0dd50..688ffa0d8d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java @@ -11,9 +11,9 @@ import org.opensearch.sql.executor.ExecutionEngine; /** - * A plan node which blocks issuing a request in {@link #open} and - * getting results in {@link #hasNext}, but doesn't block releasing resources in {@link #close}. - * Designed to be on top of the deserialized tree. + * A plan node which blocks issuing a request in {@link #open} and getting results in {@link + * #hasNext}, but doesn't block releasing resources in {@link #close}. Designed to be on top of the + * deserialized tree. */ @RequiredArgsConstructor public class CursorCloseOperator extends PhysicalPlan { @@ -41,9 +41,7 @@ public List getChild() { return List.of(input); } - /** - * Provides an empty schema, because this plan node is always located on the top of the tree. - */ + /** Provides an empty schema, because this plan node is always located on the top of the tree. 
*/ @Override public ExecutionEngine.Schema schema() { return new ExecutionEngine.Schema(List.of()); diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java index 452fbd9707..7faec2154b 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -28,21 +27,14 @@ @Getter @EqualsAndHashCode(callSuper = false) public class DedupeOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final List dedupeList; - @Getter - private final Integer allowedDuplication; - @Getter - private final Boolean keepEmpty; - @Getter - private final Boolean consecutive; - - @EqualsAndHashCode.Exclude - private final Deduper> deduper; - @EqualsAndHashCode.Exclude - private ExprValue next; + @Getter private final PhysicalPlan input; + @Getter private final List dedupeList; + @Getter private final Integer allowedDuplication; + @Getter private final Boolean keepEmpty; + @Getter private final Boolean consecutive; + + @EqualsAndHashCode.Exclude private final Deduper> deduper; + @EqualsAndHashCode.Exclude private ExprValue next; private static final Integer ALL_ONE_DUPLICATION = 1; private static final Boolean IGNORE_EMPTY = false; @@ -57,6 +49,7 @@ public DedupeOperator(PhysicalPlan input, List dedupeList) { /** * Dedup Constructor. 
+ * * @param input input {@link PhysicalPlan} * @param dedupeList list of dedupe {@link Expression} * @param allowedDuplication max allowed duplication @@ -140,9 +133,7 @@ static class Deduper { private final BiFunction, K, Integer> seenFirstTime; private final Map seenMap = new ConcurrentHashMap<>(); - /** - * The Historical Deduper monitor the duplicated element with all the seen value. - */ + /** The Historical Deduper monitor the duplicated element with all the seen value. */ public static Deduper historicalDeduper() { return new Deduper<>( (map, key) -> { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java index 3b9e1a8214..ac62fe1b86 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -34,17 +33,15 @@ * If the field name exist in the input, a new value will be put into to output. * *

The {@link EvalOperator#expressionList} are evaluated from left to right. It means you can - * reference previous evaluated field. - * e.g. fields velocity = distance/time, doubleVelocity = 2 * velocity + * reference previous evaluated field. e.g. fields velocity = distance/time, doubleVelocity = 2 * + * velocity */ @ToString @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor public class EvalOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final List> expressionList; + @Getter private final PhysicalPlan input; + @Getter private final List> expressionList; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -86,6 +83,7 @@ public ExprValue next() { /** * Evaluate the expression in the {@link EvalOperator#expressionList} with {@link Environment}. + * * @param env {@link Environment} * @return The mapping of reference and {@link ExprValue} for each expression. */ diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java index 4b5045d24e..ec61d53163 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java @@ -17,21 +17,17 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * The Filter operator represents WHERE clause and - * uses the conditions to evaluate the input {@link BindingTuple}. - * The Filter operator only returns the results that evaluated to true. - * The NULL and MISSING are handled by the logic defined in {@link BinaryPredicateOperator}. + * The Filter operator represents WHERE clause and uses the conditions to evaluate the input {@link + * BindingTuple}. The Filter operator only returns the results that evaluated to true. The NULL and + * MISSING are handled by the logic defined in {@link BinaryPredicateOperator}. 
*/ @EqualsAndHashCode(callSuper = false) @ToString @RequiredArgsConstructor public class FilterOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Expression conditions; - @ToString.Exclude - private ExprValue next = null; + @Getter private final PhysicalPlan input; + @Getter private final Expression conditions; + @ToString.Exclude private ExprValue next = null; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java index cd84234c4b..dc9038f2a3 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -15,15 +14,15 @@ import org.opensearch.sql.data.model.ExprValue; /** - * The limit operator sets a window, to and block the rows out of the window - * and allow only the result subset within this window to the output. + * The limit operator sets a window, to and block the rows out of the window and allow only the + * result subset within this window to the output. * - *

The result subset is enframed from original result with {@link LimitOperator#offset} - * as the offset and {@link LimitOperator#limit} as the size, thus the output - * is the subset of the original result set that has indices from {index + 1} to {index + limit}. - * Special cases might occur where the result subset has a size smaller than expected {limit}, - * it occurs when the original result set has a size smaller than {index + limit}, - * or even not greater than the offset. The latter results in an empty output.

+ *

The result subset is enframed from original result with {@link LimitOperator#offset} as the + * offset and {@link LimitOperator#limit} as the size, thus the output is the subset of the original + * result set that has indices from {index + 1} to {index + limit}. Special cases might occur where + * the result subset has a size smaller than expected {limit}, it occurs when the original result + * set has a size smaller than {index + limit}, or even not greater than the offset. The latter + * results in an empty output. */ @RequiredArgsConstructor @Getter @@ -66,5 +65,4 @@ public R accept(PhysicalPlanNodeVisitor visitor, C context) { public List getChild() { return ImmutableList.of(input); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java index 54cd541519..8539df5463 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java @@ -27,58 +27,47 @@ import org.opensearch.sql.expression.ReferenceExpression; /** - * The NestedOperator evaluates the {@link NestedOperator#fields} and - * generates {@link NestedOperator#nonNestedFields} to form the - * {@link NestedOperator#result} output. Resolve two nested fields - * with differing paths will result in a cartesian product(inner join). + * The NestedOperator evaluates the {@link NestedOperator#fields} and generates {@link + * NestedOperator#nonNestedFields} to form the {@link NestedOperator#result} output. Resolve two + * nested fields with differing paths will result in a cartesian product(inner join). 
*/ @EqualsAndHashCode(callSuper = false) public class NestedOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Set fields; // Needs to be a Set to match legacy implementation - @Getter - private final Map> groupedPathsAndFields; - @EqualsAndHashCode.Exclude - private List> result = new ArrayList<>(); - @EqualsAndHashCode.Exclude - private List nonNestedFields = new ArrayList<>(); + @Getter private final PhysicalPlan input; + @Getter private final Set fields; // Needs to be a Set to match legacy implementation + @Getter private final Map> groupedPathsAndFields; + @EqualsAndHashCode.Exclude private List> result = new ArrayList<>(); + @EqualsAndHashCode.Exclude private List nonNestedFields = new ArrayList<>(); + @EqualsAndHashCode.Exclude private ListIterator> flattenedResult = result.listIterator(); /** * Constructor for NestedOperator with list of map as arg. + * * @param input : PhysicalPlan input. * @param fields : List of all fields and paths for nested fields. */ public NestedOperator(PhysicalPlan input, List> fields) { this.input = input; - this.fields = fields.stream() - .map(m -> m.get("field").toString()) - .collect(Collectors.toSet()); - this.groupedPathsAndFields = fields.stream().collect( - Collectors.groupingBy( - m -> m.get("path").toString(), - mapping( - m -> m.get("field").toString(), - toList() - ) - ) - ); + this.fields = fields.stream().map(m -> m.get("field").toString()).collect(Collectors.toSet()); + this.groupedPathsAndFields = + fields.stream() + .collect( + Collectors.groupingBy( + m -> m.get("path").toString(), + mapping(m -> m.get("field").toString(), toList()))); } /** * Constructor for NestedOperator with Set of fields. + * * @param input : PhysicalPlan input. * @param fields : List of all fields for nested fields. * @param groupedPathsAndFields : Map of fields grouped by their path. 
*/ public NestedOperator( - PhysicalPlan input, - Set fields, - Map> groupedPathsAndFields - ) { + PhysicalPlan input, Set fields, Map> groupedPathsAndFields) { this.input = input; this.fields = fields; this.groupedPathsAndFields = groupedPathsAndFields; @@ -128,16 +117,16 @@ public ExprValue next() { } /** - * Generate list of non-nested fields that are in inputMap, but not in the member variable - * fields list. + * Generate list of non-nested fields that are in inputMap, but not in the member variable fields + * list. + * * @param inputMap : Row to parse non-nested fields. */ public void generateNonNestedFieldsMap(ExprValue inputMap) { for (Map.Entry inputField : inputMap.tupleValue().entrySet()) { boolean foundNestedField = - this.fields.stream().anyMatch( - field -> field.split("\\.")[0].equalsIgnoreCase(inputField.getKey()) - ); + this.fields.stream() + .anyMatch(field -> field.split("\\.")[0].equalsIgnoreCase(inputField.getKey())); if (!foundNestedField) { this.nonNestedFields.add(inputField.getKey()); @@ -145,12 +134,11 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { } } - /** - * Simplifies the structure of row's source Map by flattening it, - * making the full path of an object the key - * and the Object it refers to the value. + * Simplifies the structure of row's source Map by flattening it, making the full path of an + * object the key and the Object it refers to the value. * + *

    * 

Sample input: * keys = ['comments.likes'] * row = comments: { @@ -159,6 +147,7 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { * *

Return: * flattenedRow = {comment.likes: 2} + *

* * @param nestedField : Field to query in row. * @param row : Row returned from OS. @@ -166,11 +155,7 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { * @return : List of nested select items or cartesian product of nested calls. */ private List> flatten( - String nestedField, - ExprValue row, - List> prevList - ) { + String nestedField, ExprValue row, List> prevList) { List> copy = new ArrayList<>(); List> newList = new ArrayList<>(); @@ -201,11 +186,10 @@ private List> flatten( // Generate cartesian product for (Map prevMap : prevList) { for (Map newMap : copy) { - newList.add(Stream.of(newMap, prevMap) - .flatMap(map -> map.entrySet().stream()) - .collect(Collectors.toMap( - Map.Entry::getKey, - Map.Entry::getValue))); + newList.add( + Stream.of(newMap, prevMap) + .flatMap(map -> map.entrySet().stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); } } return newList; @@ -214,6 +198,7 @@ private List> flatten( /** * Check if newMap field has any sharing paths in prevMap. + * * @param newMap : New map to add to result set. * @return : true if there is already a field added to result set with same path. */ @@ -243,9 +228,11 @@ boolean containSamePath(Map newMap) { * @return : Object at current nested level. */ private void getNested( - String field, String nestedField, ExprValue row, - List> ret, ExprValue nestedObj - ) { + String field, + String nestedField, + ExprValue row, + List> ret, + ExprValue nestedObj) { ExprValue currentObj = (nestedObj == null) ? 
row : nestedObj; String[] splitKeys = nestedField.split("\\."); @@ -271,12 +258,10 @@ private void getNested( // Return final nested result if (currentObj != null && (StringUtils.substringAfterLast(field, ".").equals(nestedField) - || !field.contains(".")) - ) { + || !field.contains("."))) { ret.add(new LinkedHashMap<>(Map.of(field, currentObj))); } else if (currentObj != null) { - getNested(field, nestedField.substring(nestedField.indexOf(".") + 1), - row, ret, currentObj); + getNested(field, nestedField.substring(nestedField.indexOf(".") + 1), row, ret, currentObj); } } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java index 247b347940..0ae795aa31 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import java.util.Iterator; @@ -12,9 +11,7 @@ import org.opensearch.sql.planner.PlanNode; import org.opensearch.sql.storage.split.Split; -/** - * Physical plan. - */ +/** Physical plan. */ public abstract class PhysicalPlan implements PlanNode, Iterator, AutoCloseable { /** @@ -22,8 +19,8 @@ public abstract class PhysicalPlan * * @param visitor visitor. * @param context visitor context. - * @param returned object type. - * @param context type. + * @param returned object type. + * @param context type. * @return returned object. 
*/ public abstract R accept(PhysicalPlanNodeVisitor visitor, C context); @@ -41,7 +38,9 @@ public void add(Split split) { } public ExecutionEngine.Schema schema() { - throw new IllegalStateException(String.format("[BUG] schema can been only applied to " - + "ProjectOperator, instead of %s", this.getClass().getSimpleName())); + throw new IllegalStateException( + String.format( + "[BUG] schema can been only applied to " + "ProjectOperator, instead of %s", + this.getClass().getSimpleName())); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java index 8c10c91fb6..147f0e08dc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -23,9 +22,7 @@ import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.expression.window.WindowDefinition; -/** - * Physical Plan DSL. - */ +/** Physical Plan DSL. */ @UtilityClass public class PhysicalPlanDSL { @@ -47,8 +44,10 @@ public static ProjectOperator project(PhysicalPlan input, NamedExpression... fie return new ProjectOperator(input, Arrays.asList(fields), ImmutableList.of()); } - public static ProjectOperator project(PhysicalPlan input, List fields, - List namedParseExpressions) { + public static ProjectOperator project( + PhysicalPlan input, + List fields, + List namedParseExpressions) { return new ProjectOperator(input, fields, namedParseExpressions); } @@ -61,8 +60,7 @@ public static EvalOperator eval( return new EvalOperator(input, Arrays.asList(expressions)); } - public static SortOperator sort(PhysicalPlan input, Pair... sorts) { + public static SortOperator sort(PhysicalPlan input, Pair... 
sorts) { return new SortOperator(input, Arrays.asList(sorts)); } @@ -80,22 +78,27 @@ public static DedupeOperator dedupe( input, Arrays.asList(expressions), allowedDuplication, keepEmpty, consecutive); } - public WindowOperator window(PhysicalPlan input, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + public WindowOperator window( + PhysicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { return new WindowOperator(input, windowFunction, windowDefinition); } - public static RareTopNOperator rareTopN(PhysicalPlan input, CommandType commandType, - List groups, Expression... expressions) { + public static RareTopNOperator rareTopN( + PhysicalPlan input, + CommandType commandType, + List groups, + Expression... expressions) { return new RareTopNOperator(input, commandType, Arrays.asList(expressions), groups); } - public static RareTopNOperator rareTopN(PhysicalPlan input, CommandType commandType, - int noOfResults, - List groups, Expression... expressions) { - return new RareTopNOperator(input, commandType, noOfResults, Arrays.asList(expressions), - groups); + public static RareTopNOperator rareTopN( + PhysicalPlan input, + CommandType commandType, + int noOfResults, + List groups, + Expression... 
expressions) { + return new RareTopNOperator( + input, commandType, noOfResults, Arrays.asList(expressions), groups); } @SafeVarargs @@ -108,9 +111,7 @@ public static LimitOperator limit(PhysicalPlan input, Integer limit, Integer off } public static NestedOperator nested( - PhysicalPlan input, - Set args, - Map> groupedFieldsByPath) { + PhysicalPlan input, Set args, Map> groupedFieldsByPath) { return new NestedOperator(input, args, groupedFieldsByPath); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java index 14a839db27..99b5cc8020 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import org.opensearch.sql.storage.TableScanOperator; diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java index 1699c97c15..55422dacd3 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableMap; @@ -27,19 +26,14 @@ import org.opensearch.sql.expression.parse.ParseExpression; import org.opensearch.sql.planner.SerializablePlan; -/** - * Project the fields specified in {@link ProjectOperator#projectList} from input. - */ +/** Project the fields specified in {@link ProjectOperator#projectList} from input. 
*/ @ToString @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class ProjectOperator extends PhysicalPlan implements SerializablePlan { - @Getter - private PhysicalPlan input; - @Getter - private List projectList; - @Getter - private List namedParseExpressions; + @Getter private PhysicalPlan input; + @Getter private List projectList; + @Getter private List namedParseExpressions; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -65,17 +59,20 @@ public ExprValue next() { // TODO needs a better implementation, see https://github.com/opensearch-project/sql/issues/458 for (NamedExpression expr : projectList) { ExprValue exprValue = expr.valueOf(inputValue.bindingTuples()); - Optional optionalParseExpression = namedParseExpressions.stream() - .filter(parseExpr -> parseExpr.getNameOrAlias().equals(expr.getNameOrAlias())) - .findFirst(); + Optional optionalParseExpression = + namedParseExpressions.stream() + .filter(parseExpr -> parseExpr.getNameOrAlias().equals(expr.getNameOrAlias())) + .findFirst(); if (optionalParseExpression.isEmpty()) { mapBuilder.put(expr.getNameOrAlias(), exprValue); continue; } NamedExpression parseExpression = optionalParseExpression.get(); - ExprValue sourceFieldValue = inputValue.bindingTuples() - .resolve(((ParseExpression) parseExpression.getDelegated()).getSourceField()); + ExprValue sourceFieldValue = + inputValue + .bindingTuples() + .resolve(((ParseExpression) parseExpression.getDelegated()).getSourceField()); if (sourceFieldValue.isMissing()) { // source field will be missing after stats command, read from inputValue if it exists // otherwise do nothing since it should not appear as a field @@ -94,15 +91,17 @@ public ExprValue next() { @Override public ExecutionEngine.Schema schema() { - return new ExecutionEngine.Schema(getProjectList().stream() - .map(expr -> new ExecutionEngine.Schema.Column(expr.getName(), - expr.getAlias(), expr.type())).collect(Collectors.toList())); + return new 
ExecutionEngine.Schema( + getProjectList().stream() + .map( + expr -> + new ExecutionEngine.Schema.Column(expr.getName(), expr.getAlias(), expr.type())) + .collect(Collectors.toList())); } /** Don't use, it is for deserialization needs only. */ @Deprecated - public ProjectOperator() { - } + public ProjectOperator() {} @SuppressWarnings("unchecked") @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java index fb3a91e2e6..ecf997f7ae 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.annotations.VisibleForTesting; @@ -36,40 +35,38 @@ @EqualsAndHashCode(callSuper = false) public class RareTopNOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final CommandType commandType; - @Getter - private final Integer noOfResults; - @Getter - private final List fieldExprList; - @Getter - private final List groupByExprList; - - @EqualsAndHashCode.Exclude - private final Group group; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @Getter private final PhysicalPlan input; + @Getter private final CommandType commandType; + @Getter private final Integer noOfResults; + @Getter private final List fieldExprList; + @Getter private final List groupByExprList; - private static final Integer DEFAULT_NO_OF_RESULTS = 10; + @EqualsAndHashCode.Exclude private final Group group; + @EqualsAndHashCode.Exclude private Iterator iterator; + private static final Integer DEFAULT_NO_OF_RESULTS = 10; - public RareTopNOperator(PhysicalPlan input, CommandType commandType, - List fieldExprList, List groupByExprList) { + public RareTopNOperator( + PhysicalPlan input, + CommandType 
commandType, + List fieldExprList, + List groupByExprList) { this(input, commandType, DEFAULT_NO_OF_RESULTS, fieldExprList, groupByExprList); } /** * RareTopNOperator Constructor. * - * @param input Input {@link PhysicalPlan} - * @param commandType Enum for Rare/TopN command. - * @param noOfResults Number of results - * @param fieldExprList List of {@link Expression} + * @param input Input {@link PhysicalPlan} + * @param commandType Enum for Rare/TopN command. + * @param noOfResults Number of results + * @param fieldExprList List of {@link Expression} * @param groupByExprList List of group by {@link Expression} */ - public RareTopNOperator(PhysicalPlan input, CommandType commandType, int noOfResults, + public RareTopNOperator( + PhysicalPlan input, + CommandType commandType, + int noOfResults, List fieldExprList, List groupByExprList) { this.input = input; @@ -115,48 +112,50 @@ public class Group { private final Map> groupListMap = new HashMap<>(); - /** - * Push the BindingTuple to Group. - */ + /** Push the BindingTuple to Group. */ public void push(ExprValue inputValue) { Key groupKey = new Key(inputValue, groupByExprList); Key fieldKey = new Key(inputValue, fieldExprList); - groupListMap.computeIfAbsent(groupKey, k -> { - Map map = new HashMap<>(); - map.put(fieldKey, 1); - return map; - }); - groupListMap.computeIfPresent(groupKey, (key, map) -> { - map.computeIfAbsent(fieldKey, f -> 1); - map.computeIfPresent(fieldKey, (field, count) -> { - return count + 1; - }); - return map; - }); + groupListMap.computeIfAbsent( + groupKey, + k -> { + Map map = new HashMap<>(); + map.put(fieldKey, 1); + return map; + }); + groupListMap.computeIfPresent( + groupKey, + (key, map) -> { + map.computeIfAbsent(fieldKey, f -> 1); + map.computeIfPresent( + fieldKey, + (field, count) -> { + return count + 1; + }); + return map; + }); } - /** - * Get the list of {@link BindingTuple} for each group. - */ + /** Get the list of {@link BindingTuple} for each group. 
*/ public List result() { ImmutableList.Builder resultBuilder = new ImmutableList.Builder<>(); - groupListMap.forEach((groups, fieldMap) -> { - Map map = new LinkedHashMap<>(); - List result = find(fieldMap); - result.forEach(field -> { - map.putAll(groups.keyMap(groupByExprList)); - map.putAll(field.keyMap(fieldExprList)); - resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); - }); - }); + groupListMap.forEach( + (groups, fieldMap) -> { + Map map = new LinkedHashMap<>(); + List result = find(fieldMap); + result.forEach( + field -> { + map.putAll(groups.keyMap(groupByExprList)); + map.putAll(field.keyMap(fieldExprList)); + resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); + }); + }); return resultBuilder.build(); } - /** - * Get a list of result. - */ + /** Get a list of result. */ public List find(Map map) { Comparator> valueComparator; if (CommandType.TOP.equals(commandType)) { @@ -165,40 +164,37 @@ public List find(Map map) { valueComparator = Map.Entry.comparingByValue(); } - return map.entrySet().stream().sorted(valueComparator).limit(noOfResults) - .map(Map.Entry::getKey).collect(Collectors.toList()); + return map.entrySet().stream() + .sorted(valueComparator) + .limit(noOfResults) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); } } - /** - * Key. - */ + /** Key. */ @EqualsAndHashCode @VisibleForTesting public class Key { private final List valueList; - /** - * Key constructor. - */ + /** Key constructor. */ public Key(ExprValue value, List exprList) { - this.valueList = exprList.stream() - .map(expr -> expr.valueOf(value.bindingTuples())).collect(Collectors.toList()); + this.valueList = + exprList.stream() + .map(expr -> expr.valueOf(value.bindingTuples())) + .collect(Collectors.toList()); } - /** - * Return the Map of key and key value. - */ + /** Return the Map of key and key value. 
*/ public Map keyMap(List exprList) { return Streams.zip( - exprList.stream().map( - expression -> expression.toString()), - valueList.stream(), - AbstractMap.SimpleEntry::new - ).collect(Collectors.toMap(key -> key.getKey(), key -> key.getValue())); + exprList.stream().map(expression -> expression.toString()), + valueList.stream(), + AbstractMap.SimpleEntry::new) + .collect(Collectors.toMap(key -> key.getKey(), key -> key.getValue())); } } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java index 3fa3519d10..b4a724aa7a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -24,26 +23,19 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Remove the fields specified in {@link RemoveOperator#removeList} from input. - */ +/** Remove the fields specified in {@link RemoveOperator#removeList} from input. */ @ToString @EqualsAndHashCode(callSuper = false) public class RemoveOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Set removeList; - @ToString.Exclude - @EqualsAndHashCode.Exclude - private final Set nameRemoveList; + @Getter private final PhysicalPlan input; + @Getter private final Set removeList; + @ToString.Exclude @EqualsAndHashCode.Exclude private final Set nameRemoveList; /** * Todo. This is the temporary solution that add the mapping between string and ref. because when * rename the field from input, there we can only get the string field. 
*/ - public RemoveOperator(PhysicalPlan input, - Set removeList) { + public RemoveOperator(PhysicalPlan input, Set removeList) { this.input = input; this.removeList = removeList; this.nameRemoveList = diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java index f0b0d13c50..e6f97dab4a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -24,35 +23,30 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * Rename the binding name in {@link BindingTuple}. - * The mapping maintain the relation between source and target. - * it means BindingTuple.resolve(target) = BindingTuple.resolve(source). + * Rename the binding name in {@link BindingTuple}. The mapping maintain the relation between source + * and target. it means BindingTuple.resolve(target) = BindingTuple.resolve(source). */ @EqualsAndHashCode(callSuper = false) @ToString public class RenameOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Map mapping; + @Getter private final PhysicalPlan input; + @Getter private final Map mapping; + /** * Todo. This is the temporary solution that add the mapping between string and ref. because when * rename the field from input, there we can only get the string field. */ - @ToString.Exclude - @EqualsAndHashCode.Exclude + @ToString.Exclude @EqualsAndHashCode.Exclude private final Map nameMapping; - /** - * Constructor of RenameOperator. - */ - public RenameOperator(PhysicalPlan input, - Map mapping) { + /** Constructor of RenameOperator. 
*/ + public RenameOperator(PhysicalPlan input, Map mapping) { this.input = input; this.mapping = mapping; this.nameMapping = - mapping.entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().getAttr(), - entry -> entry.getValue())); + mapping.entrySet().stream() + .collect( + Collectors.toMap(entry -> entry.getKey().getAttr(), entry -> entry.getValue())); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java index 4463892ca5..e3116baedf 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.ast.tree.Sort.NullOrder.NULL_FIRST; @@ -28,30 +27,26 @@ /** * Sort Operator.The input data is sorted by the sort fields in the {@link SortOperator#sortList}. - * The sort field is specified by the {@link Expression} with {@link SortOption}. - * The count indicate how many sorted result should been return. + * The sort field is specified by the {@link Expression} with {@link SortOption}. The count indicate + * how many sorted result should been return. */ @ToString @EqualsAndHashCode(callSuper = false) public class SortOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final List> sortList; - @EqualsAndHashCode.Exclude - private final Sorter sorter; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @Getter private final List> sortList; + @EqualsAndHashCode.Exclude private final Sorter sorter; + @EqualsAndHashCode.Exclude private Iterator iterator; /** * Sort Operator Constructor. + * * @param input input {@link PhysicalPlan} - * @param sortList list of sort sort field. 
- * The sort field is specified by the {@link Expression} with {@link SortOption} + * @param sortList list of sort sort field. The sort field is specified by the {@link Expression} + * with {@link SortOption} */ - public SortOperator( - PhysicalPlan input, List> sortList) { + public SortOperator(PhysicalPlan input, List> sortList) { this.input = input; this.sortList = sortList; SorterBuilder sorterBuilder = Sorter.builder(); @@ -101,8 +96,7 @@ public ExprValue next() { @Builder public static class Sorter implements Comparator { - @Singular - private final List>> comparators; + @Singular private final List>> comparators; @Override public int compare(ExprValue o1, ExprValue o2) { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java index 4ac9d6a30a..4a4ce27da8 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -18,22 +17,15 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.LiteralExpression; -/** - * Physical operator for Values. - */ +/** Physical operator for Values. */ @ToString @EqualsAndHashCode(callSuper = false, of = "values") public class ValuesOperator extends PhysicalPlan { - /** - * Original values list for print and equality check. - */ - @Getter - private final List> values; + /** Original values list for print and equality check. */ + @Getter private final List> values; - /** - * Values iterator. - */ + /** Values iterator. 
*/ private final Iterator> valuesIterator; public ValuesOperator(List> values) { @@ -58,10 +50,8 @@ public boolean hasNext() { @Override public ExprValue next() { - List values = valuesIterator.next().stream() - .map(Expression::valueOf) - .collect(Collectors.toList()); + List values = + valuesIterator.next().stream().map(Expression::valueOf).collect(Collectors.toList()); return new ExprCollectionValue(values); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java index 8ecdcfbf49..10377ce47a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableMap; @@ -21,43 +20,34 @@ import org.opensearch.sql.expression.window.WindowFunctionExpression; import org.opensearch.sql.expression.window.frame.WindowFrame; -/** - * Physical operator for window function computation. - */ +/** Physical operator for window function computation. */ @EqualsAndHashCode(callSuper = false) @ToString public class WindowOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final NamedExpression windowFunction; + @Getter private final NamedExpression windowFunction; - @Getter - private final WindowDefinition windowDefinition; + @Getter private final WindowDefinition windowDefinition; - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final WindowFrame windowFrame; + @EqualsAndHashCode.Exclude @ToString.Exclude private final WindowFrame windowFrame; /** - * Peeking iterator that can peek next element which is required - * by window frame such as peer frame to prefetch all rows related - * to same peer (of same sorting key). 
+ * Peeking iterator that can peek next element which is required by window frame such as peer + * frame to prefetch all rows related to same peer (of same sorting key). */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + @EqualsAndHashCode.Exclude @ToString.Exclude private final PeekingIterator peekingIterator; /** * Initialize window operator. - * @param input child operator - * @param windowFunction window function - * @param windowDefinition window definition + * + * @param input child operator + * @param windowFunction window function + * @param windowDefinition window definition */ - public WindowOperator(PhysicalPlan input, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + public WindowOperator( + PhysicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { this.input = input; this.windowFunction = windowFunction; this.windowDefinition = windowDefinition; @@ -107,5 +97,4 @@ private void addWindowFunctionResultColumn(ImmutableMap.Builder supplier; /** - * Map from bucketKey to nested collector sorted by key to make sure - * final result is in order after traversal. + * Map from bucketKey to nested collector sorted by key to make sure final result is in order + * after traversal. */ private final Map collectorMap = new TreeMap<>(); - /** - * Bucket Index. - */ + /** Bucket Index. */ private int bucketIndex = 0; /** - * Collect Bucket from {@link BindingTuple}. - * If bucket not exist, create new bucket and {@link Collector}. - * If bucket exist, let {@link Collector} in the bucket collect from {@link BindingTuple}. + * Collect Bucket from {@link BindingTuple}. If bucket not exist, create new bucket and {@link + * Collector}. If bucket exist, let {@link Collector} in the bucket collect from {@link + * BindingTuple}. * * @param input {@link BindingTuple}. */ @@ -64,6 +56,7 @@ public void collect(BindingTuple input) { /** * Bucket Key. + * * @param tuple {@link BindingTuple}. * @return Bucket Key. 
*/ diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java index a2b3a41a27..e696d5068f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java @@ -13,9 +13,7 @@ import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.storage.bindingtuple.BindingTuple; -/** - * Interface of {@link BindingTuple} Collector. - */ +/** Interface of {@link BindingTuple} Collector. */ public interface Collector { /** @@ -32,16 +30,12 @@ public interface Collector { */ List results(); - /** - * {@link Collector} tree builder. - */ + /** {@link Collector} tree builder. */ @UtilityClass class Builder { - /** - * build {@link Collector}. - */ - public static Collector build(List buckets, - List aggregators) { + /** build {@link Collector}. */ + public static Collector build( + List buckets, List aggregators) { if (buckets.isEmpty()) { return new MetricCollector(aggregators); } else { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java index c804c7bc9b..2cfa3c9457 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java @@ -22,9 +22,7 @@ */ public class MetricCollector implements Collector { - /** - * List of {@link NamedAggregator}. - */ + /** List of {@link NamedAggregator}. 
*/ private final List> aggregators; /** diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java index 782c931046..81a1a0230f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java @@ -34,15 +34,11 @@ import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.utils.DateTimeUtils; -/** - * Rounding. - */ +/** Rounding. */ @EqualsAndHashCode public abstract class Rounding { - /** - * Create Rounding instance. - */ + /** Create Rounding instance. */ public static Rounding createRounding(SpanExpression span) { ExprValue interval = span.getValue().valueOf(); ExprType type = span.type(); @@ -70,7 +66,6 @@ public static Rounding createRounding(SpanExpression span) { public abstract ExprValue round(ExprValue value); - static class TimestampRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -82,13 +77,13 @@ public TimestampRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timestampValue() - .toEpochMilli(), interval.integerValue())); + Instant instant = + Instant.ofEpochMilli( + dateTimeUnit.round(var.timestampValue().toEpochMilli(), interval.integerValue())); return new ExprTimestampValue(instant); } } - static class DatetimeRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -100,13 +95,15 @@ public DatetimeRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.datetimeValue() - .atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), interval.integerValue())); + Instant instant = + Instant.ofEpochMilli( + 
dateTimeUnit.round( + var.datetimeValue().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), + interval.integerValue())); return new ExprDatetimeValue(instant.atZone(UTC_ZONE_ID).toLocalDateTime()); } } - static class DateRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -118,8 +115,11 @@ public DateRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.dateValue().atStartOfDay() - .atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), interval.integerValue())); + Instant instant = + Instant.ofEpochMilli( + dateTimeUnit.round( + var.dateValue().atStartOfDay().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), + interval.integerValue())); return new ExprDateValue(instant.atZone(UTC_ZONE_ID).toLocalDate()); } } @@ -136,17 +136,18 @@ public TimeRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { if (dateTimeUnit.id > 4) { - throw new ExpressionEvaluationException(String - .format("Unable to set span unit %s for TIME type", dateTimeUnit.getName())); + throw new ExpressionEvaluationException( + String.format("Unable to set span unit %s for TIME type", dateTimeUnit.getName())); } - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timeValue().getLong( - ChronoField.MILLI_OF_DAY), interval.integerValue())); + Instant instant = + Instant.ofEpochMilli( + dateTimeUnit.round( + var.timeValue().getLong(ChronoField.MILLI_OF_DAY), interval.integerValue())); return new ExprTimeValue(instant.atZone(UTC_ZONE_ID).toLocalTime()); } } - static class LongRounding extends Rounding { private final Long longInterval; @@ -161,7 +162,6 @@ public ExprValue round(ExprValue value) { } } - static class DoubleRounding extends Rounding { private final Double doubleInterval; @@ -171,13 +171,12 @@ protected DoubleRounding(ExprValue interval) { @Override public ExprValue round(ExprValue value) { - 
double rounded = Double - .valueOf(value.doubleValue() / doubleInterval).intValue() * doubleInterval; + double rounded = + Double.valueOf(value.doubleValue() / doubleInterval).intValue() * doubleInterval; return ExprValueUtils.doubleValue(rounded); } } - @RequiredArgsConstructor static class UnknownRounding extends Rounding { @Override @@ -186,43 +185,37 @@ public ExprValue round(ExprValue var) { } } - @RequiredArgsConstructor public enum DateTimeUnit { - MILLISECOND(1, "ms", true, ChronoField.MILLI_OF_SECOND - .getBaseUnit().getDuration().toMillis()) { + MILLISECOND(1, "ms", true, ChronoField.MILLI_OF_SECOND.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - SECOND(2, "s", true, ChronoField.SECOND_OF_MINUTE - .getBaseUnit().getDuration().toMillis()) { + SECOND(2, "s", true, ChronoField.SECOND_OF_MINUTE.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - MINUTE(3, "m", true, ChronoField.MINUTE_OF_HOUR - .getBaseUnit().getDuration().toMillis()) { + MINUTE(3, "m", true, ChronoField.MINUTE_OF_HOUR.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - HOUR(4, "h", true, ChronoField.HOUR_OF_DAY - .getBaseUnit().getDuration().toMillis()) { + HOUR(4, "h", true, ChronoField.HOUR_OF_DAY.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - DAY(5, "d", true, ChronoField.DAY_OF_MONTH - .getBaseUnit().getDuration().toMillis()) { + DAY(5, "d", true, ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); @@ -257,18 
+250,14 @@ long round(long utcMillis, int interval) { } }; - @Getter - private final int id; - @Getter - private final String name; + @Getter private final int id; + @Getter private final String name; protected final boolean isMillisBased; protected final long ratio; abstract long round(long utcMillis, int interval); - /** - * Resolve the date time unit. - */ + /** Resolve the date time unit. */ public static Rounding.DateTimeUnit resolve(String name) { switch (name) { case "M": @@ -283,5 +272,4 @@ public static Rounding.DateTimeUnit resolve(String name) { } } } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java index 105ad5ed32..5542d0f0e4 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java @@ -19,12 +19,9 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.Table; - /** - * Table implementation to handle show datasources command. - * Since datasource information is not tied to any storage engine, this info - * is handled via DataSource Table. - * + * Table implementation to handle show datasources command. Since datasource information is not tied + * to any storage engine, this info is handled via DataSource Table. 
*/ @RequiredArgsConstructor @EqualsAndHashCode @@ -44,8 +41,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public static class DataSourceTableDefaultImplementor - extends DefaultImplementor { + public static class DataSourceTableDefaultImplementor extends DefaultImplementor { private final DataSourceService dataSourceService; @@ -54,5 +50,4 @@ public PhysicalPlan visitRelation(LogicalRelation node, Object context) { return new DataSourceTableScan(dataSourceService); } } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java index 93e65054b5..bc92df7d16 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java @@ -22,11 +22,9 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * This class handles table scan of data source table. - * Right now these are derived from dataSourceService thorough static fields. - * In future this might scan data from underlying datastore if we start - * persisting datasource info somewhere. - * + * This class handles table scan of data source table. Right now these are derived from + * dataSourceService thorough static fields. In future this might scan data from underlying + * datastore if we start persisting datasource info somewhere. 
*/ public class DataSourceTableScan extends TableScanOperator { @@ -47,15 +45,16 @@ public String explain() { @Override public void open() { List exprValues = new ArrayList<>(); - Set dataSourceMetadataSet - = dataSourceService.getDataSourceMetadata(true); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(true); for (DataSourceMetadata dataSourceMetadata : dataSourceMetadataSet) { exprValues.add( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "DATASOURCE_NAME", - ExprValueUtils.stringValue(dataSourceMetadata.getName()), - "CONNECTOR_TYPE", - ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name()))))); + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "DATASOURCE_NAME", + ExprValueUtils.stringValue(dataSourceMetadata.getName()), + "CONNECTOR_TYPE", + ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name()))))); } iterator = exprValues.iterator(); } @@ -69,5 +68,4 @@ public boolean hasNext() { public ExprValue next() { return iterator.next(); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java index dd959d9b56..469305a15d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java @@ -13,19 +13,16 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprType; -/** - * Definition of the data source table schema. - */ +/** Definition of the data source table schema. 
*/ @Getter @RequiredArgsConstructor public enum DataSourceTableSchema { - - DATASOURCE_TABLE_SCHEMA(new LinkedHashMap<>() { - { - put("DATASOURCE_NAME", STRING); - put("CONNECTOR_TYPE", STRING); - } - } - ); + DATASOURCE_TABLE_SCHEMA( + new LinkedHashMap<>() { + { + put("DATASOURCE_NAME", STRING); + put("CONNECTOR_TYPE", STRING); + } + }); private final Map mapping; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java b/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java index 18eb10f19d..87ff048531 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java @@ -8,8 +8,8 @@ import lombok.Data; /** - * Stream context required by stream processing components and can be - * stored and restored between executions. + * Stream context required by stream processing components and can be stored and restored between + * executions. */ @Data public class StreamContext { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java index 63d6a5b163..49a91dd9cc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java @@ -7,9 +7,7 @@ import lombok.RequiredArgsConstructor; -/** - * Watermark generator that generates watermark with bounded out-of-order delay. - */ +/** Watermark generator that generates watermark with bounded out-of-order delay. 
*/ @RequiredArgsConstructor public class BoundedOutOfOrderWatermarkGenerator implements WatermarkGenerator { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java index 4f4c9a8a00..e4a44e5169 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java @@ -6,8 +6,8 @@ package org.opensearch.sql.planner.streaming.watermark; /** - * A watermark generator generates watermark timestamp based on some strategy which is defined - * in implementation class. + * A watermark generator generates watermark timestamp based on some strategy which is defined in + * implementation class. */ public interface WatermarkGenerator { @@ -18,5 +18,4 @@ public interface WatermarkGenerator { * @return watermark timestamp in millisecond */ long generate(long timestamp); - } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java index 2a85ea391c..3d5b180346 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java @@ -7,9 +7,7 @@ import lombok.Data; -/** - * A time window is a window of time interval with inclusive start time and exclusive end time. - */ +/** A time window is a window of time interval with inclusive start time and exclusive end time. */ @Data public class Window { @@ -19,9 +17,7 @@ public class Window { /** End timestamp (exclusive) of the time window. */ private final long endTime; - /** - * Return the maximum timestamp (inclusive) of the window. - */ + /** Return the maximum timestamp (inclusive) of the window. 
*/ public long maxTimestamp() { return endTime - 1; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java index f0f47fd575..1b1f12a573 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java @@ -12,8 +12,8 @@ import org.opensearch.sql.utils.DateTimeUtils; /** - * A sliding window assigner assigns multiple overlapped window per event timestamp. - * The overlap size is determined by the given slide interval. + * A sliding window assigner assigns multiple overlapped window per event timestamp. The overlap + * size is determined by the given slide interval. */ public class SlidingWindowAssigner implements WindowAssigner { @@ -27,13 +27,13 @@ public class SlidingWindowAssigner implements WindowAssigner { * Create sliding window assigner with the given window and slide size in millisecond. 
* * @param windowSize window size in millisecond - * @param slideSize slide size in millisecond + * @param slideSize slide size in millisecond */ public SlidingWindowAssigner(long windowSize, long slideSize) { - Preconditions.checkArgument(windowSize > 0, - "Window size [%s] must be positive number", windowSize); - Preconditions.checkArgument(slideSize > 0, - "Slide size [%s] must be positive number", slideSize); + Preconditions.checkArgument( + windowSize > 0, "Window size [%s] must be positive number", windowSize); + Preconditions.checkArgument( + slideSize > 0, "Slide size [%s] must be positive number", slideSize); this.windowSize = windowSize; this.slideSize = slideSize; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java index 192bb6c429..2591689a35 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java @@ -11,9 +11,7 @@ import org.opensearch.sql.planner.streaming.windowing.Window; import org.opensearch.sql.utils.DateTimeUtils; -/** - * A tumbling window assigner assigns a single window per event timestamp without overlap. - */ +/** A tumbling window assigner assigns a single window per event timestamp without overlap. */ public class TumblingWindowAssigner implements WindowAssigner { /** Window size in millisecond. 
*/ @@ -25,8 +23,8 @@ public class TumblingWindowAssigner implements WindowAssigner { * @param windowSize window size in millisecond */ public TumblingWindowAssigner(long windowSize) { - Preconditions.checkArgument(windowSize > 0, - "Window size [%s] must be positive number", windowSize); + Preconditions.checkArgument( + windowSize > 0, "Window size [%s] must be positive number", windowSize); this.windowSize = windowSize; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java index dac882c5ff..fd615c2d5e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java @@ -9,16 +9,16 @@ import org.opensearch.sql.planner.streaming.windowing.Window; /** - * A window assigner assigns zero or more window to an event timestamp - * based on different windowing approach. + * A window assigner assigns zero or more window to an event timestamp based on different windowing + * approach. */ public interface WindowAssigner { /** * Return window(s) assigned to the timestamp. + * * @param timestamp given event timestamp * @return windows assigned */ List assign(long timestamp); - } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java index 1801880961..f614ce847e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java @@ -11,8 +11,8 @@ /** * After watermark window trigger fires window state output once a window is below watermark. 
- * Precisely speaking, after watermark means the window boundary (max timestamp) is equal to - * or less than the current watermark timestamp. + * Precisely speaking, after watermark means the window boundary (max timestamp) is equal to or less + * than the current watermark timestamp. */ @RequiredArgsConstructor public class AfterWatermarkWindowTrigger implements WindowTrigger { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java index 465f0aa9eb..30dba22725 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java @@ -8,9 +8,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Result determined by a trigger for what should happen to the window. - */ +/** Result determined by a trigger for what should happen to the window. */ @Getter @RequiredArgsConstructor public enum TriggerResult { diff --git a/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java b/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java index 8512eddbe3..69d902c1d7 100644 --- a/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java +++ b/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java @@ -19,14 +19,9 @@ * {@link DataSourceFactory}. */ public interface DataSourceFactory { - /** - * Get {@link DataSourceType}. - */ + /** Get {@link DataSourceType}. */ DataSourceType getDataSourceType(); - /** - * Create {@link DataSource}. - */ + /** Create {@link DataSource}. 
*/ DataSource createDataSource(DataSourceMetadata metadata); - } diff --git a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java index ffcc0911de..c3b54beaaa 100644 --- a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java +++ b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage; import java.util.Collection; @@ -11,14 +10,10 @@ import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.expression.function.FunctionResolver; -/** - * Storage engine for different storage to provide data access API implementation. - */ +/** Storage engine for different storage to provide data access API implementation. */ public interface StorageEngine { - /** - * Get {@link Table} from storage engine. - */ + /** Get {@link Table} from storage engine. */ Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableName); /** diff --git a/core/src/main/java/org/opensearch/sql/storage/Table.java b/core/src/main/java/org/opensearch/sql/storage/Table.java index fc1def5a2e..33dbd7d66d 100644 --- a/core/src/main/java/org/opensearch/sql/storage/Table.java +++ b/core/src/main/java/org/opensearch/sql/storage/Table.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage; import java.util.Map; @@ -15,9 +14,7 @@ import org.opensearch.sql.storage.read.TableScanBuilder; import org.opensearch.sql.storage.write.TableWriteBuilder; -/** - * Table. - */ +/** Table. */ public interface Table { /** @@ -38,14 +35,10 @@ default void create(Map schema) { throw new UnsupportedOperationException("Unsupported Operation"); } - /** - * Get the {@link ExprType} for each field in the table. - */ + /** Get the {@link ExprType} for each field in the table. 
*/ Map getFieldTypes(); - /** - * Get the {@link ExprType} for each meta-field (reserved fields) in the table. - */ + /** Get the {@link ExprType} for each meta-field (reserved fields) in the table. */ default Map getReservedFieldTypes() { return Map.of(); } @@ -61,8 +54,8 @@ default Map getReservedFieldTypes() { PhysicalPlan implement(LogicalPlan plan); /** - * Optimize the {@link LogicalPlan} by storage engine rule. - * The default optimize solution is no optimization. + * Optimize the {@link LogicalPlan} by storage engine rule. The default optimize solution is no + * optimization. * * @param plan logical plan. * @return logical plan. @@ -89,15 +82,11 @@ default TableScanBuilder createScanBuilder() { * @return table write builder */ default TableWriteBuilder createWriteBuilder(LogicalWrite plan) { - throw new UnsupportedOperationException( - "Write operation is not supported on current table"); + throw new UnsupportedOperationException("Write operation is not supported on current table"); } - /** - * Translate {@link Table} to {@link StreamingSource} if possible. - */ + /** Translate {@link Table} to {@link StreamingSource} if possible. */ default StreamingSource asStreamingSource() { throw new UnsupportedOperationException(); } - } diff --git a/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java b/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java index 1b8e33bc4f..130516b3ef 100644 --- a/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java +++ b/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage; import java.util.Collections; @@ -12,8 +11,8 @@ import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; /** - * Abstract table scan class for different storage to implement. - * This is also to avoid "polluting" physical plan visitor by concrete table scan implementation. 
+ * Abstract table scan class for different storage to implement. This is also to avoid "polluting" + * physical plan visitor by concrete table scan implementation. */ public abstract class TableScanOperator extends PhysicalPlan { diff --git a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java index 51a0348116..2487c651ad 100644 --- a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java +++ b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage.bindingtuple; import org.opensearch.sql.data.model.ExprMissingValue; @@ -14,20 +13,19 @@ import org.opensearch.sql.expression.env.Environment; /** - * BindingTuple represents the a relationship between bindingName and ExprValue. - * e.g. The operation output column name is bindingName, the value is the ExprValue. + * BindingTuple represents the a relationship between bindingName and ExprValue. e.g. The operation + * output column name is bindingName, the value is the ExprValue. */ public abstract class BindingTuple implements Environment { - public static BindingTuple EMPTY = new BindingTuple() { - @Override - public ExprValue resolve(ReferenceExpression ref) { - return ExprMissingValue.of(); - } - }; + public static BindingTuple EMPTY = + new BindingTuple() { + @Override + public ExprValue resolve(ReferenceExpression ref) { + return ExprMissingValue.of(); + } + }; - /** - * Resolve {@link Expression} in the BindingTuple environment. - */ + /** Resolve {@link Expression} in the BindingTuple environment. */ @Override public ExprValue resolve(Expression var) { if (var instanceof ReferenceExpression) { @@ -37,8 +35,6 @@ public ExprValue resolve(Expression var) { } } - /** - * Resolve the {@link ReferenceExpression} in BindingTuple context. 
- */ + /** Resolve the {@link ReferenceExpression} in BindingTuple context. */ public abstract ExprValue resolve(ReferenceExpression ref); } diff --git a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java index 4589731442..d43a3f2a1b 100644 --- a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java +++ b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage.bindingtuple; import java.util.function.Supplier; @@ -12,9 +11,7 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Lazy Implementation of {@link BindingTuple}. - */ +/** Lazy Implementation of {@link BindingTuple}. */ @RequiredArgsConstructor public class LazyBindingTuple extends BindingTuple { private final Supplier lazyBinding; diff --git a/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java b/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java index f0158c52b8..b2da0b67a4 100644 --- a/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java +++ b/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java @@ -19,16 +19,14 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * A TableScanBuilder represents transition state between logical planning and physical planning - * for table scan operator. The concrete implementation class gets involved in the logical - * optimization through this abstraction and thus get the chance to handle push down optimization - * without intruding core engine. + * A TableScanBuilder represents transition state between logical planning and physical planning for + * table scan operator. 
The concrete implementation class gets involved in the logical optimization + * through this abstraction and thus get the chance to handle push down optimization without + * intruding core engine. */ public abstract class TableScanBuilder extends LogicalPlan { - /** - * Construct and initialize children to empty list. - */ + /** Construct and initialize children to empty list. */ protected TableScanBuilder() { super(Collections.emptyList()); } @@ -41,8 +39,8 @@ protected TableScanBuilder() { public abstract TableScanOperator build(); /** - * Can a given filter operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given filter operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param filter logical filter operator * @return true if pushed down, otherwise false @@ -52,8 +50,8 @@ public boolean pushDownFilter(LogicalFilter filter) { } /** - * Can a given aggregate operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given aggregate operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param aggregation logical aggregate operator * @return true if pushed down, otherwise false @@ -63,8 +61,8 @@ public boolean pushDownAggregation(LogicalAggregation aggregation) { } /** - * Can a given sort operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given sort operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param sort logical sort operator * @return true if pushed down, otherwise false @@ -74,8 +72,8 @@ public boolean pushDownSort(LogicalSort sort) { } /** - * Can a given limit operator be pushed down to table scan builder. 
Assume no such support - * by default unless subclass override this. + * Can a given limit operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param limit logical limit operator * @return true if pushed down, otherwise false @@ -85,8 +83,8 @@ public boolean pushDownLimit(LogicalLimit limit) { } /** - * Can a given project operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given project operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param project logical project operator * @return true if pushed down, otherwise false @@ -96,8 +94,8 @@ public boolean pushDownProject(LogicalProject project) { } /** - * Can a given highlight operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given highlight operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param highlight logical highlight operator * @return true if pushed down, otherwise false @@ -107,8 +105,8 @@ public boolean pushDownHighlight(LogicalHighlight highlight) { } /** - * Can a given nested operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given nested operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. 
* * @param nested logical nested operator * @return true if pushed down, otherwise false diff --git a/core/src/main/java/org/opensearch/sql/storage/split/Split.java b/core/src/main/java/org/opensearch/sql/storage/split/Split.java index e9e0c6fcc1..1cb0ca57ce 100644 --- a/core/src/main/java/org/opensearch/sql/storage/split/Split.java +++ b/core/src/main/java/org/opensearch/sql/storage/split/Split.java @@ -8,13 +8,14 @@ import org.opensearch.sql.storage.StorageEngine; /** - * Split is a sections of a data set. Each {@link StorageEngine} should have specific - * implementation of Split. + * Split is a sections of a data set. Each {@link StorageEngine} should have specific implementation + * of Split. */ public interface Split { /** * Get the split id. + * * @return split id. */ String getSplitId(); diff --git a/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java b/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java index 54dfa5d557..af18916f71 100644 --- a/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java +++ b/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java @@ -18,9 +18,7 @@ */ public abstract class TableWriteBuilder extends LogicalPlan { - /** - * Construct table write builder with child node. - */ + /** Construct table write builder with child node. */ public TableWriteBuilder(LogicalPlan child) { super(Collections.singletonList(child)); } @@ -28,7 +26,7 @@ public TableWriteBuilder(LogicalPlan child) { /** * Build table write operator with given child node. 
* - * @param child child operator node + * @param child child operator node * @return table write operator */ public abstract TableWriteOperator build(PhysicalPlan child); diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java index 39726bc975..18e6541514 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java +++ b/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import static java.time.temporal.ChronoField.DAY_OF_MONTH; @@ -22,26 +21,23 @@ import java.util.Locale; import lombok.experimental.UtilityClass; -/** - * DateTimeFormatter. - * Reference org.opensearch.common.time.DateFormatters. - */ +/** DateTimeFormatter. Reference org.opensearch.common.time.DateFormatters. */ @UtilityClass public class DateTimeFormatters { - //Length of a date formatted as YYYYMMDD. + // Length of a date formatted as YYYYMMDD. public static final int FULL_DATE_LENGTH = 8; - //Length of a date formatted as YYMMDD. + // Length of a date formatted as YYMMDD. public static final int SHORT_DATE_LENGTH = 6; - //Length of a date formatted as YMMDD. + // Length of a date formatted as YMMDD. public static final int SINGLE_DIGIT_YEAR_DATE_LENGTH = 5; - //Length of a date formatted as MMDD. + // Length of a date formatted as MMDD. public static final int NO_YEAR_DATE_LENGTH = 4; - //Length of a date formatted as MDD. + // Length of a date formatted as MDD. 
public static final int SINGLE_DIGIT_MONTH_DATE_LENGTH = 3; private static final int MIN_FRACTION_SECONDS = 0; @@ -110,8 +106,8 @@ public class DateTimeFormatters { .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); - public static final DateTimeFormatter SQL_LITERAL_DATE_TIME_FORMAT = DateTimeFormatter - .ofPattern("yyyy-MM-dd HH:mm:ss"); + public static final DateTimeFormatter SQL_LITERAL_DATE_TIME_FORMAT = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); public static final DateTimeFormatter DATE_TIME_FORMATTER = new DateTimeFormatterBuilder() @@ -120,9 +116,7 @@ public class DateTimeFormatters { .appendOptional(STRICT_HOUR_MINUTE_SECOND_FORMATTER) .toFormatter(); - /** - * todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. - */ + /** todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. */ public static final DateTimeFormatter DATE_TIME_FORMATTER_WITHOUT_NANO = SQL_LITERAL_DATE_TIME_FORMAT; @@ -130,10 +124,7 @@ public class DateTimeFormatters { new DateTimeFormatterBuilder() .appendPattern("uuuu-MM-dd HH:mm:ss") .appendFraction( - ChronoField.NANO_OF_SECOND, - MIN_FRACTION_SECONDS, - MAX_FRACTION_SECONDS, - true) + ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); @@ -141,10 +132,7 @@ public class DateTimeFormatters { new DateTimeFormatterBuilder() .appendPattern("[uuuu-MM-dd HH:mm:ss][uuuu-MM-dd HH:mm][HH:mm:ss][HH:mm][uuuu-MM-dd]") .appendFraction( - ChronoField.NANO_OF_SECOND, - MIN_FRACTION_SECONDS, - MAX_FRACTION_SECONDS, - true) + ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); @@ -199,7 +187,7 @@ public class DateTimeFormatters { // YYYYMMDDhhmmss public static final DateTimeFormatter DATE_TIME_FORMATTER_LONG_YEAR = new DateTimeFormatterBuilder() - .appendValue(YEAR,4) + .appendValue(YEAR, 4) .appendPattern("MMddHHmmss") 
.toFormatter() .withResolverStyle(ResolverStyle.STRICT); @@ -214,11 +202,8 @@ public class DateTimeFormatters { // uuuu-MM-dd HH:mm:ss[xxx] public static final DateTimeFormatter DATE_TIME_FORMATTER_WITH_TZ = new DateTimeFormatterBuilder() - .appendPattern("uuuu-MM-dd HH:mm:ss[xxx]") - .appendFraction( - ChronoField.NANO_OF_SECOND, - MIN_FRACTION_SECONDS, - MAX_FRACTION_SECONDS, - true) - .toFormatter(); + .appendPattern("uuuu-MM-dd HH:mm:ss[xxx]") + .appendFraction( + ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) + .toFormatter(); } diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java index 74fdf42571..593b4c4471 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java @@ -21,9 +21,9 @@ public class DateTimeUtils { /** * Util method to round the date/time with given unit. * - * @param utcMillis Date/time value to round, given in utc millis - * @param unitMillis Date/time interval unit in utc millis - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param unitMillis Date/time interval unit in utc millis + * @return Rounded date/time value in utc millis */ public static long roundFloor(long utcMillis, long unitMillis) { return utcMillis - utcMillis % unitMillis; @@ -32,9 +32,9 @@ public static long roundFloor(long utcMillis, long unitMillis) { /** * Util method to round the date/time in week(s). 
* - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of weeks as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of weeks as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundWeek(long utcMillis, int interval) { return roundFloor(utcMillis + 259200000L, 604800000L * interval) - 259200000L; @@ -43,16 +43,18 @@ public static long roundWeek(long utcMillis, int interval) { /** * Util method to round the date/time in month(s). * - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of months as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of months as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundMonth(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID) - .plusMonths(interval); - long monthDiff = (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime - .getMonthValue() - initDateTime.getMonthValue(); + ZonedDateTime zonedDateTime = + Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval); + long monthDiff = + (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + + zonedDateTime.getMonthValue() + - initDateTime.getMonthValue(); long monthToAdd = (monthDiff / interval - 1) * interval; return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); } @@ -60,16 +62,18 @@ public static long roundMonth(long utcMillis, int interval) { /** * Util method to round the date/time in quarter(s). 
* - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of quarters as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of quarters as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundQuarter(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID) - .plusMonths(interval * 3L); - long monthDiff = ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime - .getMonthValue() - initDateTime.getMonthValue()); + ZonedDateTime zonedDateTime = + Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval * 3L); + long monthDiff = + ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + + zonedDateTime.getMonthValue() + - initDateTime.getMonthValue()); long monthToAdd = (monthDiff / (interval * 3L) - 1) * interval * 3; return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); } @@ -77,9 +81,9 @@ public static long roundQuarter(long utcMillis, int interval) { /** * Util method to round the date/time in year(s). 
* - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of years as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of years as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundYear(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); @@ -124,29 +128,25 @@ public Boolean isValidMySqlTimeZoneId(ZoneId zone) { ZonedDateTime passedTzValidator = defaultDateTime.withZoneSameInstant(zone).withZoneSameLocal(defaultTz); - return (passedTzValidator.isBefore(maxTzValidator) - || passedTzValidator.isEqual(maxTzValidator)) - && (passedTzValidator.isAfter(minTzValidator) - || passedTzValidator.isEqual(minTzValidator)); + return (passedTzValidator.isBefore(maxTzValidator) || passedTzValidator.isEqual(maxTzValidator)) + && (passedTzValidator.isAfter(minTzValidator) || passedTzValidator.isEqual(minTzValidator)); } /** - * Extracts LocalDateTime from a datetime ExprValue. - * Uses `FunctionProperties` for `ExprTimeValue`. + * Extracts LocalDateTime from a datetime ExprValue. Uses `FunctionProperties` for + * `ExprTimeValue`. */ - public static LocalDateTime extractDateTime(ExprValue value, - FunctionProperties functionProperties) { + public static LocalDateTime extractDateTime( + ExprValue value, FunctionProperties functionProperties) { return value instanceof ExprTimeValue ? ((ExprTimeValue) value).datetimeValue(functionProperties) : value.datetimeValue(); } /** - * Extracts LocalDate from a datetime ExprValue. - * Uses `FunctionProperties` for `ExprTimeValue`. + * Extracts LocalDate from a datetime ExprValue. Uses `FunctionProperties` for `ExprTimeValue`. 
*/ - public static LocalDate extractDate(ExprValue value, - FunctionProperties functionProperties) { + public static LocalDate extractDate(ExprValue value, FunctionProperties functionProperties) { return value instanceof ExprTimeValue ? ((ExprTimeValue) value).dateValue(functionProperties) : value.dateValue(); diff --git a/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java b/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java index e8324af5f4..f04bf3748f 100644 --- a/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import java.util.List; @@ -11,17 +10,13 @@ import lombok.experimental.UtilityClass; import org.opensearch.sql.expression.Expression; -/** - * Utils for {@link Expression}. - */ +/** Utils for {@link Expression}. */ @UtilityClass public class ExpressionUtils { public static String PATH_SEP = "."; - /** - * Format the list of {@link Expression}. - */ + /** Format the list of {@link Expression}. */ public static String format(List expressionList) { return expressionList.stream().map(Expression::toString).collect(Collectors.joining(",")); } diff --git a/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java b/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java index f4ece6a190..d9ae0b4258 100644 --- a/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import java.util.regex.Pattern; @@ -15,9 +14,10 @@ @UtilityClass public class OperatorUtils { /** - * Wildcard pattern matcher util. - * Percent (%) character for wildcard, + * Wildcard pattern matcher util.
+ * Percent (%) character for wildcard,
* Underscore (_) character for a single character match. + * * @param pattern string pattern to match. * @return if text matches pattern returns true; else return false. */ @@ -30,12 +30,13 @@ public static ExprBooleanValue matches(ExprValue text, ExprValue pattern) { /** * Checks if text matches regular expression pattern. + * * @param pattern string pattern to match. * @return if text matches pattern returns true; else return false. */ public static ExprIntegerValue matchesRegexp(ExprValue text, ExprValue pattern) { - return new ExprIntegerValue(Pattern.compile(pattern.stringValue()).matcher(text.stringValue()) - .matches() ? 1 : 0); + return new ExprIntegerValue( + Pattern.compile(pattern.stringValue()).matcher(text.stringValue()).matches() ? 1 : 0); } private static final char DEFAULT_ESCAPE = '\\'; diff --git a/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java b/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java index 6c640482d0..e659cfdf50 100644 --- a/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import com.google.common.collect.ImmutableMap; @@ -18,30 +17,27 @@ import org.opensearch.sql.expression.parse.PatternsExpression; import org.opensearch.sql.expression.parse.RegexExpression; -/** - * Utils for {@link ParseExpression}. - */ +/** Utils for {@link ParseExpression}. 
*/ @UtilityClass public class ParseUtils { private static final String NEW_FIELD_KEY = "new_field"; - private static final Map FACTORY_MAP = ImmutableMap.of( - ParseMethod.REGEX, RegexExpression::new, - ParseMethod.GROK, GrokExpression::new, - ParseMethod.PATTERNS, PatternsExpression::new - ); + private static final Map FACTORY_MAP = + ImmutableMap.of( + ParseMethod.REGEX, RegexExpression::new, + ParseMethod.GROK, GrokExpression::new, + ParseMethod.PATTERNS, PatternsExpression::new); /** * Construct corresponding ParseExpression by {@link ParseMethod}. * * @param parseMethod method used to parse * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field * @return {@link ParseExpression} */ - public static ParseExpression createParseExpression(ParseMethod parseMethod, - Expression sourceField, Expression pattern, - Expression identifier) { + public static ParseExpression createParseExpression( + ParseMethod parseMethod, Expression sourceField, Expression pattern, Expression identifier) { return FACTORY_MAP.get(parseMethod).initialize(sourceField, pattern, identifier); } @@ -51,21 +47,23 @@ public static ParseExpression createParseExpression(ParseMethod parseMethod, * @param pattern pattern used for parsing * @return list of names of the derived fields */ - public static List getNamedGroupCandidates(ParseMethod parseMethod, String pattern, - Map arguments) { + public static List getNamedGroupCandidates( + ParseMethod parseMethod, String pattern, Map arguments) { switch (parseMethod) { case REGEX: return RegexExpression.getNamedGroupCandidates(pattern); case GROK: return GrokExpression.getNamedGroupCandidates(pattern); default: - return PatternsExpression.getNamedGroupCandidates(arguments.containsKey(NEW_FIELD_KEY) - ? 
(String) arguments.get(NEW_FIELD_KEY).getValue() : null); + return PatternsExpression.getNamedGroupCandidates( + arguments.containsKey(NEW_FIELD_KEY) + ? (String) arguments.get(NEW_FIELD_KEY).getValue() + : null); } } private interface ParseExpressionFactory { - ParseExpression initialize(Expression sourceField, Expression expression, - Expression identifier); + ParseExpression initialize( + Expression sourceField, Expression expression, Expression identifier); } } diff --git a/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java b/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java index 5325ea371a..38d2753f6c 100644 --- a/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java @@ -3,44 +3,32 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.experimental.UtilityClass; -/** - * System Index Utils. - * Todo. Find the better name for this class. - */ +/** System Index Utils. Todo. Find the better name for this class. */ @UtilityClass public class SystemIndexUtils { public static final String TABLE_NAME_FOR_TABLES_INFO = "tables"; - /** - * The suffix of all the system tables. - */ + + /** The suffix of all the system tables. */ private static final String SYS_TABLES_SUFFIX = "ODFE_SYS_TABLE"; - /** - * The suffix of all the meta tables. - */ + /** The suffix of all the meta tables. */ private static final String SYS_META_SUFFIX = "META_" + SYS_TABLES_SUFFIX; - /** - * The suffix of all the table mappings. - */ - private static final String SYS_MAPPINGS_SUFFIX = "MAPPINGS_" + SYS_TABLES_SUFFIX; + /** The suffix of all the table mappings. */ + private static final String SYS_MAPPINGS_SUFFIX = "MAPPINGS_" + SYS_TABLES_SUFFIX; - /** - * The ALL.META_ODFE_SYS_TABLE contain all the table info. - */ + /** The ALL.META_ODFE_SYS_TABLE contain all the table info. 
*/ public static final String TABLE_INFO = "ALL." + SYS_META_SUFFIX; public static final String DATASOURCES_TABLE_NAME = ".DATASOURCES"; - public static Boolean isSystemIndex(String indexName) { return indexName.endsWith(SYS_TABLES_SUFFIX); } @@ -62,8 +50,7 @@ public static String mappingTable(String indexName) { public static SystemTable systemTable(String indexName) { final int lastDot = indexName.lastIndexOf("."); String suffix = indexName.substring(lastDot + 1); - String tableName = indexName.substring(0, lastDot) - .replace("%", "*"); + String tableName = indexName.substring(0, lastDot).replace("%", "*"); if (suffix.equalsIgnoreCase(SYS_META_SUFFIX)) { return new SystemInfoTable(tableName); @@ -74,9 +61,7 @@ public static SystemTable systemTable(String indexName) { } } - /** - * System Table. - */ + /** System Table. */ public interface SystemTable { String getTableName(); @@ -90,9 +75,7 @@ default boolean isMetaInfoTable() { } } - /** - * System Info Table. - */ + /** System Info Table. */ @Getter @RequiredArgsConstructor public static class SystemInfoTable implements SystemTable { @@ -104,9 +87,7 @@ public boolean isSystemInfoTable() { } } - /** - * System Table. - */ + /** System Table. */ @Getter @RequiredArgsConstructor public static class MetaInfoTable implements SystemTable { From 63b00ba097a76a1dd706e5159b601855e190b352 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 9 Aug 2023 15:03:27 -0700 Subject: [PATCH 11/42] [Spotless] Applying Google Code Format for core/src/main files #4 (#1933) * GJF part 4 Signed-off-by: Mitchell Gale * add build.gradle comment to mention why we are ignoring checkstyle failures for core. Signed-off-by: Mitchell Gale * Fix include spotless build gradle. Signed-off-by: Mitchell Gale * revert astDSL.JAVA Signed-off-by: Mitchell Gale * revert ast changes as was covered in spotless #1 PR for GJF. 
Signed-off-by: Mitchell Gale * Reverting commits in ast folder attempt #2 Signed-off-by: Mitchell Gale * revert change to RaretopN.java Signed-off-by: Mitchell Gale * addressed PR comments. Signed-off-by: Mitchell Gale * Replacing removed include in spotless. Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- build.gradle | 6 +- core/build.gradle | 4 +- .../opensearch/sql/DataSourceSchemaName.java | 1 - .../sql/analysis/AnalysisContextTest.java | 1 - .../opensearch/sql/analysis/AnalyzerTest.java | 1363 ++++++++--------- .../sql/analysis/AnalyzerTestBase.java | 101 +- .../sql/analysis/ExpressionAnalyzerTest.java | 665 ++++---- .../ExpressionReferenceOptimizerTest.java | 85 +- .../analysis/NamedExpressionAnalyzerTest.java | 13 +- .../sql/analysis/QualifierAnalyzerTest.java | 47 +- .../sql/analysis/SelectAnalyzeTest.java | 43 +- .../SelectExpressionAnalyzerTest.java | 39 +- .../sql/analysis/TypeEnvironmentTest.java | 26 +- .../WindowExpressionAnalyzerTest.java | 45 +- ...ourceSchemaIdentifierNameResolverTest.java | 11 +- .../sql/analysis/symbol/SymbolTableTest.java | 25 +- .../org/opensearch/sql/config/TestConfig.java | 66 +- 17 files changed, 1221 insertions(+), 1320 deletions(-) diff --git a/build.gradle b/build.gradle index 3e75433d83..71f94636b5 100644 --- a/build.gradle +++ b/build.gradle @@ -84,7 +84,11 @@ repositories { spotless { java { target fileTree('.') { - include 'core/src/main/java/org/opensearch/sql/planner/**/*.java', + include 'core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java', + 'core/src/test/java/org/opensearch/sql/data/**/*.java', + 'core/src/test/java/org/opensearch/sql/config/**/*.java', + 'core/src/test/java/org/opensearch/sql/analysis/**/*.java', + 'core/src/main/java/org/opensearch/sql/planner/**/*.java', 'core/src/main/java/org/opensearch/sql/storage/**/*.java', 'core/src/main/java/org/opensearch/sql/utils/**/*.java', 'core/src/main/java/org/opensearch/sql/monitor/**/*.java', 
diff --git a/core/build.gradle b/core/build.gradle index 89fac623f2..cf7f0b7a1c 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -34,8 +34,10 @@ repositories { mavenCentral() } -checkstyleMain.ignoreFailures = true +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true pitest { targetClasses = ['org.opensearch.sql.*'] diff --git a/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java b/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java index 47988097c3..9c9dfa0772 100644 --- a/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java +++ b/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java @@ -17,5 +17,4 @@ public class DataSourceSchemaName { private final String dataSourceName; private final String schemaName; - } diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java index 0d643aa53f..b052fe47ce 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java index 100cfd67af..2f4d6e8ada 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static java.util.Collections.emptyList; @@ -134,17 +133,13 @@ public void filter_relation_with_reserved_qualifiedName() { @Test 
public void filter_relation_with_invalid_qualifiedName_SemanticCheckException() { - UnresolvedPlan invalidFieldPlan = AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.equalTo( - AstDSL.qualifiedName("_invalid"), - AstDSL.stringLiteral("value")) - ); + UnresolvedPlan invalidFieldPlan = + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.equalTo(AstDSL.qualifiedName("_invalid"), AstDSL.stringLiteral("value"))); SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, - () -> analyze(invalidFieldPlan)); + assertThrows(SemanticCheckException.class, () -> analyze(invalidFieldPlan)); assertEquals( "can't resolve Symbol(namespace=FIELD_NAME, name=_invalid) in type env", exception.getMessage()); @@ -152,15 +147,13 @@ public void filter_relation_with_invalid_qualifiedName_SemanticCheckException() @Test public void filter_relation_with_invalid_qualifiedName_ExpressionEvaluationException() { - UnresolvedPlan typeMismatchPlan = AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.equalTo(AstDSL.qualifiedName("_test"), AstDSL.intLiteral(1)) - ); + UnresolvedPlan typeMismatchPlan = + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.equalTo(AstDSL.qualifiedName("_test"), AstDSL.intLiteral(1))); ExpressionEvaluationException exception = - assertThrows( - ExpressionEvaluationException.class, - () -> analyze(typeMismatchPlan)); + assertThrows(ExpressionEvaluationException.class, () -> analyze(typeMismatchPlan)); assertEquals( "= function expected {[BYTE,BYTE],[SHORT,SHORT],[INTEGER,INTEGER],[LONG,LONG]," + "[FLOAT,FLOAT],[DOUBLE,DOUBLE],[STRING,STRING],[BOOLEAN,BOOLEAN],[DATE,DATE]," @@ -265,8 +258,8 @@ public void filter_relation_with_non_existing_datasource_with_three_parts() { LogicalPlanDSL.relation("test.nonexisting_schema.http_total_requests", table), DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1)))), AstDSL.filter( - AstDSL.relation(AstDSL.qualifiedName("test", - "nonexisting_schema", "http_total_requests")), + 
AstDSL.relation( + AstDSL.qualifiedName("test", "nonexisting_schema", "http_total_requests")), AstDSL.equalTo(AstDSL.field("integer_value"), AstDSL.intLiteral(1)))); } @@ -283,73 +276,68 @@ public void filter_relation_with_multiple_tables() { @Test public void analyze_filter_visit_score_function() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - new ScoreFunction( - AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - AstDSL.unresolvedArg("boost", stringLiteral("3")) - ), AstDSL.doubleLiteral(1.0)) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + new ScoreFunction( + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("boost", stringLiteral("3"))), + AstDSL.doubleLiteral(1.0))); assertAnalyzeEqual( LogicalPlanDSL.filter( LogicalPlanDSL.relation("schema", table), DSL.match_phrase_prefix( DSL.namedArgument("field", "field_value1"), DSL.namedArgument("query", "search query"), - DSL.namedArgument("boost", "3.0") - ) - ), - unresolvedPlan - ); + DSL.namedArgument("boost", "3.0"))), + unresolvedPlan); LogicalPlan logicalPlan = analyze(unresolvedPlan); OpenSearchFunctions.OpenSearchFunction relevanceQuery = - (OpenSearchFunctions.OpenSearchFunction)((LogicalFilter) logicalPlan).getCondition(); + (OpenSearchFunctions.OpenSearchFunction) ((LogicalFilter) logicalPlan).getCondition(); assertEquals(true, relevanceQuery.isScoreTracked()); } @Test public void analyze_filter_visit_without_score_function() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - 
AstDSL.unresolvedArg("boost", stringLiteral("3")) - ) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("boost", stringLiteral("3")))); assertAnalyzeEqual( LogicalPlanDSL.filter( LogicalPlanDSL.relation("schema", table), DSL.match_phrase_prefix( DSL.namedArgument("field", "field_value1"), DSL.namedArgument("query", "search query"), - DSL.namedArgument("boost", "3") - ) - ), - unresolvedPlan - ); + DSL.namedArgument("boost", "3"))), + unresolvedPlan); LogicalPlan logicalPlan = analyze(unresolvedPlan); OpenSearchFunctions.OpenSearchFunction relevanceQuery = - (OpenSearchFunctions.OpenSearchFunction)((LogicalFilter) logicalPlan).getCondition(); + (OpenSearchFunctions.OpenSearchFunction) ((LogicalFilter) logicalPlan).getCondition(); assertEquals(false, relevanceQuery.isScoreTracked()); } @Test public void analyze_filter_visit_score_function_with_double_boost() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - new ScoreFunction( - AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - AstDSL.unresolvedArg("slop", stringLiteral("3")) - ), new Literal(3.0, DataType.DOUBLE) - ) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + new ScoreFunction( + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("slop", stringLiteral("3"))), + new Literal(3.0, DataType.DOUBLE))); assertAnalyzeEqual( LogicalPlanDSL.filter( @@ -358,44 +346,36 @@ public void analyze_filter_visit_score_function_with_double_boost() { DSL.namedArgument("field", 
"field_value1"), DSL.namedArgument("query", "search query"), DSL.namedArgument("slop", "3"), - DSL.namedArgument("boost", "3.0") - ) - ), - unresolvedPlan - ); + DSL.namedArgument("boost", "3.0"))), + unresolvedPlan); LogicalPlan logicalPlan = analyze(unresolvedPlan); OpenSearchFunctions.OpenSearchFunction relevanceQuery = - (OpenSearchFunctions.OpenSearchFunction)((LogicalFilter) logicalPlan).getCondition(); + (OpenSearchFunctions.OpenSearchFunction) ((LogicalFilter) logicalPlan).getCondition(); assertEquals(true, relevanceQuery.isScoreTracked()); } @Test public void analyze_filter_visit_score_function_with_unsupported_boost_SemanticCheckException() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - new ScoreFunction( - AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - AstDSL.unresolvedArg("boost", stringLiteral("3")) - ), AstDSL.stringLiteral("3.0") - ) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + new ScoreFunction( + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("boost", stringLiteral("3"))), + AstDSL.stringLiteral("3.0"))); SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, - () -> analyze(unresolvedPlan)); - assertEquals( - "Expected boost type 'DOUBLE' but got 'STRING'", - exception.getMessage()); + assertThrows(SemanticCheckException.class, () -> analyze(unresolvedPlan)); + assertEquals("Expected boost type 'DOUBLE' but got 'STRING'", exception.getMessage()); } @Test public void head_relation() { assertAnalyzeEqual( - LogicalPlanDSL.limit(LogicalPlanDSL.relation("schema", table), - 10, 0), + LogicalPlanDSL.limit(LogicalPlanDSL.relation("schema", table), 10, 0), 
AstDSL.head(AstDSL.relation("schema"), 10, 0)); } @@ -418,7 +398,7 @@ public void analyze_filter_aggregation_relation() { DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER))), DSL.named("MIN(integer_value)", DSL.min(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), - DSL.greater(// Expect to be replaced with reference by expression optimizer + DSL.greater( // Expect to be replaced with reference by expression optimizer DSL.ref("MIN(integer_value)", INTEGER), DSL.literal(integerValue(10)))), AstDSL.filter( AstDSL.agg( @@ -429,8 +409,7 @@ public void analyze_filter_aggregation_relation() { emptyList(), ImmutableList.of(alias("string_value", qualifiedName("string_value"))), emptyList()), - compare(">", - aggregate("MIN", qualifiedName("integer_value")), intLiteral(10)))); + compare(">", aggregate("MIN", qualifiedName("integer_value")), intLiteral(10)))); } @Test @@ -449,19 +428,16 @@ public void stats_source() { assertAnalyzeEqual( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), AstDSL.agg( AstDSL.relation("schema"), AstDSL.exprList( AstDSL.alias( - "avg(integer_value)", - AstDSL.aggregate("avg", field("integer_value"))) - ), + "avg(integer_value)", AstDSL.aggregate("avg", field("integer_value")))), null, - ImmutableList.of( - AstDSL.alias("string_value", field("string_value"))), + ImmutableList.of(AstDSL.alias("string_value", field("string_value"))), AstDSL.defaultStatsArgs())); } @@ -473,16 +449,13 @@ public void rare_source() { CommandType.RARE, 10, ImmutableList.of(DSL.ref("string_value", STRING)), - DSL.ref("integer_value", INTEGER) - ), + DSL.ref("integer_value", INTEGER)), 
AstDSL.rareTopN( AstDSL.relation("schema"), CommandType.RARE, ImmutableList.of(argument("noOfResults", intLiteral(10))), ImmutableList.of(field("string_value")), - field("integer_value") - ) - ); + field("integer_value"))); } @Test @@ -493,16 +466,13 @@ public void top_source() { CommandType.TOP, 5, ImmutableList.of(DSL.ref("string_value", STRING)), - DSL.ref("integer_value", INTEGER) - ), + DSL.ref("integer_value", INTEGER)), AstDSL.rareTopN( AstDSL.relation("schema"), CommandType.TOP, ImmutableList.of(argument("noOfResults", intLiteral(5))), ImmutableList.of(field("string_value")), - field("integer_value") - ) - ); + field("integer_value"))); } @Test @@ -516,8 +486,9 @@ public void rename_to_invalid_expression() { AstDSL.agg( AstDSL.relation("schema"), AstDSL.exprList( - AstDSL.alias("avg(integer_value)", AstDSL.aggregate("avg", field( - "integer_value")))), + AstDSL.alias( + "avg(integer_value)", + AstDSL.aggregate("avg", field("integer_value")))), Collections.emptyList(), ImmutableList.of(), AstDSL.defaultStatsArgs()), @@ -535,8 +506,7 @@ public void project_source() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - DSL.named("double_value", DSL.ref("double_value", DOUBLE)) - ), + DSL.named("double_value", DSL.ref("double_value", DOUBLE))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), @@ -550,34 +520,25 @@ public void project_nested_field_arg() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression( - "nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING)), - null) - ); + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)), null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", 
table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info)", - function("nested", qualifiedName("message", "info")), null) - ) - ); + AstDSL.alias( + "nested(message.info)", + function("nested", qualifiedName("message", "info")), + null))); assertTrue(isNestedFunction(DSL.nested(DSL.ref("message.info", STRING)))); assertFalse(isNestedFunction(DSL.literal("fieldA"))); @@ -586,64 +547,51 @@ public void project_nested_field_arg() { @Test public void sort_with_nested_all_tuple_fields_throws_exception() { - assertThrows(UnsupportedOperationException.class, () -> analyze( - AstDSL.project( - AstDSL.sort( - AstDSL.relation("schema"), - field(nestedAllTupleFields("message")) - ), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")) - ) - )); + assertThrows( + UnsupportedOperationException.class, + () -> + analyze( + AstDSL.project( + AstDSL.sort(AstDSL.relation("schema"), field(nestedAllTupleFields("message"))), + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message"))))); } @Test public void filter_with_nested_all_tuple_fields_throws_exception() { - assertThrows(UnsupportedOperationException.class, () -> analyze( - AstDSL.project( - AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.function("=", nestedAllTupleFields("message"), AstDSL.intLiteral(1))), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")) - ) - )); + assertThrows( + UnsupportedOperationException.class, + () -> + analyze( + AstDSL.project( + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.function( + "=", nestedAllTupleFields("message"), AstDSL.intLiteral(1))), + AstDSL.alias("nested(message.*)", 
nestedAllTupleFields("message"))))); } - @Test public void project_nested_field_star_arg() { List> nestedArgs = List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)))); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")))); } @Test @@ -652,42 +600,29 @@ public void project_nested_field_star_arg_with_another_nested_function() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ), + "path", new ReferenceExpression("message", STRING)), Map.of( "field", new ReferenceExpression("comment.data", STRING), - "path", new ReferenceExpression("comment", STRING) - ) - ); + "path", new ReferenceExpression("comment", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - new NamedExpression("nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING))) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + new NamedExpression( + "nested(comment.data)", 
DSL.nested(DSL.ref("comment.data", STRING)))); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("nested(comment.data)", DSL.nested(DSL.ref("comment.data", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")), - AstDSL.alias("nested(comment.*)", - nestedAllTupleFields("comment")) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")), + AstDSL.alias("nested(comment.*)", nestedAllTupleFields("comment")))); } @Test @@ -696,38 +631,25 @@ public void project_nested_field_star_arg_with_another_field() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - new NamedExpression("comment.data", - DSL.ref("comment.data", STRING)) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + new NamedExpression("comment.data", DSL.ref("comment.data", STRING))); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("comment.data", - DSL.ref("comment.data", STRING)) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + 
DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("comment.data", DSL.ref("comment.data", STRING))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")), - AstDSL.alias("comment.data", - field("comment.data")) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")), + AstDSL.alias("comment.data", field("comment.data")))); } @Test @@ -736,41 +658,32 @@ public void project_nested_field_star_arg_with_highlight() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("highlight(fieldA)", - new HighlightExpression(DSL.literal("fieldA"))) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("highlight(fieldA)", new HighlightExpression(DSL.literal("fieldA")))); Map highlightArgs = new HashMap<>(); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.highlight(LogicalPlanDSL.relation("schema", table), - DSL.literal("fieldA"), highlightArgs), + LogicalPlanDSL.highlight( + LogicalPlanDSL.relation("schema", table), DSL.literal("fieldA"), highlightArgs), nestedArgs, projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("highlight(fieldA)", - new HighlightExpression(DSL.literal("fieldA"))) - ), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("highlight(fieldA)", new HighlightExpression(DSL.literal("fieldA")))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")), - 
AstDSL.alias("highlight(fieldA)", - new HighlightFunction(AstDSL.stringLiteral("fieldA"), highlightArgs)) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")), + AstDSL.alias( + "highlight(fieldA)", + new HighlightFunction(AstDSL.stringLiteral("fieldA"), highlightArgs)))); } @Test @@ -779,40 +692,29 @@ public void project_nested_field_and_path_args() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression( "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), - null) - ); + null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named( + "nested(message.info)", + DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info)", - function( - "nested", - qualifiedName("message", "info"), - qualifiedName("message") - ), - null - ) - ) - ); + AstDSL.alias( + "nested(message.info)", + function("nested", qualifiedName("message", "info"), qualifiedName("message")), + null))); } @Test @@ -821,34 +723,25 @@ public void project_nested_deep_field_arg() { List.of( Map.of( "field", new ReferenceExpression("message.info.id", STRING), - "path", new ReferenceExpression("message.info", STRING) - ) - ); + "path", new ReferenceExpression("message.info", STRING))); List projectList = List.of( new NamedExpression( - "nested(message.info.id)", - DSL.nested(DSL.ref("message.info.id", STRING)), - null) - ); + 
"nested(message.info.id)", DSL.nested(DSL.ref("message.info.id", STRING)), null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info.id)", - DSL.nested(DSL.ref("message.info.id", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info.id)", DSL.nested(DSL.ref("message.info.id", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info.id)", - function("nested", qualifiedName("message", "info", "id")), null) - ) - ); + AstDSL.alias( + "nested(message.info.id)", + function("nested", qualifiedName("message", "info", "id")), + null))); } @Test @@ -857,114 +750,102 @@ public void project_multiple_nested() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ), + "path", new ReferenceExpression("message", STRING)), Map.of( "field", new ReferenceExpression("comment.data", STRING), - "path", new ReferenceExpression("comment", STRING) - ) - ); + "path", new ReferenceExpression("comment", STRING))); List projectList = List.of( new NamedExpression( - "nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING)), - null), + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)), null), new NamedExpression( - "nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING)), - null) - ); + "nested(comment.data)", DSL.nested(DSL.ref("comment.data", STRING)), null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, 
projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("nested(comment.data)", DSL.nested(DSL.ref("comment.data", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info)", - function("nested", qualifiedName("message", "info")), null), - AstDSL.alias("nested(comment.data)", - function("nested", qualifiedName("comment", "data")), null) - ) - ); + AstDSL.alias( + "nested(message.info)", function("nested", qualifiedName("message", "info")), null), + AstDSL.alias( + "nested(comment.data)", + function("nested", qualifiedName("comment", "data")), + null))); } @Test public void project_nested_invalid_field_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested", qualifiedName("message")), null) - ) - ) - ); + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias( + "message", function("nested", qualifiedName("message")), null)))); assertEquals(exception.getMessage(), "Illegal nested field name: message"); } @Test public void project_nested_invalid_arg_type_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested", stringLiteral("message")), null) - ) - ) - ); + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias( + "message", function("nested", stringLiteral("message")), null)))); assertEquals(exception.getMessage(), "Illegal nested field name: 
message"); } @Test public void project_nested_no_args_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested"), null) - ) - ) - ); - assertEquals(exception.getMessage(), - "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)" - ); + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias("message", function("nested"), null)))); + assertEquals( + exception.getMessage(), + "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)"); } @Test public void project_nested_too_many_args_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested", - stringLiteral("message.info"), - stringLiteral("message"), - stringLiteral("message")), - null) - ) - ) - ); - assertEquals(exception.getMessage(), - "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)" - ); + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias( + "message", + function( + "nested", + stringLiteral("message.info"), + stringLiteral("message"), + stringLiteral("message")), + null)))); + assertEquals( + exception.getMessage(), + "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)"); } @Test @@ -975,18 +856,17 @@ public void project_highlight() { assertAnalyzeEqual( LogicalPlanDSL.project( - LogicalPlanDSL.highlight(LogicalPlanDSL.relation("schema", table), - DSL.literal("fieldA"), args), - DSL.named("highlight(fieldA, 
pre_tags='', post_tags='')", - new HighlightExpression(DSL.literal("fieldA"))) - ), + LogicalPlanDSL.highlight( + LogicalPlanDSL.relation("schema", table), DSL.literal("fieldA"), args), + DSL.named( + "highlight(fieldA, pre_tags='', post_tags='')", + new HighlightExpression(DSL.literal("fieldA")))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("highlight(fieldA, pre_tags='', post_tags='')", - new HighlightFunction(AstDSL.stringLiteral("fieldA"), args)) - ) - ); + AstDSL.alias( + "highlight(fieldA, pre_tags='', post_tags='')", + new HighlightFunction(AstDSL.stringLiteral("fieldA"), args)))); } @Test @@ -994,18 +874,13 @@ public void project_highlight_wildcard() { Map args = new HashMap<>(); assertAnalyzeEqual( LogicalPlanDSL.project( - LogicalPlanDSL.highlight(LogicalPlanDSL.relation("schema", table), - DSL.literal("*"), args), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ), + LogicalPlanDSL.highlight( + LogicalPlanDSL.relation("schema", table), DSL.literal("*"), args), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*")))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("highlight(*)", - new HighlightFunction(AstDSL.stringLiteral("*"), args)) - ) - ); + AstDSL.alias("highlight(*)", new HighlightFunction(AstDSL.stringLiteral("*"), args)))); } @Test @@ -1013,8 +888,8 @@ public void remove_source() { assertAnalyzeEqual( LogicalPlanDSL.remove( LogicalPlanDSL.relation("schema", table), - DSL.ref("integer_value", INTEGER), DSL.ref( - "double_value", DOUBLE)), + DSL.ref("integer_value", INTEGER), + DSL.ref("double_value", DOUBLE)), AstDSL.projectWithArg( AstDSL.relation("schema"), Collections.singletonList(argument("exclude", booleanLiteral(true))), @@ -1022,7 +897,8 @@ public void remove_source() { AstDSL.field("double_value"))); } - @Disabled("the project/remove command should shrink the type env. 
Should be enabled once " + @Disabled( + "the project/remove command should shrink the type env. Should be enabled once " + "https://github.com/opensearch-project/sql/issues/917 is resolved") @Test public void project_source_change_type_env() { @@ -1048,15 +924,12 @@ public void project_values() { LogicalPlanDSL.values(ImmutableList.of(DSL.literal(123))), DSL.named("123", DSL.literal(123)), DSL.named("hello", DSL.literal("hello")), - DSL.named("false", DSL.literal(false)) - ), + DSL.named("false", DSL.literal(false))), AstDSL.project( AstDSL.values(ImmutableList.of(AstDSL.intLiteral(123))), AstDSL.alias("123", AstDSL.intLiteral(123)), AstDSL.alias("hello", AstDSL.stringLiteral("hello")), - AstDSL.alias("false", AstDSL.booleanLiteral(false)) - ) - ); + AstDSL.alias("false", AstDSL.booleanLiteral(false)))); } @SuppressWarnings("unchecked") @@ -1069,8 +942,7 @@ public void sort_with_aggregator() { LogicalPlanDSL.relation("test", table), ImmutableList.of( DSL.named( - "avg(integer_value)", - DSL.avg(DSL.ref("integer_value", INTEGER)))), + "avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), // Aggregator in Sort AST node is replaced with reference by expression optimizer Pair.of(SortOption.DEFAULT_ASC, DSL.ref("avg(integer_value)", DOUBLE))), @@ -1081,12 +953,10 @@ public void sort_with_aggregator() { AstDSL.relation("test"), ImmutableList.of( AstDSL.alias( - "avg(integer_value)", - function("avg", qualifiedName("integer_value")))), + "avg(integer_value)", function("avg", qualifiedName("integer_value")))), emptyList(), ImmutableList.of(AstDSL.alias("string_value", qualifiedName("string_value"))), - emptyList() - ), + emptyList()), field( function("avg", qualifiedName("integer_value")), argument("asc", booleanLiteral(true)))), @@ -1098,40 +968,49 @@ public void sort_with_aggregator() { public void sort_with_options() { ImmutableMap argOptions = ImmutableMap.builder() - .put(new 
Argument[] {argument("asc", booleanLiteral(true))}, + .put( + new Argument[] {argument("asc", booleanLiteral(true))}, new SortOption(SortOrder.ASC, NullOrder.NULL_FIRST)) - .put(new Argument[] {argument("asc", booleanLiteral(false))}, + .put( + new Argument[] {argument("asc", booleanLiteral(false))}, new SortOption(SortOrder.DESC, NullOrder.NULL_LAST)) - .put(new Argument[] { - argument("asc", booleanLiteral(true)), - argument("nullFirst", booleanLiteral(true))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(true)), argument("nullFirst", booleanLiteral(true)) + }, new SortOption(SortOrder.ASC, NullOrder.NULL_FIRST)) - .put(new Argument[] { - argument("asc", booleanLiteral(true)), - argument("nullFirst", booleanLiteral(false))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(true)), + argument("nullFirst", booleanLiteral(false)) + }, new SortOption(SortOrder.ASC, NullOrder.NULL_LAST)) - .put(new Argument[] { - argument("asc", booleanLiteral(false)), - argument("nullFirst", booleanLiteral(true))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(false)), + argument("nullFirst", booleanLiteral(true)) + }, new SortOption(SortOrder.DESC, NullOrder.NULL_FIRST)) - .put(new Argument[] { - argument("asc", booleanLiteral(false)), - argument("nullFirst", booleanLiteral(false))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(false)), + argument("nullFirst", booleanLiteral(false)) + }, new SortOption(SortOrder.DESC, NullOrder.NULL_LAST)) .build(); - argOptions.forEach((args, expectOption) -> - assertAnalyzeEqual( - LogicalPlanDSL.project( - LogicalPlanDSL.sort( - LogicalPlanDSL.relation("test", table), - Pair.of(expectOption, DSL.ref("integer_value", INTEGER))), - DSL.named("string_value", DSL.ref("string_value", STRING))), - AstDSL.project( - AstDSL.sort( - AstDSL.relation("test"), - field(qualifiedName("integer_value"), args)), - AstDSL.alias("string_value", qualifiedName("string_value"))))); + argOptions.forEach( + (args, 
expectOption) -> + assertAnalyzeEqual( + LogicalPlanDSL.project( + LogicalPlanDSL.sort( + LogicalPlanDSL.relation("test", table), + Pair.of(expectOption, DSL.ref("integer_value", INTEGER))), + DSL.named("string_value", DSL.ref("string_value", STRING))), + AstDSL.project( + AstDSL.sort( + AstDSL.relation("test"), field(qualifiedName("integer_value"), args)), + AstDSL.alias("string_value", qualifiedName("string_value"))))); } @SuppressWarnings("unchecked") @@ -1156,7 +1035,8 @@ public void window_function() { AstDSL.project( AstDSL.relation("test"), AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")), - AstDSL.alias("window_function", + AstDSL.alias( + "window_function", AstDSL.window( AstDSL.function("row_number"), Collections.singletonList(AstDSL.qualifiedName("string_value")), @@ -1164,11 +1044,7 @@ public void window_function() { ImmutablePair.of(DEFAULT_ASC, AstDSL.qualifiedName("integer_value"))))))); } - /** - * SELECT name FROM ( - * SELECT name, age FROM test - * ) AS schema. - */ + /** SELECT name FROM ( SELECT name, age FROM test ) AS schema. 
*/ @Test public void from_subquery() { assertAnalyzeEqual( @@ -1176,29 +1052,19 @@ public void from_subquery() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), DSL.named("string_value", DSL.ref("string_value", STRING)), - DSL.named("integer_value", DSL.ref("integer_value", INTEGER)) - ), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER))), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.project( AstDSL.relationSubquery( AstDSL.project( AstDSL.relation("schema"), AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")), - AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value")) - ), - "schema" - ), - AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")) - ) - ); + AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"))), + "schema"), + AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")))); } - /** - * SELECT * FROM ( - * SELECT name FROM test - * ) AS schema. - */ + /** SELECT * FROM ( SELECT name FROM test ) AS schema. */ @Test public void select_all_from_subquery() { assertAnalyzeEqual( @@ -1206,147 +1072,130 @@ public void select_all_from_subquery() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), DSL.named("string_value", DSL.ref("string_value", STRING))), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.project( AstDSL.relationSubquery( AstDSL.project( AstDSL.relation("schema"), - AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")) - ), - "schema" - ), - AstDSL.allFields() - ) - ); + AstDSL.alias("string_value", AstDSL.qualifiedName("string_value"))), + "schema"), + AstDSL.allFields())); } /** - * Ensure Nested function falls back to legacy engine when used in GROUP BY clause. - * TODO Remove this test when support is added. 
+ * Ensure Nested function falls back to legacy engine when used in GROUP BY clause. TODO Remove + * this test when support is added. */ @Test public void nested_group_by_clause_throws_syntax_exception() { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> analyze( - AstDSL.project( - AstDSL.agg( - AstDSL.relation("schema"), - emptyList(), - emptyList(), - ImmutableList.of(alias("nested(message.info)", - function("nested", - qualifiedName("message", "info")))), - emptyList() - ))) - ); - assertEquals("Falling back to legacy engine. Nested function is not supported in WHERE," + SyntaxCheckException exception = + assertThrows( + SyntaxCheckException.class, + () -> + analyze( + AstDSL.project( + AstDSL.agg( + AstDSL.relation("schema"), + emptyList(), + emptyList(), + ImmutableList.of( + alias( + "nested(message.info)", + function("nested", qualifiedName("message", "info")))), + emptyList())))); + assertEquals( + "Falling back to legacy engine. Nested function is not supported in WHERE," + " GROUP BY, and HAVING clauses.", exception.getMessage()); } - /** - * SELECT name, AVG(age) FROM test GROUP BY name. - */ + /** SELECT name, AVG(age) FROM test GROUP BY name. 
*/ @Test public void sql_group_by_field() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL - .named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), DSL.named("string_value", DSL.ref("string_value", STRING)), DSL.named("AVG(integer_value)", DSL.ref("AVG(integer_value)", DOUBLE))), AstDSL.project( AstDSL.agg( AstDSL.relation("schema"), - ImmutableList.of(alias("AVG(integer_value)", - aggregate("AVG", qualifiedName("integer_value")))), + ImmutableList.of( + alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))), emptyList(), ImmutableList.of(alias("string_value", qualifiedName("string_value"))), emptyList()), AstDSL.alias("string_value", qualifiedName("string_value")), - AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))) - ); + AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value"))))); } - /** - * SELECT abs(name), AVG(age) FROM test GROUP BY abs(name). - */ + /** SELECT abs(name), AVG(age) FROM test GROUP BY abs(name). 
*/ @Test public void sql_group_by_function() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL - .named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("abs(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("abs(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("abs(long_value)", LONG)), DSL.named("AVG(integer_value)", DSL.ref("AVG(integer_value)", DOUBLE))), AstDSL.project( AstDSL.agg( AstDSL.relation("schema"), - ImmutableList.of(alias("AVG(integer_value)", - aggregate("AVG", qualifiedName("integer_value")))), + ImmutableList.of( + alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("abs(long_value)", function("abs", qualifiedName("long_value")))), + ImmutableList.of( + alias("abs(long_value)", function("abs", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))) - ); + AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value"))))); } - /** - * SELECT abs(name), AVG(age) FROM test GROUP BY ABS(name). - */ + /** SELECT abs(name), AVG(age) FROM test GROUP BY ABS(name). 
*/ @Test public void sql_group_by_function_in_uppercase() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL - .named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("ABS(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("ABS(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("ABS(long_value)", LONG)), DSL.named("AVG(integer_value)", DSL.ref("AVG(integer_value)", DOUBLE))), AstDSL.project( AstDSL.agg( AstDSL.relation("schema"), - ImmutableList.of(alias("AVG(integer_value)", - aggregate("AVG", qualifiedName("integer_value")))), + ImmutableList.of( + alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("ABS(long_value)", function("ABS", qualifiedName("long_value")))), + ImmutableList.of( + alias("ABS(long_value)", function("ABS", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))) - ); + AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value"))))); } - /** - * SELECT abs(name), abs(avg(age) FROM test GROUP BY abs(name). - */ + /** SELECT abs(name), abs(avg(age) FROM test GROUP BY abs(name). 
*/ @Test public void sql_expression_over_one_aggregation() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("avg(integer_value)", - DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("abs(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("abs(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("abs(long_value)", LONG)), DSL.named("abs(avg(integer_value)", DSL.abs(DSL.ref("avg(integer_value)", DOUBLE)))), AstDSL.project( @@ -1355,34 +1204,32 @@ public void sql_expression_over_one_aggregation() { ImmutableList.of( alias("avg(integer_value)", aggregate("avg", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("abs(long_value)", function("abs", qualifiedName("long_value")))), + ImmutableList.of( + alias("abs(long_value)", function("abs", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("abs(avg(integer_value)", - function("abs", aggregate("avg", qualifiedName("integer_value"))))) - ); + AstDSL.alias( + "abs(avg(integer_value)", + function("abs", aggregate("avg", qualifiedName("integer_value")))))); } - /** - * SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name). - */ + /** SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name). 
*/ @Test public void sql_expression_over_two_aggregation() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("sum(integer_value)", - DSL.sum(DSL.ref("integer_value", INTEGER))), - DSL.named("avg(integer_value)", - DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("abs(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("sum(integer_value)", DSL.sum(DSL.ref("integer_value", INTEGER))), + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("abs(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("abs(long_value)", LONG)), - DSL.named("sum(integer_value)-avg(integer_value)", - DSL.subtract(DSL.ref("sum(integer_value)", INTEGER), + DSL.named( + "sum(integer_value)-avg(integer_value)", + DSL.subtract( + DSL.ref("sum(integer_value)", INTEGER), DSL.ref("avg(integer_value)", DOUBLE)))), AstDSL.project( AstDSL.agg( @@ -1391,40 +1238,33 @@ public void sql_expression_over_two_aggregation() { alias("sum(integer_value)", aggregate("sum", qualifiedName("integer_value"))), alias("avg(integer_value)", aggregate("avg", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("abs(long_value)", function("abs", qualifiedName("long_value")))), + ImmutableList.of( + alias("abs(long_value)", function("abs", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("sum(integer_value)-avg(integer_value)", - function("-", aggregate("sum", qualifiedName("integer_value")), - aggregate("avg", qualifiedName("integer_value"))))) - ); + AstDSL.alias( + "sum(integer_value)-avg(integer_value)", + function( + "-", + aggregate("sum", qualifiedName("integer_value")), + aggregate("avg", qualifiedName("integer_value")))))); } @Test public void 
limit_offset() { assertAnalyzeEqual( LogicalPlanDSL.project( - LogicalPlanDSL.limit( - LogicalPlanDSL.relation("schema", table), - 1, 1 - ), - DSL.named("integer_value", DSL.ref("integer_value", INTEGER)) - ), + LogicalPlanDSL.limit(LogicalPlanDSL.relation("schema", table), 1, 1), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER))), AstDSL.project( - AstDSL.limit( - AstDSL.relation("schema"), - 1, 1 - ), - AstDSL.alias("integer_value", qualifiedName("integer_value")) - ) - ); + AstDSL.limit(AstDSL.relation("schema"), 1, 1), + AstDSL.alias("integer_value", qualifiedName("integer_value")))); } /** - * SELECT COUNT(NAME) FILTER(WHERE age > 1) FROM test. - * This test is to verify that the aggregator properties are taken - * when wrapping it to {@link org.opensearch.sql.expression.aggregation.NamedAggregator} + * SELECT COUNT(NAME) FILTER(WHERE age > 1) FROM test. This test is to verify that the aggregator + * properties are taken when wrapping it to {@link + * org.opensearch.sql.expression.aggregation.NamedAggregator} */ @Test public void named_aggregator_with_condition() { @@ -1433,36 +1273,37 @@ public void named_aggregator_with_condition() { LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), ImmutableList.of( - DSL.named("count(string_value) filter(where integer_value > 1)", - DSL.count(DSL.ref("string_value", STRING)).condition(DSL.greater(DSL.ref( - "integer_value", INTEGER), DSL.literal(1)))) - ), - emptyList() - ), - DSL.named("count(string_value) filter(where integer_value > 1)", DSL.ref( - "count(string_value) filter(where integer_value > 1)", INTEGER)) - ), + DSL.named( + "count(string_value) filter(where integer_value > 1)", + DSL.count(DSL.ref("string_value", STRING)) + .condition( + DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))))), + emptyList()), + DSL.named( + "count(string_value) filter(where integer_value > 1)", + DSL.ref("count(string_value) filter(where integer_value > 1)", INTEGER))), AstDSL.project( 
AstDSL.agg( AstDSL.relation("schema"), ImmutableList.of( - alias("count(string_value) filter(where integer_value > 1)", filteredAggregate( - "count", qualifiedName("string_value"), function( - ">", qualifiedName("integer_value"), intLiteral(1))))), + alias( + "count(string_value) filter(where integer_value > 1)", + filteredAggregate( + "count", + qualifiedName("string_value"), + function(">", qualifiedName("integer_value"), intLiteral(1))))), emptyList(), emptyList(), - emptyList() - ), - AstDSL.alias("count(string_value) filter(where integer_value > 1)", filteredAggregate( - "count", qualifiedName("string_value"), function( - ">", qualifiedName("integer_value"), intLiteral(1)))) - ) - ); + emptyList()), + AstDSL.alias( + "count(string_value) filter(where integer_value > 1)", + filteredAggregate( + "count", + qualifiedName("string_value"), + function(">", qualifiedName("integer_value"), intLiteral(1)))))); } - /** - * stats avg(integer_value) by string_value span(long_value, 10). - */ + /** stats avg(integer_value) by string_value span(long_value, 10). 
*/ @Test public void ppl_stats_by_fieldAndSpan() { assertAnalyzeEqual( @@ -1489,10 +1330,13 @@ public void parse_relation_with_grok_expression() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("grok_field", - DSL.grok(DSL.ref("string_value", STRING), DSL.literal("%{IPV4:grok_field}"), - DSL.literal("grok_field")))) - ), + ImmutableList.of( + DSL.named( + "grok_field", + DSL.grok( + DSL.ref("string_value", STRING), + DSL.literal("%{IPV4:grok_field}"), + DSL.literal("grok_field"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1500,8 +1344,7 @@ public void parse_relation_with_grok_expression() { AstDSL.field("string_value"), AstDSL.stringLiteral("%{IPV4:grok_field}"), ImmutableMap.of()), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test @@ -1510,10 +1353,13 @@ public void parse_relation_with_regex_expression() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("group", - DSL.regex(DSL.ref("string_value", STRING), DSL.literal("(?.*)"), - DSL.literal("group")))) - ), + ImmutableList.of( + DSL.named( + "group", + DSL.regex( + DSL.ref("string_value", STRING), + DSL.literal("(?.*)"), + DSL.literal("group"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1521,25 +1367,28 @@ public void parse_relation_with_regex_expression() { AstDSL.field("string_value"), AstDSL.stringLiteral("(?.*)"), ImmutableMap.of()), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test public void parse_relation_with_patterns_expression() { - Map arguments = ImmutableMap.builder() - .put("new_field", AstDSL.stringLiteral("custom_field")) - 
.put("pattern", AstDSL.stringLiteral("custom_pattern")) - .build(); + Map arguments = + ImmutableMap.builder() + .put("new_field", AstDSL.stringLiteral("custom_field")) + .put("pattern", AstDSL.stringLiteral("custom_pattern")) + .build(); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("custom_field", - DSL.patterns(DSL.ref("string_value", STRING), DSL.literal("custom_pattern"), - DSL.literal("custom_field")))) - ), + ImmutableList.of( + DSL.named( + "custom_field", + DSL.patterns( + DSL.ref("string_value", STRING), + DSL.literal("custom_pattern"), + DSL.literal("custom_field"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1547,8 +1396,7 @@ public void parse_relation_with_patterns_expression() { AstDSL.field("string_value"), AstDSL.stringLiteral("custom_pattern"), arguments), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test @@ -1557,10 +1405,13 @@ public void parse_relation_with_patterns_expression_no_args() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("patterns_field", - DSL.patterns(DSL.ref("string_value", STRING), DSL.literal(""), - DSL.literal("patterns_field")))) - ), + ImmutableList.of( + DSL.named( + "patterns_field", + DSL.patterns( + DSL.ref("string_value", STRING), + DSL.literal(""), + DSL.literal("patterns_field"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1568,89 +1419,109 @@ public void parse_relation_with_patterns_expression_no_args() { AstDSL.field("string_value"), AstDSL.stringLiteral(""), ImmutableMap.of()), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test 
public void kmeanns_relation() { - Map argumentMap = new HashMap() {{ - put("centroids", new Literal(3, DataType.INTEGER)); - put("iterations", new Literal(2, DataType.INTEGER)); - put("distance_type", new Literal("COSINE", DataType.STRING)); - }}; + Map argumentMap = + new HashMap() { + { + put("centroids", new Literal(3, DataType.INTEGER)); + put("iterations", new Literal(2, DataType.INTEGER)); + put("distance_type", new Literal("COSINE", DataType.STRING)); + } + }; assertAnalyzeEqual( - new LogicalMLCommons(LogicalPlanDSL.relation("schema", table), - "kmeans", argumentMap), - new Kmeans(AstDSL.relation("schema"), argumentMap) - ); + new LogicalMLCommons(LogicalPlanDSL.relation("schema", table), "kmeans", argumentMap), + new Kmeans(AstDSL.relation("schema"), argumentMap)); } @Test public void ad_batchRCF_relation() { Map argumentMap = - new HashMap() {{ + new HashMap() { + { put("shingle_size", new Literal(8, DataType.INTEGER)); - }}; + } + }; assertAnalyzeEqual( new LogicalAD(LogicalPlanDSL.relation("schema", table), argumentMap), - new AD(AstDSL.relation("schema"), argumentMap) - ); + new AD(AstDSL.relation("schema"), argumentMap)); } @Test public void ad_fitRCF_relation() { - Map argumentMap = new HashMap() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal("timestamp", DataType.STRING)); - }}; + Map argumentMap = + new HashMap() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new Literal("timestamp", DataType.STRING)); + } + }; assertAnalyzeEqual( - new LogicalAD(LogicalPlanDSL.relation("schema", table), - argumentMap), - new AD(AstDSL.relation("schema"), argumentMap) - ); + new LogicalAD(LogicalPlanDSL.relation("schema", table), argumentMap), + new AD(AstDSL.relation("schema"), argumentMap)); } @Test public void ad_fitRCF_relation_with_time_field() { - Map argumentMap 
= new HashMap() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal("ts", DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new Literal("ts", DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 3); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("score", DSL.ref("score", DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("anomaly_grade", DSL.ref("anomaly_grade", DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("ts", DSL.ref("ts", TIMESTAMP)))); } @Test public void ad_fitRCF_relation_without_time_field() { - Map argumentMap = new HashMap<>() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - }}; + Map argumentMap = + new HashMap<>() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + } + }; - LogicalPlan actual = analyze(AstDSL.project( - new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + LogicalPlan actual = + analyze(AstDSL.project(new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) 
actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("score", DSL.ref("score", DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("anomalous", DSL.ref("anomalous", BOOLEAN)))); } @Test public void table_function() { - assertAnalyzeEqual(new LogicalRelation("query_range", table), - AstDSL.tableFunction(List.of("prometheus", "query_range"), + assertAnalyzeEqual( + new LogicalRelation("query_range", table), + AstDSL.tableFunction( + List.of("prometheus", "query_range"), unresolvedArg("query", stringLiteral("http_latency")), unresolvedArg("starttime", intLiteral(12345)), unresolvedArg("endtime", intLiteral(12345)), @@ -1659,158 +1530,214 @@ public void table_function() { @Test public void table_function_with_no_datasource() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> analyze(AstDSL.tableFunction(List.of("query_range"), - unresolvedArg("query", stringLiteral("http_latency")), - unresolvedArg("", intLiteral(12345)), - unresolvedArg("", intLiteral(12345)), - unresolvedArg(null, intLiteral(14))))); - assertEquals("unsupported function name: query_range", - exception.getMessage()); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + analyze( + AstDSL.tableFunction( + List.of("query_range"), + unresolvedArg("query", stringLiteral("http_latency")), + unresolvedArg("", intLiteral(12345)), + unresolvedArg("", intLiteral(12345)), + unresolvedArg(null, intLiteral(14))))); + assertEquals("unsupported function name: query_range", exception.getMessage()); } @Test public void table_function_with_wrong_datasource() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> analyze(AstDSL.tableFunction(Arrays.asList("prome", "query_range"), - unresolvedArg("query", 
stringLiteral("http_latency")), - unresolvedArg("", intLiteral(12345)), - unresolvedArg("", intLiteral(12345)), - unresolvedArg(null, intLiteral(14))))); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + analyze( + AstDSL.tableFunction( + Arrays.asList("prome", "query_range"), + unresolvedArg("query", stringLiteral("http_latency")), + unresolvedArg("", intLiteral(12345)), + unresolvedArg("", intLiteral(12345)), + unresolvedArg(null, intLiteral(14))))); assertEquals("unsupported function name: prome.query_range", exception.getMessage()); } @Test public void table_function_with_wrong_table_function() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> analyze(AstDSL.tableFunction(Arrays.asList("prometheus", "queryrange"), - unresolvedArg("query", stringLiteral("http_latency")), - unresolvedArg("", intLiteral(12345)), - unresolvedArg("", intLiteral(12345)), - unresolvedArg(null, intLiteral(14))))); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + analyze( + AstDSL.tableFunction( + Arrays.asList("prometheus", "queryrange"), + unresolvedArg("query", stringLiteral("http_latency")), + unresolvedArg("", intLiteral(12345)), + unresolvedArg("", intLiteral(12345)), + unresolvedArg(null, intLiteral(14))))); assertEquals("unsupported function name: queryrange", exception.getMessage()); } @Test public void show_datasources() { - assertAnalyzeEqual(new LogicalRelation(DATASOURCES_TABLE_NAME, - new DataSourceTable(dataSourceService)), + assertAnalyzeEqual( + new LogicalRelation(DATASOURCES_TABLE_NAME, new DataSourceTable(dataSourceService)), AstDSL.relation(qualifiedName(DATASOURCES_TABLE_NAME))); } @Test public void ml_relation_unsupported_action() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal("unsupported", DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - }}; + Map 
argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal("unsupported", DataType.STRING)); + put(ALGO, new Literal(KMEANS, DataType.STRING)); + } + }; IllegalArgumentException exception = - assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.project( + new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); assertEquals( - "Action error. Please indicate train, predict or trainandpredict.", - exception.getMessage()); + "Action error. Please indicate train, predict or trainandpredict.", exception.getMessage()); } @Test public void ml_relation_unsupported_algorithm() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal("unsupported", DataType.STRING)); - }}; + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal("unsupported", DataType.STRING)); + } + }; IllegalArgumentException exception = - assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); - assertEquals( - "Unsupported algorithm: unsupported", - exception.getMessage()); + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.project( + new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); + assertEquals("Unsupported algorithm: unsupported", exception.getMessage()); } @Test public void ml_relation_train_sync() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(TRAIN, DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new 
Literal(TRAIN, DataType.STRING)); + put(ALGO, new Literal(KMEANS, DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(STATUS, DSL.ref(STATUS, STRING)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(MODELID, DSL.ref(MODELID, STRING)))); } @Test public void ml_relation_train_async() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(TRAIN, DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - put(ASYNC, new Literal(true, DataType.BOOLEAN)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(TRAIN, DataType.STRING)); + put(ALGO, new Literal(KMEANS, DataType.STRING)); + put(ASYNC, new Literal(true, DataType.BOOLEAN)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(STATUS, DSL.ref(STATUS, STRING)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(TASKID, DSL.ref(TASKID, STRING)))); } @Test public void ml_relation_predict_kmeans() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new 
ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal(KMEANS, DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 1); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(CLUSTERID, DSL.ref(CLUSTERID, INTEGER)))); } @Test public void ml_relation_predict_rcf_with_time_field() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal(RCF, DataType.STRING)); - put(RCF_TIME_FIELD, new Literal("ts", DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal(RCF, DataType.STRING)); + put(RCF_TIME_FIELD, new Literal("ts", DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 3); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_SCORE, DSL.ref(RCF_SCORE, DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_ANOMALY_GRADE, DSL.ref(RCF_ANOMALY_GRADE, DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("ts", DSL.ref("ts", TIMESTAMP)))); } @Test public void ml_relation_predict_rcf_without_time_field() 
{ - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal(RCF, DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal(RCF, DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_SCORE, DSL.ref(RCF_SCORE, DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_ANOMALOUS, DSL.ref(RCF_ANOMALOUS, BOOLEAN)))); } @@ -1825,8 +1752,10 @@ public void visit_paginate() { void visit_cursor() { LogicalPlan actual = analyze((new FetchCursor("test"))); assertTrue(actual instanceof LogicalFetchCursor); - assertEquals(new LogicalFetchCursor("test", - dataSourceService.getDataSource("@opensearch").getStorageEngine()), actual); + assertEquals( + new LogicalFetchCursor( + "test", dataSourceService.getDataSource("@opensearch").getStorageEngine()), + actual); } @Test @@ -1835,7 +1764,7 @@ public void visit_close_cursor() { assertAll( () -> assertTrue(analyzed instanceof LogicalCloseCursor), () -> assertTrue(analyzed.getChild().get(0) instanceof LogicalFetchCursor), - () -> assertEquals("pewpew", ((LogicalFetchCursor) analyzed.getChild().get(0)).getCursor()) - ); + () -> + assertEquals("pewpew", ((LogicalFetchCursor) analyzed.getChild().get(0)).getCursor())); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java index 
b6e2600041..f09bc5d380 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -47,7 +46,6 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; - public class AnalyzerTestBase { protected Map typeMapping() { @@ -92,31 +90,34 @@ public Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableNam } protected Table table() { - return Optional.ofNullable(table).orElseGet(() -> new Table() { - @Override - public boolean exists() { - return true; - } - - @Override - public void create(Map schema) { - throw new UnsupportedOperationException("Create table is not supported"); - } - - @Override - public Map getFieldTypes() { - return typeMapping(); - } - - @Override - public PhysicalPlan implement(LogicalPlan plan) { - throw new UnsupportedOperationException(); - } - - public Map getReservedFieldTypes() { - return ImmutableMap.of("_test", STRING); - } - }); + return Optional.ofNullable(table) + .orElseGet( + () -> + new Table() { + @Override + public boolean exists() { + return true; + } + + @Override + public void create(Map schema) { + throw new UnsupportedOperationException("Create table is not supported"); + } + + @Override + public Map getFieldTypes() { + return typeMapping(); + } + + @Override + public PhysicalPlan implement(LogicalPlan plan) { + throw new UnsupportedOperationException(); + } + + public Map getReservedFieldTypes() { + return ImmutableMap.of("_test", STRING); + } + }); } protected DataSourceService dataSourceService() { @@ -125,10 +126,12 @@ protected DataSourceService dataSourceService() { protected SymbolTable symbolTable() { SymbolTable symbolTable = new SymbolTable(); - typeMapping().entrySet() + typeMapping() + .entrySet() .forEach( - entry 
-> symbolTable - .store(new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); + entry -> + symbolTable.store( + new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); return symbolTable; } @@ -154,8 +157,8 @@ protected Environment typeEnv() { protected Analyzer analyzer = analyzer(expressionAnalyzer(), dataSourceService); - protected Analyzer analyzer(ExpressionAnalyzer expressionAnalyzer, - DataSourceService dataSourceService) { + protected Analyzer analyzer( + ExpressionAnalyzer expressionAnalyzer, DataSourceService dataSourceService) { BuiltinFunctionRepository functionRepository = BuiltinFunctionRepository.getInstance(); return new Analyzer(expressionAnalyzer, dataSourceService, functionRepository); } @@ -182,18 +185,22 @@ protected LogicalPlan analyze(UnresolvedPlan unresolvedPlan) { private class DefaultDataSourceService implements DataSourceService { - private final DataSource opensearchDataSource = new DataSource(DEFAULT_DATASOURCE_NAME, - DataSourceType.OPENSEARCH, storageEngine()); - private final DataSource prometheusDataSource - = new DataSource("prometheus", DataSourceType.PROMETHEUS, prometheusStorageEngine()); - + private final DataSource opensearchDataSource = + new DataSource(DEFAULT_DATASOURCE_NAME, DataSourceType.OPENSEARCH, storageEngine()); + private final DataSource prometheusDataSource = + new DataSource("prometheus", DataSourceType.PROMETHEUS, prometheusStorageEngine()); @Override public Set getDataSourceMetadata(boolean isDefaultDataSourceRequired) { return Stream.of(opensearchDataSource, prometheusDataSource) - .map(ds -> new DataSourceMetadata(ds.getName(), - ds.getConnectorType(),Collections.emptyList(), - ImmutableMap.of())).collect(Collectors.toSet()); + .map( + ds -> + new DataSourceMetadata( + ds.getName(), + ds.getConnectorType(), + Collections.emptyList(), + ImmutableMap.of())) + .collect(Collectors.toSet()); } @Override @@ -216,18 +223,14 @@ public DataSource getDataSource(String dataSourceName) { } 
@Override - public void updateDataSource(DataSourceMetadata dataSourceMetadata) { - - } + public void updateDataSource(DataSourceMetadata dataSourceMetadata) {} @Override - public void deleteDataSource(String dataSourceName) { - } + public void deleteDataSource(String dataSourceName) {} @Override public Boolean dataSourceExists(String dataSourceName) { - return dataSourceName.equals(DEFAULT_DATASOURCE_NAME) - || dataSourceName.equals("prometheus"); + return dataSourceName.equals(DEFAULT_DATASOURCE_NAME) || dataSourceName.equals("prometheus"); } } @@ -239,8 +242,8 @@ private class TestTableFunctionImplementation implements TableFunctionImplementa private Table table; - public TestTableFunctionImplementation(FunctionName functionName, List arguments, - Table table) { + public TestTableFunctionImplementation( + FunctionName functionName, List arguments, Table table) { this.functionName = functionName; this.arguments = arguments; this.table = table; diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java index 5a05c79132..9d30ebeaab 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static java.util.Collections.emptyList; @@ -57,64 +56,50 @@ class ExpressionAnalyzerTest extends AnalyzerTestBase { public void equal() { assertAnalyzeEqual( DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1))), - AstDSL.equalTo(AstDSL.unresolvedAttr("integer_value"), AstDSL.intLiteral(1)) - ); + AstDSL.equalTo(AstDSL.unresolvedAttr("integer_value"), AstDSL.intLiteral(1))); } @Test public void and() { assertAnalyzeEqual( DSL.and(DSL.ref("boolean_value", BOOLEAN), DSL.literal(LITERAL_TRUE)), - AstDSL.and(AstDSL.unresolvedAttr("boolean_value"), 
AstDSL.booleanLiteral(true)) - ); + AstDSL.and(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true))); } @Test public void or() { assertAnalyzeEqual( DSL.or(DSL.ref("boolean_value", BOOLEAN), DSL.literal(LITERAL_TRUE)), - AstDSL.or(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true)) - ); + AstDSL.or(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true))); } @Test public void xor() { assertAnalyzeEqual( DSL.xor(DSL.ref("boolean_value", BOOLEAN), DSL.literal(LITERAL_TRUE)), - AstDSL.xor(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true)) - ); + AstDSL.xor(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true))); } @Test public void not() { assertAnalyzeEqual( DSL.not(DSL.ref("boolean_value", BOOLEAN)), - AstDSL.not(AstDSL.unresolvedAttr("boolean_value")) - ); + AstDSL.not(AstDSL.unresolvedAttr("boolean_value"))); } @Test public void qualified_name() { - assertAnalyzeEqual( - DSL.ref("integer_value", INTEGER), - qualifiedName("integer_value") - ); + assertAnalyzeEqual(DSL.ref("integer_value", INTEGER), qualifiedName("integer_value")); } @Test public void between() { assertAnalyzeEqual( DSL.and( - DSL.gte( - DSL.ref("integer_value", INTEGER), - DSL.literal(20)), - DSL.lte( - DSL.ref("integer_value", INTEGER), - DSL.literal(30))), + DSL.gte(DSL.ref("integer_value", INTEGER), DSL.literal(20)), + DSL.lte(DSL.ref("integer_value", INTEGER), DSL.literal(30))), AstDSL.between( - qualifiedName("integer_value"), - AstDSL.intLiteral(20), - AstDSL.intLiteral(30))); + qualifiedName("integer_value"), AstDSL.intLiteral(20), AstDSL.intLiteral(30))); } @Test @@ -149,36 +134,38 @@ public void case_conditions() { AstDSL.caseWhen( null, AstDSL.when( - AstDSL.function(">", - qualifiedName("integer_value"), - AstDSL.intLiteral(50)), AstDSL.stringLiteral("Fifty")), + AstDSL.function(">", qualifiedName("integer_value"), AstDSL.intLiteral(50)), + AstDSL.stringLiteral("Fifty")), AstDSL.when( - AstDSL.function(">", - 
qualifiedName("integer_value"), - AstDSL.intLiteral(30)), AstDSL.stringLiteral("Thirty")))); + AstDSL.function(">", qualifiedName("integer_value"), AstDSL.intLiteral(30)), + AstDSL.stringLiteral("Thirty")))); } @Test public void castAnalyzer() { assertAnalyzeEqual( DSL.castInt(DSL.ref("boolean_value", BOOLEAN)), - AstDSL.cast(AstDSL.unresolvedAttr("boolean_value"), AstDSL.stringLiteral("INT")) - ); + AstDSL.cast(AstDSL.unresolvedAttr("boolean_value"), AstDSL.stringLiteral("INT"))); - assertThrows(IllegalStateException.class, () -> analyze(AstDSL.cast(AstDSL.unresolvedAttr( - "boolean_value"), AstDSL.stringLiteral("INTERVAL")))); + assertThrows( + IllegalStateException.class, + () -> + analyze( + AstDSL.cast( + AstDSL.unresolvedAttr("boolean_value"), AstDSL.stringLiteral("INTERVAL")))); } @Test public void case_with_default_result_type_different() { - UnresolvedExpression caseWhen = AstDSL.caseWhen( - qualifiedName("integer_value"), - AstDSL.intLiteral(60), - AstDSL.when(AstDSL.intLiteral(30), AstDSL.stringLiteral("Thirty")), - AstDSL.when(AstDSL.intLiteral(50), AstDSL.stringLiteral("Fifty"))); - - SemanticCheckException exception = assertThrows( - SemanticCheckException.class, () -> analyze(caseWhen)); + UnresolvedExpression caseWhen = + AstDSL.caseWhen( + qualifiedName("integer_value"), + AstDSL.intLiteral(60), + AstDSL.when(AstDSL.intLiteral(30), AstDSL.stringLiteral("Thirty")), + AstDSL.when(AstDSL.intLiteral(50), AstDSL.stringLiteral("Fifty"))); + + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, () -> analyze(caseWhen)); assertEquals( "All result types of CASE clause must be the same, but found [STRING, STRING, INTEGER]", exception.getMessage()); @@ -187,8 +174,7 @@ public void case_with_default_result_type_different() { @Test public void scalar_window_function() { assertAnalyzeEqual( - DSL.rank(), - AstDSL.window(AstDSL.function("rank"), emptyList(), emptyList())); + DSL.rank(), AstDSL.window(AstDSL.function("rank"), 
emptyList(), emptyList())); } @SuppressWarnings("unchecked") @@ -197,9 +183,7 @@ public void aggregate_window_function() { assertAnalyzeEqual( new AggregateWindowFunction(DSL.avg(DSL.ref("integer_value", INTEGER))), AstDSL.window( - AstDSL.aggregate("avg", qualifiedName("integer_value")), - emptyList(), - emptyList())); + AstDSL.aggregate("avg", qualifiedName("integer_value")), emptyList(), emptyList())); } @Test @@ -207,26 +191,24 @@ public void qualified_name_with_qualifier() { analysisContext.push(); analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( - DSL.ref("integer_value", INTEGER), - qualifiedName("index_alias", "integer_value") - ); + DSL.ref("integer_value", INTEGER), qualifiedName("index_alias", "integer_value")); analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "object_field"), STRUCT); - analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "object_field.integer_value"), - INTEGER); + analysisContext + .peek() + .define(new Symbol(Namespace.FIELD_NAME, "object_field.integer_value"), INTEGER); assertAnalyzeEqual( DSL.ref("object_field.integer_value", INTEGER), - qualifiedName("object_field", "integer_value") - ); + qualifiedName("object_field", "integer_value")); SyntaxCheckException exception = - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> analyze(qualifiedName("nested_field", "integer_value"))); assertEquals( "The qualifier [nested_field] of qualified name [nested_field.integer_value] " + "must be an field name, index name or its alias", - exception.getMessage() - ); + exception.getMessage()); analysisContext.pop(); } @@ -237,21 +219,12 @@ public void qualified_name_with_reserved_symbol() { analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_reserved"), STRING); analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_priority"), FLOAT); analysisContext.peek().define(new 
Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); - assertAnalyzeEqual( - DSL.ref("_priority", FLOAT), - qualifiedName("_priority") - ); - assertAnalyzeEqual( - DSL.ref("_reserved", STRING), - qualifiedName("index_alias", "_reserved") - ); + assertAnalyzeEqual(DSL.ref("_priority", FLOAT), qualifiedName("_priority")); + assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); // reserved fields take priority over symbol table analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "_reserved"), LONG); - assertAnalyzeEqual( - DSL.ref("_reserved", STRING), - qualifiedName("index_alias", "_reserved") - ); + assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); analysisContext.pop(); } @@ -265,9 +238,7 @@ public void interval() { @Test public void all_fields() { - assertAnalyzeEqual( - DSL.literal("*"), - AllFields.of()); + assertAnalyzeEqual(DSL.literal("*"), AllFields.of()); } @Test @@ -281,25 +252,30 @@ public void case_clause() { AstDSL.caseWhen( AstDSL.nullLiteral(), AstDSL.when( - AstDSL.function("=", - qualifiedName("integer_value"), - AstDSL.intLiteral(30)), + AstDSL.function("=", qualifiedName("integer_value"), AstDSL.intLiteral(30)), AstDSL.stringLiteral("test")))); } @Test public void undefined_var_semantic_check_failed() { - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> analyze( - AstDSL.and(AstDSL.unresolvedAttr("undefined_field"), AstDSL.booleanLiteral(true)))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=undefined_field) in type env", + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> + analyze( + AstDSL.and( + AstDSL.unresolvedAttr("undefined_field"), AstDSL.booleanLiteral(true)))); + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=undefined_field) in type env", exception.getMessage()); } @Test public void undefined_aggregation_function() { - 
SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> analyze(AstDSL.aggregate("ESTDC_ERROR", field("integer_value")))); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> analyze(AstDSL.aggregate("ESTDC_ERROR", field("integer_value")))); assertEquals("Unsupported aggregation function ESTDC_ERROR", exception.getMessage()); } @@ -308,25 +284,24 @@ public void aggregation_filter() { assertAnalyzeEqual( DSL.avg(DSL.ref("integer_value", INTEGER)) .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), - AstDSL.filteredAggregate("avg", qualifiedName("integer_value"), - function(">", qualifiedName("integer_value"), intLiteral(1))) - ); + AstDSL.filteredAggregate( + "avg", + qualifiedName("integer_value"), + function(">", qualifiedName("integer_value"), intLiteral(1)))); } @Test public void variance_mapto_varPop() { assertAnalyzeEqual( DSL.varPop(DSL.ref("integer_value", INTEGER)), - AstDSL.aggregate("variance", qualifiedName("integer_value")) - ); + AstDSL.aggregate("variance", qualifiedName("integer_value"))); } @Test public void distinct_count() { assertAnalyzeEqual( DSL.distinctCount(DSL.ref("integer_value", INTEGER)), - AstDSL.distinctAggregate("count", qualifiedName("integer_value")) - ); + AstDSL.distinctAggregate("count", qualifiedName("integer_value"))); } @Test @@ -334,48 +309,49 @@ public void filtered_distinct_count() { assertAnalyzeEqual( DSL.distinctCount(DSL.ref("integer_value", INTEGER)) .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), - AstDSL.filteredDistinctCount("count", qualifiedName("integer_value"), function( - ">", qualifiedName("integer_value"), intLiteral(1))) - ); + AstDSL.filteredDistinctCount( + "count", + qualifiedName("integer_value"), + function(">", qualifiedName("integer_value"), intLiteral(1)))); } @Test public void take_aggregation() { assertAnalyzeEqual( DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), - 
AstDSL.aggregate("take", qualifiedName("string_value"), intLiteral(10)) - ); + AstDSL.aggregate("take", qualifiedName("string_value"), intLiteral(10))); } @Test public void named_argument() { assertAnalyzeEqual( DSL.namedArgument("arg_name", DSL.literal("query")), - AstDSL.unresolvedArg("arg_name", stringLiteral("query")) - ); + AstDSL.unresolvedArg("arg_name", stringLiteral("query"))); } @Test public void named_parse_expression() { analysisContext.push(); analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "string_field"), STRING); - analysisContext.getNamedParseExpressions() - .add(DSL.named("group", - DSL.regex(ref("string_field", STRING), DSL.literal("(?\\d+)"), - DSL.literal("group")))); + analysisContext + .getNamedParseExpressions() + .add( + DSL.named( + "group", + DSL.regex( + ref("string_field", STRING), + DSL.literal("(?\\d+)"), + DSL.literal("group")))); assertAnalyzeEqual( - DSL.regex(ref("string_field", STRING), DSL.literal("(?\\d+)"), - DSL.literal("group")), - qualifiedName("group") - ); + DSL.regex(ref("string_field", STRING), DSL.literal("(?\\d+)"), DSL.literal("group")), + qualifiedName("group")); } @Test public void named_non_parse_expression() { analysisContext.push(); analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "string_field"), STRING); - analysisContext.getNamedParseExpressions() - .add(DSL.named("string_field", DSL.literal("123"))); + analysisContext.getNamedParseExpressions().add(DSL.named("string_field", DSL.literal("123"))); assertAnalyzeEqual(DSL.ref("string_field", STRING), qualifiedName("string_field")); } @@ -385,25 +361,29 @@ void match_bool_prefix_expression() { DSL.match_bool_prefix( DSL.namedArgument("field", DSL.literal("field_value1")), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("match_bool_prefix", + AstDSL.function( + "match_bool_prefix", AstDSL.unresolvedArg("field", stringLiteral("field_value1")), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } 
@Test void match_bool_prefix_wrong_expression() { - assertThrows(SemanticCheckException.class, - () -> analyze(AstDSL.function("match_bool_prefix", - AstDSL.unresolvedArg("field", stringLiteral("fieldA")), - AstDSL.unresolvedArg("query", floatLiteral(1.2f))))); + assertThrows( + SemanticCheckException.class, + () -> + analyze( + AstDSL.function( + "match_bool_prefix", + AstDSL.unresolvedArg("field", stringLiteral("fieldA")), + AstDSL.unresolvedArg("query", floatLiteral(1.2f))))); } @Test void visit_span() { assertAnalyzeEqual( DSL.span(DSL.ref("integer_value", INTEGER), DSL.literal(1), ""), - AstDSL.span(qualifiedName("integer_value"), intLiteral(1), SpanUnit.NONE) - ); + AstDSL.span(qualifiedName("integer_value"), intLiteral(1), SpanUnit.NONE)); } @Test @@ -425,13 +405,16 @@ void visit_in() { void multi_match_expression() { assertAnalyzeEqual( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("multi_match", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "multi_match", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @@ -439,14 +422,17 @@ void multi_match_expression() { void multi_match_expression_with_params() { assertAnalyzeEqual( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", 
ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("sample query")), DSL.namedArgument("analyzer", DSL.literal("keyword"))), - AstDSL.function("multi_match", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "multi_match", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")), AstDSL.unresolvedArg("analyzer", stringLiteral("keyword")))); } @@ -455,14 +441,20 @@ void multi_match_expression_with_params() { void multi_match_expression_two_fields() { assertAnalyzeEqual( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F), - "field_value2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field_value1", ExprValueUtils.floatValue(1.F), + "field_value2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("multi_match", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field_value1", 1.F, "field_value2", .3F))), + AstDSL.function( + "multi_match", + AstDSL.unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field_value1", 1.F, "field_value2", .3F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @@ -470,13 +462,16 @@ void multi_match_expression_two_fields() { void simple_query_string_expression() { assertAnalyzeEqual( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), 
DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("simple_query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "simple_query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @@ -484,14 +479,17 @@ void simple_query_string_expression() { void simple_query_string_expression_with_params() { assertAnalyzeEqual( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("sample query")), DSL.namedArgument("analyzer", DSL.literal("keyword"))), - AstDSL.function("simple_query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "simple_query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")), AstDSL.unresolvedArg("analyzer", stringLiteral("keyword")))); } @@ -500,37 +498,44 @@ void simple_query_string_expression_with_params() { void simple_query_string_expression_two_fields() { assertAnalyzeEqual( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F), - "field_value2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field_value1", ExprValueUtils.floatValue(1.F), + "field_value2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("query", 
DSL.literal("sample query"))), - AstDSL.function("simple_query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field_value1", 1.F, "field_value2", .3F))), + AstDSL.function( + "simple_query_string", + AstDSL.unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field_value1", 1.F, "field_value2", .3F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @Test void query_expression() { assertAnalyzeEqual( - DSL.query( - DSL.namedArgument("query", DSL.literal("field:query"))), - AstDSL.function("query", - AstDSL.unresolvedArg("query", stringLiteral("field:query")))); + DSL.query(DSL.namedArgument("query", DSL.literal("field:query"))), + AstDSL.function("query", AstDSL.unresolvedArg("query", stringLiteral("field:query")))); } @Test void query_string_expression() { assertAnalyzeEqual( DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("query_value"))), - AstDSL.function("query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("query_value")))); } @@ -538,14 +543,17 @@ void query_string_expression() { void query_string_expression_with_params() { assertAnalyzeEqual( DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", 
ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("query_value")), DSL.namedArgument("escape", DSL.literal("false"))), - AstDSL.function("query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("query_value")), AstDSL.unresolvedArg("escape", stringLiteral("false")))); } @@ -554,14 +562,20 @@ void query_string_expression_with_params() { void query_string_expression_two_fields() { assertAnalyzeEqual( DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F), - "field_value2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field_value1", ExprValueUtils.floatValue(1.F), + "field_value2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("query", DSL.literal("query_value"))), - AstDSL.function("query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field_value1", 1.F, "field_value2", .3F))), + AstDSL.function( + "query_string", + AstDSL.unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field_value1", 1.F, "field_value2", .3F))), AstDSL.unresolvedArg("query", stringLiteral("query_value")))); } @@ -571,7 +585,8 @@ void wildcard_query_expression() { DSL.wildcard_query( DSL.namedArgument("field", DSL.literal("test")), DSL.namedArgument("query", DSL.literal("query_value*"))), - AstDSL.function("wildcard_query", + AstDSL.function( + "wildcard_query", unresolvedArg("field", stringLiteral("test")), unresolvedArg("query", stringLiteral("query_value*")))); } @@ -585,7 +600,8 @@ void wildcard_query_expression_all_params() { DSL.namedArgument("boost", DSL.literal("1.5")), 
DSL.namedArgument("case_insensitive", DSL.literal("true")), DSL.namedArgument("rewrite", DSL.literal("scoring_boolean"))), - AstDSL.function("wildcard_query", + AstDSL.function( + "wildcard_query", unresolvedArg("field", stringLiteral("test")), unresolvedArg("query", stringLiteral("query_value*")), unresolvedArg("boost", stringLiteral("1.5")), @@ -603,154 +619,144 @@ public void match_phrase_prefix_all_params() { DSL.namedArgument("boost", "1.5"), DSL.namedArgument("analyzer", "standard"), DSL.namedArgument("max_expansions", "4"), - DSL.namedArgument("zero_terms_query", "NONE") - ), - AstDSL.function("match_phrase_prefix", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")), - unresolvedArg("boost", stringLiteral("1.5")), - unresolvedArg("analyzer", stringLiteral("standard")), - unresolvedArg("max_expansions", stringLiteral("4")), - unresolvedArg("zero_terms_query", stringLiteral("NONE")) - ) - ); - } - - @Test void score_function_expression() { - assertAnalyzeEqual( - DSL.score( - DSL.namedArgument("RelevanceQuery", - DSL.match_phrase_prefix( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("slop", "3") - ) - )), - AstDSL.function("score", - unresolvedArg("RelevanceQuery", - AstDSL.function("match_phrase_prefix", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")) - ) - ) - ) - ); - } - - @Test void score_function_with_boost() { - assertAnalyzeEqual( - DSL.score( - DSL.namedArgument("RelevanceQuery", - DSL.match_phrase_prefix( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("boost", "3.0") - )), - DSL.namedArgument("boost", "2") - ), - AstDSL.function("score", - unresolvedArg("RelevanceQuery", - 
AstDSL.function("match_phrase_prefix", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("boost", stringLiteral("3.0")) - ) - ), - unresolvedArg("boost", stringLiteral("2")) - ) - ); - } - - @Test void score_query_function_expression() { - assertAnalyzeEqual( - DSL.score_query( - DSL.namedArgument("RelevanceQuery", - DSL.wildcard_query( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query") - ) - )), - AstDSL.function("score_query", - unresolvedArg("RelevanceQuery", - AstDSL.function("wildcard_query", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")) - ) - ) - ) - ); - } - - @Test void score_query_function_with_boost() { - assertAnalyzeEqual( - DSL.score_query( - DSL.namedArgument("RelevanceQuery", - DSL.wildcard_query( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query") - ) - ), - DSL.namedArgument("boost", "2.0") - ), - AstDSL.function("score_query", - unresolvedArg("RelevanceQuery", - AstDSL.function("wildcard_query", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")) - ) - ), - unresolvedArg("boost", stringLiteral("2.0")) - ) - ); - } - - @Test void scorequery_function_expression() { - assertAnalyzeEqual( - DSL.scorequery( - DSL.namedArgument("RelevanceQuery", - DSL.simple_query_string( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("slop", "3") - ) - )), - AstDSL.function("scorequery", - unresolvedArg("RelevanceQuery", - AstDSL.function("simple_query_string", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")) - ) - ) - ) - ); + DSL.namedArgument("zero_terms_query", "NONE")), + AstDSL.function( + 
"match_phrase_prefix", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")), + unresolvedArg("boost", stringLiteral("1.5")), + unresolvedArg("analyzer", stringLiteral("standard")), + unresolvedArg("max_expansions", stringLiteral("4")), + unresolvedArg("zero_terms_query", stringLiteral("NONE")))); + } + + @Test + void score_function_expression() { + assertAnalyzeEqual( + DSL.score( + DSL.namedArgument( + "RelevanceQuery", + DSL.match_phrase_prefix( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("slop", "3")))), + AstDSL.function( + "score", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "match_phrase_prefix", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")))))); + } + + @Test + void score_function_with_boost() { + assertAnalyzeEqual( + DSL.score( + DSL.namedArgument( + "RelevanceQuery", + DSL.match_phrase_prefix( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("boost", "3.0"))), + DSL.namedArgument("boost", "2")), + AstDSL.function( + "score", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "match_phrase_prefix", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("boost", stringLiteral("3.0")))), + unresolvedArg("boost", stringLiteral("2")))); + } + + @Test + void score_query_function_expression() { + assertAnalyzeEqual( + DSL.score_query( + DSL.namedArgument( + "RelevanceQuery", + DSL.wildcard_query( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query")))), + AstDSL.function( + "score_query", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "wildcard_query", + unresolvedArg("field", 
stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")))))); + } + + @Test + void score_query_function_with_boost() { + assertAnalyzeEqual( + DSL.score_query( + DSL.namedArgument( + "RelevanceQuery", + DSL.wildcard_query( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"))), + DSL.namedArgument("boost", "2.0")), + AstDSL.function( + "score_query", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "wildcard_query", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")))), + unresolvedArg("boost", stringLiteral("2.0")))); + } + + @Test + void scorequery_function_expression() { + assertAnalyzeEqual( + DSL.scorequery( + DSL.namedArgument( + "RelevanceQuery", + DSL.simple_query_string( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("slop", "3")))), + AstDSL.function( + "scorequery", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "simple_query_string", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")))))); } @Test void scorequery_function_with_boost() { assertAnalyzeEqual( - DSL.scorequery( - DSL.namedArgument("RelevanceQuery", - DSL.simple_query_string( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("slop", "3") - )), - DSL.namedArgument("boost", "2.0") - ), - AstDSL.function("scorequery", - unresolvedArg("RelevanceQuery", - AstDSL.function("simple_query_string", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")) - ) - ), - unresolvedArg("boost", stringLiteral("2.0")) - ) - ); + DSL.scorequery( + DSL.namedArgument( + "RelevanceQuery", + DSL.simple_query_string( + 
DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("slop", "3"))), + DSL.namedArgument("boost", "2.0")), + AstDSL.function( + "scorequery", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "simple_query_string", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")))), + unresolvedArg("boost", stringLiteral("2.0")))); } @Test @@ -764,8 +770,12 @@ public void function_returns_non_constant_value() { // Even a function returns the same values - they are calculated on each call // `sysdate()` which returns `LocalDateTime.now()` shouldn't be cached and should return always // different values - var values = List.of(analyze(function("sysdate")), analyze(function("sysdate")), - analyze(function("sysdate")), analyze(function("sysdate"))); + var values = + List.of( + analyze(function("sysdate")), + analyze(function("sysdate")), + analyze(function("sysdate")), + analyze(function("sysdate"))); var referenceValue = analyze(function("sysdate")).valueOf(); assertTrue(values.stream().noneMatch(v -> v.valueOf() == referenceValue)); } @@ -773,8 +783,12 @@ public void function_returns_non_constant_value() { @Test public void now_as_a_function_not_cached() { // // We can call `now()` as a function, in that case nothing should be cached - var values = List.of(analyze(function("now")), analyze(function("now")), - analyze(function("now")), analyze(function("now"))); + var values = + List.of( + analyze(function("now")), + analyze(function("now")), + analyze(function("now")), + analyze(function("now"))); var referenceValue = analyze(function("now")).valueOf(); assertTrue(values.stream().noneMatch(v -> v.valueOf() == referenceValue)); } @@ -783,13 +797,12 @@ protected Expression analyze(UnresolvedExpression unresolvedExpression) { return expressionAnalyzer.analyze(unresolvedExpression, analysisContext); } - protected void 
assertAnalyzeEqual(Expression expected, - UnresolvedExpression unresolvedExpression) { + protected void assertAnalyzeEqual( + Expression expected, UnresolvedExpression unresolvedExpression) { assertEquals(expected, analyze(unresolvedExpression)); } - protected void assertAnalyzeEqual(Expression expected, - UnresolvedPlan unresolvedPlan) { + protected void assertAnalyzeEqual(Expression expected, UnresolvedPlan unresolvedPlan) { assertEquals(expected, analyze(unresolvedPlan)); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java b/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java index 89d5f699e3..28bcb8793f 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static java.util.Collections.emptyList; @@ -27,65 +26,58 @@ class ExpressionReferenceOptimizerTest extends AnalyzerTestBase { void expression_without_aggregation_should_not_be_replaced() { assertEquals( DSL.subtract(DSL.ref("age", INTEGER), DSL.literal(1)), - optimize(DSL.subtract(DSL.ref("age", INTEGER), DSL.literal(1))) - ); + optimize(DSL.subtract(DSL.ref("age", INTEGER), DSL.literal(1)))); } @Test void group_expression_should_be_replaced() { - assertEquals( - DSL.ref("abs(balance)", INTEGER), - optimize(DSL.abs(DSL.ref("balance", INTEGER))) - ); + assertEquals(DSL.ref("abs(balance)", INTEGER), optimize(DSL.abs(DSL.ref("balance", INTEGER)))); } @Test void aggregation_expression_should_be_replaced() { - assertEquals( - DSL.ref("AVG(age)", DOUBLE), - optimize(DSL.avg(DSL.ref("age", INTEGER))) - ); + assertEquals(DSL.ref("AVG(age)", DOUBLE), optimize(DSL.avg(DSL.ref("age", INTEGER)))); } @Test void aggregation_in_expression_should_be_replaced() { assertEquals( DSL.subtract(DSL.ref("AVG(age)", 
DOUBLE), DSL.literal(1)), - optimize(DSL.subtract(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(1))) - ); + optimize(DSL.subtract(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(1)))); } @Test void case_clause_should_be_replaced() { - Expression caseClause = DSL.cases( - null, - DSL.when( - DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), - DSL.literal("true"))); + Expression caseClause = + DSL.cases( + null, + DSL.when(DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), DSL.literal("true"))); LogicalPlan logicalPlan = LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("test", table), emptyList(), - ImmutableList.of(DSL.named( - "CaseClause(whenClauses=[WhenClause(condition==(age, 30), result=\"true\")]," - + " defaultResult=null)", - caseClause))); + ImmutableList.of( + DSL.named( + "CaseClause(whenClauses=[WhenClause(condition==(age, 30), result=\"true\")]," + + " defaultResult=null)", + caseClause))); assertEquals( DSL.ref( "CaseClause(whenClauses=[WhenClause(condition==(age, 30), result=\"true\")]," - + " defaultResult=null)", STRING), + + " defaultResult=null)", + STRING), optimize(caseClause, logicalPlan)); } @Test void aggregation_in_case_when_clause_should_be_replaced() { - Expression caseClause = DSL.cases( - null, - DSL.when( - DSL.equal(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(30)), - DSL.literal("true"))); + Expression caseClause = + DSL.cases( + null, + DSL.when( + DSL.equal(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(30)), DSL.literal("true"))); LogicalPlan logicalPlan = LogicalPlanDSL.aggregation( @@ -96,19 +88,16 @@ void aggregation_in_case_when_clause_should_be_replaced() { assertEquals( DSL.cases( null, - DSL.when( - DSL.equal(DSL.ref("AVG(age)", DOUBLE), DSL.literal(30)), - DSL.literal("true"))), + DSL.when(DSL.equal(DSL.ref("AVG(age)", DOUBLE), DSL.literal(30)), DSL.literal("true"))), optimize(caseClause, logicalPlan)); } @Test void aggregation_in_case_else_clause_should_be_replaced() { - Expression caseClause = DSL.cases( - 
DSL.avg(DSL.ref("age", INTEGER)), - DSL.when( - DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), - DSL.literal("true"))); + Expression caseClause = + DSL.cases( + DSL.avg(DSL.ref("age", INTEGER)), + DSL.when(DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), DSL.literal("true"))); LogicalPlan logicalPlan = LogicalPlanDSL.aggregation( @@ -119,9 +108,7 @@ void aggregation_in_case_else_clause_should_be_replaced() { assertEquals( DSL.cases( DSL.ref("AVG(age)", DOUBLE), - DSL.when( - DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), - DSL.literal("true"))), + DSL.when(DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), DSL.literal("true"))), optimize(caseClause, logicalPlan)); } @@ -136,12 +123,8 @@ void window_expression_should_be_replaced() { DSL.named(DSL.denseRank()), new WindowDefinition(emptyList(), emptyList())); - assertEquals( - DSL.ref("rank()", INTEGER), - optimize(DSL.rank(), logicalPlan)); - assertEquals( - DSL.ref("dense_rank()", INTEGER), - optimize(DSL.denseRank(), logicalPlan)); + assertEquals(DSL.ref("rank()", INTEGER), optimize(DSL.rank(), logicalPlan)); + assertEquals(DSL.ref("dense_rank()", INTEGER), optimize(DSL.denseRank(), logicalPlan)); } Expression optimize(Expression expression) { @@ -158,11 +141,11 @@ Expression optimize(Expression expression, LogicalPlan logicalPlan) { LogicalPlan logicalPlan() { return LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("AVG(age)", DSL.avg(DSL.ref("age", INTEGER))), - DSL.named("SUM(age)", DSL.sum(DSL.ref("age", INTEGER)))), - ImmutableList.of(DSL.named("balance", DSL.ref("balance", INTEGER)), - DSL.named("abs(balance)", DSL.abs(DSL.ref("balance", INTEGER)))) - ); + ImmutableList.of( + DSL.named("AVG(age)", DSL.avg(DSL.ref("age", INTEGER))), + DSL.named("SUM(age)", DSL.sum(DSL.ref("age", INTEGER)))), + ImmutableList.of( + DSL.named("balance", DSL.ref("balance", INTEGER)), + DSL.named("abs(balance)", DSL.abs(DSL.ref("balance", INTEGER))))); } } diff 
--git a/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java index e9c891905c..68c508b645 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -22,8 +21,7 @@ class NamedExpressionAnalyzerTest extends AnalyzerTestBase { void visit_named_select_item() { Alias alias = AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value")); - NamedExpressionAnalyzer analyzer = - new NamedExpressionAnalyzer(expressionAnalyzer); + NamedExpressionAnalyzer analyzer = new NamedExpressionAnalyzer(expressionAnalyzer); NamedExpression analyze = analyzer.analyze(alias, analysisContext); assertEquals("integer_value", analyze.getNameOrAlias()); @@ -32,11 +30,10 @@ void visit_named_select_item() { @Test void visit_highlight() { Map args = new HashMap<>(); - Alias alias = AstDSL.alias("highlight(fieldA)", - new HighlightFunction( - AstDSL.stringLiteral("fieldA"), args)); - NamedExpressionAnalyzer analyzer = - new NamedExpressionAnalyzer(expressionAnalyzer); + Alias alias = + AstDSL.alias( + "highlight(fieldA)", new HighlightFunction(AstDSL.stringLiteral("fieldA"), args)); + NamedExpressionAnalyzer analyzer = new NamedExpressionAnalyzer(expressionAnalyzer); NamedExpression analyze = analyzer.analyze(alias, analysisContext); assertEquals("highlight(fieldA)", analyze.getNameOrAlias()); diff --git a/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java index 5833ef6ae4..3599a86918 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java +++ 
b/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -34,18 +33,26 @@ void should_return_original_name_if_no_qualifier() { @Test void should_report_error_if_qualifier_is_not_index() { - runInScope(new Symbol(Namespace.FIELD_NAME, "aIndex"), ARRAY, () -> { - SyntaxCheckException error = assertThrows(SyntaxCheckException.class, - () -> qualifierAnalyzer.unqualified("a", "integer_value")); - assertEquals("The qualifier [a] of qualified name [a.integer_value] " - + "must be an field name, index name or its alias", error.getMessage()); - }); + runInScope( + new Symbol(Namespace.FIELD_NAME, "aIndex"), + ARRAY, + () -> { + SyntaxCheckException error = + assertThrows( + SyntaxCheckException.class, + () -> qualifierAnalyzer.unqualified("a", "integer_value")); + assertEquals( + "The qualifier [a] of qualified name [a.integer_value] " + + "must be an field name, index name or its alias", + error.getMessage()); + }); } @Test void should_report_error_if_qualifier_is_not_exist() { - SyntaxCheckException error = assertThrows(SyntaxCheckException.class, - () -> qualifierAnalyzer.unqualified("a", "integer_value")); + SyntaxCheckException error = + assertThrows( + SyntaxCheckException.class, () -> qualifierAnalyzer.unqualified("a", "integer_value")); assertEquals( "The qualifier [a] of qualified name [a.integer_value] must be an field name, index name " + "or its alias", @@ -54,23 +61,26 @@ void should_report_error_if_qualifier_is_not_exist() { @Test void should_return_qualified_name_if_qualifier_is_index() { - runInScope(new Symbol(Namespace.INDEX_NAME, "a"), STRUCT, () -> - assertEquals("integer_value", qualifierAnalyzer.unqualified("a", "integer_value")) - ); + runInScope( + new Symbol(Namespace.INDEX_NAME, "a"), + STRUCT, + () -> assertEquals("integer_value", qualifierAnalyzer.unqualified("a", 
"integer_value"))); } @Test void should_return_qualified_name_if_qualifier_is_field() { - runInScope(new Symbol(Namespace.FIELD_NAME, "a"), STRUCT, () -> - assertEquals("a.integer_value", qualifierAnalyzer.unqualified("a", "integer_value")) - ); + runInScope( + new Symbol(Namespace.FIELD_NAME, "a"), + STRUCT, + () -> assertEquals("a.integer_value", qualifierAnalyzer.unqualified("a", "integer_value"))); } @Test void should_report_error_if_more_parts_in_qualified_name() { - runInScope(new Symbol(Namespace.INDEX_NAME, "a"), STRUCT, () -> - qualifierAnalyzer.unqualified("a", "integer_value", "invalid") - ); + runInScope( + new Symbol(Namespace.INDEX_NAME, "a"), + STRUCT, + () -> qualifierAnalyzer.unqualified("a", "integer_value", "invalid")); } private void runInScope(Symbol symbol, ExprType type, Runnable test) { @@ -82,5 +92,4 @@ private void runInScope(Symbol symbol, ExprType type, Runnable test) { analysisContext.pop(); } } - } diff --git a/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java b/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java index 3bd90f0081..27edc588fa 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.ast.dsl.AstDSL.argument; @@ -44,8 +43,7 @@ public void project_all_from_source() { DSL.named("double_value", DSL.ref("double_value", DOUBLE)), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), DSL.named("double_value", DSL.ref("double_value", DOUBLE)), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), @@ -61,11 +59,9 @@ public void select_and_project_all() { LogicalPlanDSL.project( 
LogicalPlanDSL.relation("schema", table), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - DSL.named("double_value", DSL.ref("double_value", DOUBLE)) - ), + DSL.named("double_value", DSL.ref("double_value", DOUBLE))), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - DSL.named("double_value", DSL.ref("double_value", DOUBLE)) - ), + DSL.named("double_value", DSL.ref("double_value", DOUBLE))), AstDSL.projectWithArg( AstDSL.projectWithArg( AstDSL.relation("schema"), @@ -73,8 +69,7 @@ public void select_and_project_all() { AstDSL.field("integer_value"), AstDSL.field("double_value")), AstDSL.defaultFieldsArgs(), - AllFields.of() - )); + AllFields.of())); } @Test @@ -84,10 +79,8 @@ public void remove_and_project_all() { LogicalPlanDSL.remove( LogicalPlanDSL.relation("schema", table), DSL.ref("integer_value", INTEGER), - DSL.ref("double_value", DOUBLE) - ), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.ref("double_value", DOUBLE)), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.projectWithArg( AstDSL.projectWithArg( AstDSL.relation("schema"), @@ -95,8 +88,7 @@ public void remove_and_project_all() { AstDSL.field("integer_value"), AstDSL.field("double_value")), AstDSL.defaultFieldsArgs(), - AllFields.of() - )); + AllFields.of())); } @Test @@ -105,20 +97,21 @@ public void stats_and_project_all() { LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList.of(DSL - .named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), DSL.named("avg(integer_value)", DSL.ref("avg(integer_value)", DOUBLE)), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.projectWithArg( AstDSL.agg( 
AstDSL.relation("schema"), - AstDSL.exprList(AstDSL.alias("avg(integer_value)", AstDSL.aggregate("avg", - field("integer_value")))), + AstDSL.exprList( + AstDSL.alias( + "avg(integer_value)", AstDSL.aggregate("avg", field("integer_value")))), null, ImmutableList.of(AstDSL.alias("string_value", field("string_value"))), - AstDSL.defaultStatsArgs()), AstDSL.defaultFieldsArgs(), + AstDSL.defaultStatsArgs()), + AstDSL.defaultFieldsArgs(), AllFields.of())); } @@ -131,14 +124,12 @@ public void rename_and_project_all() { ImmutableMap.of(DSL.ref("integer_value", INTEGER), DSL.ref("ivalue", INTEGER))), DSL.named("double_value", DSL.ref("double_value", DOUBLE)), DSL.named("string_value", DSL.ref("string_value", STRING)), - DSL.named("ivalue", DSL.ref("ivalue", INTEGER)) - ), + DSL.named("ivalue", DSL.ref("ivalue", INTEGER))), AstDSL.projectWithArg( AstDSL.rename( AstDSL.relation("schema"), AstDSL.map(AstDSL.field("integer_value"), AstDSL.field("ivalue"))), AstDSL.defaultFieldsArgs(), - AllFields.of() - )); + AllFields.of())); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java index b2fe29b509..38d4704bcd 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -28,23 +27,20 @@ @ExtendWith(MockitoExtension.class) public class SelectExpressionAnalyzerTest extends AnalyzerTestBase { - @Mock - private ExpressionReferenceOptimizer optimizer; + @Mock private ExpressionReferenceOptimizer optimizer; @Test public void named_expression() { assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - AstDSL.alias("integer_value", 
AstDSL.qualifiedName("integer_value")) - ); + AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"))); } @Test public void named_expression_with_alias() { assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER), "int"), - AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"), "int") - ); + AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"), "int")); } @Test @@ -52,9 +48,8 @@ public void field_name_with_qualifier() { analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - AstDSL.alias("integer_alias.integer_value", - AstDSL.qualifiedName("index_alias", "integer_value")) - ); + AstDSL.alias( + "integer_alias.integer_value", AstDSL.qualifiedName("index_alias", "integer_value"))); } @Test @@ -62,9 +57,9 @@ public void field_name_with_qualifier_quoted() { analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - AstDSL.alias("`integer_alias`.integer_value", // qualifier in SELECT is quoted originally - AstDSL.qualifiedName("index_alias", "integer_value")) - ); + AstDSL.alias( + "`integer_alias`.integer_value", // qualifier in SELECT is quoted originally + AstDSL.qualifiedName("index_alias", "integer_value"))); } @Test @@ -72,21 +67,21 @@ public void field_name_in_expression_with_qualifier() { analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( DSL.named("abs(index_alias.integer_value)", DSL.abs(DSL.ref("integer_value", INTEGER))), - AstDSL.alias("abs(index_alias.integer_value)", - AstDSL.function("abs", AstDSL.qualifiedName("index_alias", "integer_value"))) - ); + AstDSL.alias( + "abs(index_alias.integer_value)", + AstDSL.function("abs", AstDSL.qualifiedName("index_alias", "integer_value")))); } protected List 
analyze(UnresolvedExpression unresolvedExpression) { - doAnswer(invocation -> ((NamedExpression) invocation.getArgument(0)) - .getDelegated()).when(optimizer).optimize(any(), any()); + doAnswer(invocation -> ((NamedExpression) invocation.getArgument(0)).getDelegated()) + .when(optimizer) + .optimize(any(), any()); return new SelectExpressionAnalyzer(expressionAnalyzer) - .analyze(Arrays.asList(unresolvedExpression), - analysisContext, optimizer); + .analyze(Arrays.asList(unresolvedExpression), analysisContext, optimizer); } - protected void assertAnalyzeEqual(NamedExpression expected, - UnresolvedExpression unresolvedExpression) { + protected void assertAnalyzeEqual( + NamedExpression expected, UnresolvedExpression unresolvedExpression) { assertEquals(Arrays.asList(expected), analyze(unresolvedExpression)); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java b/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java index c963e1d30d..91677a901e 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,9 +20,7 @@ public class TypeEnvironmentTest { - /** - * Use context class for push/pop. - */ + /** Use context class for push/pop. 
*/ private AnalysisContext context = new AnalysisContext(); @Test @@ -69,20 +66,24 @@ public void defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePo assertEquals(INTEGER, environment().resolve(toSymbol(age))); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> environment().resolve(toSymbol(city))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=s.city) in type env", + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=s.city) in type env", exception.getMessage()); - exception = assertThrows(SemanticCheckException.class, - () -> environment().resolve(toSymbol(manager))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=s.manager) in type env", + exception = + assertThrows(SemanticCheckException.class, () -> environment().resolve(toSymbol(manager))); + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=s.manager) in type env", exception.getMessage()); } @Test public void resolveLiteralInEnvFailed() { - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> environment().resolve(new Symbol(Namespace.FIELD_NAME, "1"))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=1) in type env", - exception.getMessage()); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> environment().resolve(new Symbol(Namespace.FIELD_NAME, "1"))); + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=1) in type env", exception.getMessage()); } private TypeEnvironment environment() { @@ -92,5 +93,4 @@ private TypeEnvironment environment() { private Symbol toSymbol(ReferenceExpression ref) { return new Symbol(Namespace.FIELD_NAME, ref.getAttr()); } - } diff --git a/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java index dd4361ad6a..acb11f0b57 100644 --- 
a/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -75,16 +74,12 @@ void should_not_generate_sort_operator_if_no_partition_by_and_order_by_list() { LogicalPlanDSL.window( LogicalPlanDSL.relation("test", table), DSL.named("row_number", DSL.rowNumber()), - new WindowDefinition( - ImmutableList.of(), - ImmutableList.of())), + new WindowDefinition(ImmutableList.of(), ImmutableList.of())), analyzer.analyze( AstDSL.alias( "row_number", AstDSL.window( - AstDSL.function("row_number"), - ImmutableList.of(), - ImmutableList.of())), + AstDSL.function("row_number"), ImmutableList.of(), ImmutableList.of())), analysisContext)); } @@ -93,10 +88,7 @@ void should_return_original_child_if_project_item_not_windowed() { assertEquals( child, analyzer.analyze( - AstDSL.alias( - "string_value", - AstDSL.qualifiedName("string_value")), - analysisContext)); + AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")), analysisContext)); } @Test @@ -114,20 +106,23 @@ void can_analyze_sort_options() { .put(new SortOption(DESC, NULL_LAST), DEFAULT_DESC) .build(); - expects.forEach((option, expect) -> { - Alias ast = AstDSL.alias( - "row_number", - AstDSL.window( - AstDSL.function("row_number"), - Collections.emptyList(), - ImmutableList.of( - ImmutablePair.of(option, AstDSL.qualifiedName("integer_value"))))); + expects.forEach( + (option, expect) -> { + Alias ast = + AstDSL.alias( + "row_number", + AstDSL.window( + AstDSL.function("row_number"), + Collections.emptyList(), + ImmutableList.of( + ImmutablePair.of(option, AstDSL.qualifiedName("integer_value"))))); - LogicalPlan plan = analyzer.analyze(ast, analysisContext); - LogicalSort sort = (LogicalSort) plan.getChild().get(0); - assertEquals(expect, 
sort.getSortList().get(0).getLeft(), - "Assertion failed on input option: " + option); - }); + LogicalPlan plan = analyzer.analyze(ast, analysisContext); + LogicalSort sort = (LogicalSort) plan.getChild().get(0); + assertEquals( + expect, + sort.getSortList().get(0).getLeft(), + "Assertion failed on input option: " + option); + }); } - } diff --git a/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java b/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java index c00bd7705d..775984a528 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.analysis.model; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; @@ -27,14 +26,12 @@ @ExtendWith(MockitoExtension.class) public class DataSourceSchemaIdentifierNameResolverTest { - @Mock - private DataSourceService dataSourceService; + @Mock private DataSourceService dataSourceService; @Test void testFullyQualifiedName() { when(dataSourceService.dataSourceExists("prom")).thenReturn(Boolean.TRUE); - identifierOf( - Arrays.asList("prom", "information_schema", "tables"), dataSourceService) + identifierOf(Arrays.asList("prom", "information_schema", "tables"), dataSourceService) .datasource("prom") .schema("information_schema") .name("tables"); @@ -66,8 +63,8 @@ void defaultDataSourceNameResolve() { static class Identifier { private final DataSourceSchemaIdentifierNameResolver resolver; - protected static Identifier identifierOf(List parts, - DataSourceService dataSourceService) { + protected static Identifier identifierOf( + List parts, DataSourceService dataSourceService) { return new Identifier(parts, dataSourceService); } diff --git 
a/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java b/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java index 90f98e8492..176390560e 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; import static org.hamcrest.MatcherAssert.assertThat; @@ -24,7 +23,6 @@ import org.junit.jupiter.api.Test; import org.opensearch.sql.data.type.ExprType; - public class SymbolTableTest { private SymbolTable symbolTable; @@ -60,13 +58,7 @@ public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { Map typeByName = symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")); - assertThat( - typeByName, - allOf( - aMapWithSize(1), - hasEntry("s.projects.active", BOOLEAN) - ) - ); + assertThat(typeByName, allOf(aMapWithSize(1), hasEntry("s.projects.active", BOOLEAN))); } @Test @@ -76,17 +68,11 @@ public void lookupAllFieldsReturnUnnestedFields() { symbolTable.store(new Symbol(Namespace.FIELD_NAME, "active.manager.name"), STRING); symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.address"), BOOLEAN); - Map typeByName = - symbolTable.lookupAllFields(Namespace.FIELD_NAME); + Map typeByName = symbolTable.lookupAllFields(Namespace.FIELD_NAME); assertThat( typeByName, - allOf( - aMapWithSize(2), - hasEntry("active", BOOLEAN), - hasEntry("s.address", BOOLEAN) - ) - ); + allOf(aMapWithSize(2), hasEntry("active", BOOLEAN), hasEntry("s.address", BOOLEAN))); } @Test @@ -94,8 +80,8 @@ public void failedToResolveSymbolNoNamespaceMatched() { symbolTable.store(new Symbol(Namespace.FUNCTION_NAME, "customFunction"), BOOLEAN); assertFalse(symbolTable.lookup(new Symbol(Namespace.FIELD_NAME, "s.projects")).isPresent()); - assertThat(symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")), - 
anEmptyMap()); + assertThat( + symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")), anEmptyMap()); } @Test @@ -111,5 +97,4 @@ private void defineSymbolShouldBeAbleToResolve(Symbol symbol, ExprType expectedT assertTrue(actualType.isPresent()); assertEquals(expectedType, actualType.get()); } - } diff --git a/core/src/test/java/org/opensearch/sql/config/TestConfig.java b/core/src/test/java/org/opensearch/sql/config/TestConfig.java index 6179f020c2..92b6aac64f 100644 --- a/core/src/test/java/org/opensearch/sql/config/TestConfig.java +++ b/core/src/test/java/org/opensearch/sql/config/TestConfig.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.config; import com.google.common.collect.ImmutableMap; @@ -23,9 +22,7 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** - * Configuration will be used for UT. - */ +/** Configuration will be used for UT. */ public class TestConfig { public static final String INT_TYPE_NULL_VALUE_FIELD = "int_null_value"; public static final String INT_TYPE_MISSING_VALUE_FIELD = "int_missing_value"; @@ -36,32 +33,33 @@ public class TestConfig { public static final String STRING_TYPE_NULL_VALUE_FIELD = "string_null_value"; public static final String STRING_TYPE_MISSING_VALUE_FIELD = "string_missing_value"; - public static Map typeMapping = new ImmutableMap.Builder() - .put("integer_value", ExprCoreType.INTEGER) - .put(INT_TYPE_NULL_VALUE_FIELD, ExprCoreType.INTEGER) - .put(INT_TYPE_MISSING_VALUE_FIELD, ExprCoreType.INTEGER) - .put("long_value", ExprCoreType.LONG) - .put("float_value", ExprCoreType.FLOAT) - .put("double_value", ExprCoreType.DOUBLE) - .put(DOUBLE_TYPE_NULL_VALUE_FIELD, ExprCoreType.DOUBLE) - .put(DOUBLE_TYPE_MISSING_VALUE_FIELD, ExprCoreType.DOUBLE) - .put("boolean_value", ExprCoreType.BOOLEAN) - .put(BOOL_TYPE_NULL_VALUE_FIELD, ExprCoreType.BOOLEAN) - .put(BOOL_TYPE_MISSING_VALUE_FIELD, ExprCoreType.BOOLEAN) - 
.put("string_value", ExprCoreType.STRING) - .put(STRING_TYPE_NULL_VALUE_FIELD, ExprCoreType.STRING) - .put(STRING_TYPE_MISSING_VALUE_FIELD, ExprCoreType.STRING) - .put("struct_value", ExprCoreType.STRUCT) - .put("array_value", ExprCoreType.ARRAY) - .put("timestamp_value", ExprCoreType.TIMESTAMP) - .put("field_value1", ExprCoreType.STRING) - .put("field_value2", ExprCoreType.STRING) - .put("message", ExprCoreType.STRING) - .put("message.info", ExprCoreType.STRING) - .put("message.info.id", ExprCoreType.STRING) - .put("comment", ExprCoreType.STRING) - .put("comment.data", ExprCoreType.STRING) - .build(); + public static Map typeMapping = + new ImmutableMap.Builder() + .put("integer_value", ExprCoreType.INTEGER) + .put(INT_TYPE_NULL_VALUE_FIELD, ExprCoreType.INTEGER) + .put(INT_TYPE_MISSING_VALUE_FIELD, ExprCoreType.INTEGER) + .put("long_value", ExprCoreType.LONG) + .put("float_value", ExprCoreType.FLOAT) + .put("double_value", ExprCoreType.DOUBLE) + .put(DOUBLE_TYPE_NULL_VALUE_FIELD, ExprCoreType.DOUBLE) + .put(DOUBLE_TYPE_MISSING_VALUE_FIELD, ExprCoreType.DOUBLE) + .put("boolean_value", ExprCoreType.BOOLEAN) + .put(BOOL_TYPE_NULL_VALUE_FIELD, ExprCoreType.BOOLEAN) + .put(BOOL_TYPE_MISSING_VALUE_FIELD, ExprCoreType.BOOLEAN) + .put("string_value", ExprCoreType.STRING) + .put(STRING_TYPE_NULL_VALUE_FIELD, ExprCoreType.STRING) + .put(STRING_TYPE_MISSING_VALUE_FIELD, ExprCoreType.STRING) + .put("struct_value", ExprCoreType.STRUCT) + .put("array_value", ExprCoreType.ARRAY) + .put("timestamp_value", ExprCoreType.TIMESTAMP) + .put("field_value1", ExprCoreType.STRING) + .put("field_value2", ExprCoreType.STRING) + .put("message", ExprCoreType.STRING) + .put("message.info", ExprCoreType.STRING) + .put("message.info.id", ExprCoreType.STRING) + .put("comment", ExprCoreType.STRING) + .put("comment.data", ExprCoreType.STRING) + .build(); protected StorageEngine storageEngine() { return new StorageEngine() { @@ -94,10 +92,12 @@ public PhysicalPlan implement(LogicalPlan plan) { 
protected SymbolTable symbolTable() { SymbolTable symbolTable = new SymbolTable(); - typeMapping.entrySet() + typeMapping + .entrySet() .forEach( - entry -> symbolTable - .store(new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); + entry -> + symbolTable.store( + new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); return symbolTable; } From 7d23e0f4b28aa59578bf362e4cebd2eaea693f37 Mon Sep 17 00:00:00 2001 From: Andrew Carbonetto Date: Wed, 9 Aug 2023 15:09:49 -0700 Subject: [PATCH 12/42] (#1536) Refactor OpenSearchQueryRequest and move includes to builder (#1937) * #1536: Refactor OpenSearchQueryRequest and move includes to builder (#320) * #1536: Refactor OpenSearchQueryRequest and move incldues to builder Signed-off-by: acarbonetto * #1536: Checkstyle fixes Signed-off-by: acarbonetto * #1536: Checkstyle fixes Signed-off-by: acarbonetto --------- Signed-off-by: acarbonetto * #1536: Spotless Apply Signed-off-by: acarbonetto --------- Signed-off-by: acarbonetto --- .../request/OpenSearchQueryRequest.java | 24 +++--- .../opensearch/request/OpenSearchRequest.java | 1 + .../request/OpenSearchRequestBuilder.java | 11 ++- .../request/OpenSearchScrollRequest.java | 10 +-- .../client/OpenSearchNodeClientTest.java | 8 +- .../client/OpenSearchRestClientTest.java | 12 +-- .../request/OpenSearchQueryRequestTest.java | 22 +++--- .../request/OpenSearchRequestBuilderTest.java | 76 ++++++++++++++++++- .../request/OpenSearchScrollRequestTest.java | 49 +++--------- .../storage/scan/OpenSearchIndexScanTest.java | 8 +- 10 files changed, 137 insertions(+), 84 deletions(-) diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java index 6da34dd678..919596eee2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java +++ 
b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.opensearch.request; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.function.Consumer; import java.util.function.Function; @@ -20,7 +19,6 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.search.SearchHits; import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.search.fetch.subphase.FetchSourceContext; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.OpenSearchResponse; @@ -52,6 +50,14 @@ public class OpenSearchQueryRequest implements OpenSearchRequest { @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + + /** + * List of includes expected in the response. + */ + @EqualsAndHashCode.Exclude + @ToString.Exclude + private final List includes; + /** * Indicate the search already done. */ @@ -61,40 +67,38 @@ public class OpenSearchQueryRequest implements OpenSearchRequest { * Constructor of OpenSearchQueryRequest. */ public OpenSearchQueryRequest(String indexName, int size, - OpenSearchExprValueFactory factory) { - this(new IndexName(indexName), size, factory); + OpenSearchExprValueFactory factory, List includes) { + this(new IndexName(indexName), size, factory, includes); } /** * Constructor of OpenSearchQueryRequest. */ public OpenSearchQueryRequest(IndexName indexName, int size, - OpenSearchExprValueFactory factory) { + OpenSearchExprValueFactory factory, List includes) { this.indexName = indexName; this.sourceBuilder = new SearchSourceBuilder(); sourceBuilder.from(0); sourceBuilder.size(size); sourceBuilder.timeout(DEFAULT_QUERY_TIMEOUT); this.exprValueFactory = factory; + this.includes = includes; } /** * Constructor of OpenSearchQueryRequest. 
*/ public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory factory) { + OpenSearchExprValueFactory factory, List includes) { this.indexName = indexName; this.sourceBuilder = sourceBuilder; this.exprValueFactory = factory; + this.includes = includes; } @Override public OpenSearchResponse search(Function searchAction, Function scrollAction) { - FetchSourceContext fetchSource = this.sourceBuilder.fetchSource(); - List includes = fetchSource != null && fetchSource.includes() != null - ? Arrays.asList(fetchSource.includes()) - : List.of(); if (searchDone) { return new OpenSearchResponse(SearchHits.empty(), exprValueFactory, includes); } else { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java index f070ac11db..5c9d0033c1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java @@ -24,6 +24,7 @@ * OpenSearch search request. */ public interface OpenSearchRequest extends Writeable { + /** * Default query timeout in minutes. 
*/ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java index bec133f834..80259f15d3 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java @@ -14,6 +14,7 @@ import static org.opensearch.search.sort.SortOrder.ASC; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; @@ -98,15 +99,19 @@ public OpenSearchRequestBuilder(int requestedTotalSize, public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, int maxResultWindow, TimeValue scrollTimeout) { int size = requestedTotalSize; + FetchSourceContext fetchSource = this.sourceBuilder.fetchSource(); + List includes = fetchSource != null + ? Arrays.asList(fetchSource.includes()) + : List.of(); if (pageSize == null) { if (startFrom + size > maxResultWindow) { sourceBuilder.size(maxResultWindow - startFrom); return new OpenSearchScrollRequest( - indexName, scrollTimeout, sourceBuilder, exprValueFactory); + indexName, scrollTimeout, sourceBuilder, exprValueFactory, includes); } else { sourceBuilder.from(startFrom); sourceBuilder.size(requestedTotalSize); - return new OpenSearchQueryRequest(indexName, sourceBuilder, exprValueFactory); + return new OpenSearchQueryRequest(indexName, sourceBuilder, exprValueFactory, includes); } } else { if (startFrom != 0) { @@ -114,7 +119,7 @@ public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, } sourceBuilder.size(pageSize); return new OpenSearchScrollRequest(indexName, scrollTimeout, - sourceBuilder, exprValueFactory); + sourceBuilder, exprValueFactory, includes); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java 
b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java index 9ffcc42ff7..34e8fcd096 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.opensearch.request; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.function.Consumer; @@ -71,13 +70,16 @@ public class OpenSearchScrollRequest implements OpenSearchRequest { private boolean needClean = true; @Getter + @EqualsAndHashCode.Exclude + @ToString.Exclude private final List includes; /** Constructor. */ public OpenSearchScrollRequest(IndexName indexName, TimeValue scrollTimeout, SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory exprValueFactory) { + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.indexName = indexName; this.scrollTimeout = scrollTimeout; this.exprValueFactory = exprValueFactory; @@ -86,9 +88,7 @@ public OpenSearchScrollRequest(IndexName indexName, .scroll(scrollTimeout) .source(sourceBuilder); - includes = sourceBuilder.fetchSource() == null - ? 
List.of() - : Arrays.asList(sourceBuilder.fetchSource().includes()); + this.includes = includes; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java index 67c635dc42..d985bcbeec 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java @@ -322,7 +322,7 @@ void search() { // Verify response for first scroll request OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -357,7 +357,7 @@ void cleanup() { OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -374,7 +374,7 @@ void cleanup() { void cleanup_without_scrollId() { OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of()); client.cleanup(request); verify(nodeClient, never()).prepareClearScroll(); } @@ -386,7 +386,7 @@ void cleanup_rethrows_exception() { OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. FieldUtils.writeField(request, "needClean", true, true); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java index 7f968733c1..409596910e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java @@ -307,7 +307,7 @@ void search() throws IOException { // Verify response for first scroll request OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -329,7 +329,7 @@ void search_with_IOException() throws IOException { IllegalStateException.class, () -> client.search(new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory))); + new SearchSourceBuilder(), factory, 
List.of()))); } @Test @@ -351,7 +351,7 @@ void scroll_with_IOException() throws IOException { // First request run successfully OpenSearchScrollRequest scrollRequest = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of()); client.search(scrollRequest); assertThrows( IllegalStateException.class, () -> client.search(scrollRequest)); @@ -370,7 +370,7 @@ void schedule() { void cleanup() { OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of()); // Enforce cleaning by setting a private field. FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); @@ -383,7 +383,7 @@ void cleanup() { void cleanup_without_scrollId() throws IOException { OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of()); client.cleanup(request); verify(restClient, never()).clearScroll(any(), any()); } @@ -395,7 +395,7 @@ void cleanup_with_IOException() { OpenSearchScrollRequest request = new OpenSearchScrollRequest( new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory); + new SearchSourceBuilder(), factory, List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java index cf548d44f9..b6966f2403 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java @@ -18,6 +18,7 @@ import static org.mockito.Mockito.when; import static org.opensearch.sql.opensearch.request.OpenSearchRequest.DEFAULT_QUERY_TIMEOUT; +import java.util.List; import java.util.function.Consumer; import java.util.function.Function; import org.apache.lucene.search.TotalHits; @@ -68,27 +69,25 @@ public class OpenSearchQueryRequestTest { private OpenSearchExprValueFactory factory; private final OpenSearchQueryRequest request = - new OpenSearchQueryRequest("test", 200, factory); + new OpenSearchQueryRequest("test", 200, factory, List.of()); private final OpenSearchQueryRequest remoteRequest = - new OpenSearchQueryRequest("ccs:test", 200, factory); + new OpenSearchQueryRequest("ccs:test", 200, factory, List.of()); @Test void search() { OpenSearchQueryRequest request = new OpenSearchQueryRequest( new OpenSearchRequest.IndexName("test"), sourceBuilder, - factory + factory, + List.of() ); - when(sourceBuilder.fetchSource()).thenReturn(fetchSourceContext); - when(fetchSourceContext.includes()).thenReturn(null); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); OpenSearchResponse searchResponse = request.search(searchAction, scrollAction); - verify(fetchSourceContext, times(1)).includes(); assertFalse(searchResponse.isEmpty()); searchResponse = request.search(searchAction, scrollAction); 
assertTrue(searchResponse.isEmpty()); @@ -100,15 +99,14 @@ void search_withoutContext() { OpenSearchQueryRequest request = new OpenSearchQueryRequest( new OpenSearchRequest.IndexName("test"), sourceBuilder, - factory + factory, + List.of() ); - when(sourceBuilder.fetchSource()).thenReturn(null); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); OpenSearchResponse searchResponse = request.search(searchAction, scrollAction); - verify(sourceBuilder, times(1)).fetchSource(); assertFalse(searchResponse.isEmpty()); assertFalse(request.hasAnotherBatch()); } @@ -118,18 +116,16 @@ void search_withIncludes() { OpenSearchQueryRequest request = new OpenSearchQueryRequest( new OpenSearchRequest.IndexName("test"), sourceBuilder, - factory + factory, + List.of() ); String[] includes = {"_id", "_index"}; - when(sourceBuilder.fetchSource()).thenReturn(fetchSourceContext); - when(fetchSourceContext.includes()).thenReturn(includes); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); OpenSearchResponse searchResponse = request.search(searchAction, scrollAction); - verify(fetchSourceContext, times(2)).includes(); assertFalse(searchResponse.isEmpty()); searchResponse = request.search(searchAction, scrollAction); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java index e8d15bd0bb..483ea1290e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java @@ -99,7 +99,7 @@ void build_query_request() { .size(limit) 
.timeout(DEFAULT_QUERY_TIMEOUT) .trackScores(true), - exprValueFactory), + exprValueFactory, List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -116,7 +116,7 @@ void build_scroll_request_with_correct_size() { .from(offset) .size(MAX_RESULT_WINDOW - offset) .timeout(DEFAULT_QUERY_TIMEOUT), - exprValueFactory), + exprValueFactory, List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -257,6 +257,78 @@ void test_push_down_project() { .timeout(DEFAULT_QUERY_TIMEOUT) .fetchSource(new String[]{"intA"}, new String[0]), requestBuilder); + + assertEquals( + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), + new SearchSourceBuilder() + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT) + .fetchSource("intA", null), + exprValueFactory, + List.of("intA")), + requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); + } + + @Test + void test_push_down_project_limit() { + Set references = Set.of(DSL.ref("intA", INTEGER)); + requestBuilder.pushDownProjects(references); + + Integer limit = 200; + Integer offset = 0; + requestBuilder.pushDownLimit(limit, offset); + + assertSearchSourceBuilder( + new SearchSourceBuilder() + .from(offset) + .size(limit) + .timeout(DEFAULT_QUERY_TIMEOUT) + .fetchSource(new String[]{"intA"}, new String[0]), + requestBuilder); + + assertEquals( + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), + new SearchSourceBuilder() + .from(offset) + .size(limit) + .timeout(DEFAULT_QUERY_TIMEOUT) + .fetchSource("intA", null), + exprValueFactory, + List.of("intA")), + requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); + } + + @Test + void test_push_down_project_limit_and_offset() { + Set references = Set.of(DSL.ref("intA", INTEGER)); + requestBuilder.pushDownProjects(references); + + Integer limit = 200; + Integer offset = 10; + requestBuilder.pushDownLimit(limit, offset); 
+ + assertSearchSourceBuilder( + new SearchSourceBuilder() + .from(offset) + .size(limit) + .timeout(DEFAULT_QUERY_TIMEOUT) + .fetchSource(new String[]{"intA"}, new String[0]), + requestBuilder); + + assertEquals( + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), + new SearchSourceBuilder() + .from(offset) + .size(limit) + .timeout(DEFAULT_QUERY_TIMEOUT) + .fetchSource("intA", null), + exprValueFactory, + List.of("intA")), + requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @Test diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java index 63c6a5ca7d..4b9233dbc1 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -17,8 +16,6 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.opensearch.sql.opensearch.request.OpenSearchScrollRequest.NO_SCROLL_ID; @@ -73,22 +70,13 @@ class OpenSearchScrollRequestTest { private final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); private final OpenSearchScrollRequest request = new 
OpenSearchScrollRequest( INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory); + searchSourceBuilder, factory, List.of()); @Test void constructor() { - searchSourceBuilder.fetchSource(new String[] {"test"}, null); var request = new OpenSearchScrollRequest(INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory); - assertNotEquals(List.of(), request.getIncludes()); - } - - @Test - void constructor2() { - searchSourceBuilder.fetchSource(new String[]{"test"}, null); - var request = new OpenSearchScrollRequest(INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, - factory); - assertNotEquals(List.of(), request.getIncludes()); + searchSourceBuilder, factory, List.of("test")); + assertEquals(List.of("test"), request.getIncludes()); } @Test @@ -134,7 +122,8 @@ void search() { new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), sourceBuilder, - factory + factory, + List.of() ); when(searchResponse.getHits()).thenReturn(searchHits); @@ -150,14 +139,14 @@ void search_without_context() { new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), sourceBuilder, - factory + factory, + List.of() ); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); OpenSearchResponse response = request.search((sr) -> searchResponse, (sr) -> fail()); - verify(sourceBuilder, times(1)).fetchSource(); assertFalse(response.isEmpty()); } @@ -169,7 +158,8 @@ void search_without_scroll_and_initial_request_should_throw() { new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), sourceBuilder, - factory + factory, + List.of() ); var outStream = new BytesStreamOutput(); request.writeTo(outStream); @@ -193,7 +183,8 @@ void search_withoutIncludes() { new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), sourceBuilder, - factory + factory, + List.of() ); when(searchResponse.getHits()).thenReturn(searchHits); @@ -320,22 +311,4 @@ void setScrollId() { 
request.setScrollId("test"); assertEquals("test", request.getScrollId()); } - - @Test - void includes() { - - assertIncludes(List.of(), searchSourceBuilder); - - searchSourceBuilder.fetchSource((String[])null, (String[])null); - assertIncludes(List.of(), searchSourceBuilder); - - searchSourceBuilder.fetchSource(new String[] {"test"}, null); - assertIncludes(List.of("test"), searchSourceBuilder); - - } - - void assertIncludes(List expected, SearchSourceBuilder sourceBuilder) { - assertEquals(expected, new OpenSearchScrollRequest( - INDEX_NAME, SCROLL_TIMEOUT, sourceBuilder, factory).getIncludes()); - } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java index a34e93dd70..67749c4055 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java @@ -23,6 +23,7 @@ import java.io.ObjectOutputStream; import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; import lombok.SneakyThrows; import org.junit.jupiter.api.BeforeEach; @@ -112,7 +113,7 @@ void serialize() { when(engine.getClient()).thenReturn(client); when(engine.getTable(any(), any())).thenReturn(index); var request = new OpenSearchScrollRequest( - INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory); + INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); request.setScrollId("valid-id"); // make a response, so OpenSearchResponse::isEmpty would return true and unset needClean var response = mock(SearchResponse.class); @@ -380,7 +381,7 @@ PushDownAssertion shouldQueryHighlight(QueryBuilder query, HighlightBuilder high .highlighter(highlight) .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = - new OpenSearchQueryRequest(EMPLOYEES_INDEX, 
sourceBuilder, factory); + new OpenSearchQueryRequest(EMPLOYEES_INDEX, sourceBuilder, factory, List.of()); when(client.search(request)).thenReturn(response); var indexScan = new OpenSearchIndexScan(client, @@ -396,7 +397,8 @@ PushDownAssertion shouldQuery(QueryBuilder expected) { .size(QUERY_SIZE) .timeout(CURSOR_KEEP_ALIVE) .sort(DOC_FIELD_NAME, ASC); - OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, builder, factory); + OpenSearchRequest request = + new OpenSearchQueryRequest(EMPLOYEES_INDEX, builder, factory, List.of()); when(client.search(request)).thenReturn(response); var indexScan = new OpenSearchIndexScan(client, 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); From 91bbb5dd85950a4fda37878b7b612adba7a647fb Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Thu, 10 Aug 2023 10:56:38 -0700 Subject: [PATCH 13/42] [Spotless] Applying Google Code Format for core #5 (#1951) * Integ/sl google java format5 (#332) Adding core final GJF fix. Signed-off-by: Mitchell Gale * remove repeated unused checkstyle. Signed-off-by: Mitchell Gale * spotless apply Signed-off-by: Mitchell Gale * replaced nbsp with pre tag. Signed-off-by: Mitchell Gale * Add test fixtures to failure ignore in core build.gradle. 
Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale --- build.gradle | 16 +- core/build.gradle | 1 + .../org/opensearch/sql/ast/dsl/AstDSL.java | 72 +- .../sql/ast/expression/QualifiedName.java | 18 +- .../sql/data/model/AbstractExprValue.java | 3 +- .../sql/data/model/ExprTimeValue.java | 3 +- .../sql/data/type/WideningTypeRule.java | 13 +- .../opensearch/sql/executor/ExplainTest.java | 155 ++- .../sql/executor/QueryServiceTest.java | 134 +- .../executor/execution/CommandPlanTest.java | 12 +- .../executor/execution/ExplainPlanTest.java | 9 +- .../IntervalTriggerExecutionTest.java | 8 +- .../execution/QueryPlanFactoryTest.java | 40 +- .../sql/executor/execution/QueryPlanTest.java | 98 +- .../execution/StreamingQueryPlanTest.java | 8 +- .../pagination/CanPaginateVisitorTest.java | 299 +++-- .../pagination/PlanSerializerTest.java | 33 +- .../streaming/DefaultMetadataLogTest.java | 1 - .../MicroBatchStreamingExecutionTest.java | 18 +- .../expression/ExpressionNodeVisitorTest.java | 78 +- .../sql/expression/ExpressionTestBase.java | 1 - .../expression/HighlightExpressionTest.java | 46 +- .../sql/expression/NamedExpressionTest.java | 6 +- .../expression/ReferenceExpressionTest.java | 58 +- .../aggregation/AggregationTest.java | 61 +- .../aggregation/AvgAggregatorTest.java | 36 +- .../aggregation/CountAggregatorTest.java | 55 +- .../aggregation/MaxAggregatorTest.java | 33 +- .../aggregation/MinAggregatorTest.java | 33 +- .../aggregation/StdDevAggregatorTest.java | 9 +- .../aggregation/SumAggregatorTest.java | 46 +- .../aggregation/TakeAggregatorTest.java | 28 +- .../conditional/ConditionalFunctionTest.java | 47 +- .../conditional/cases/CaseClauseTest.java | 12 +- .../conditional/cases/WhenClauseTest.java | 2 - .../datetime/AddTimeAndSubTimeTest.java | 93 +- .../expression/datetime/ConvertTZTest.java | 133 +- .../datetime/DateAddAndAddDateTest.java | 24 +- .../sql/expression/datetime/DateDiffTest.java | 47 +- .../datetime/DateSubAndSubDateTest.java | 24 +- 
.../datetime/DateTimeFunctionTest.java | 1102 +++++++---------- .../sql/expression/datetime/DateTimeTest.java | 30 +- .../expression/datetime/DateTimeTestBase.java | 119 +- .../sql/expression/datetime/ExtractTest.java | 62 +- .../expression/datetime/FromUnixTimeTest.java | 80 +- .../datetime/IntervalClauseTest.java | 14 +- .../sql/expression/datetime/MakeDateTest.java | 44 +- .../sql/expression/datetime/MakeTimeTest.java | 40 +- .../datetime/NowLikeFunctionTest.java | 114 +- .../datetime/PeriodFunctionsTest.java | 38 +- .../expression/datetime/StrToDateTest.java | 141 +-- .../sql/expression/datetime/TimeDiffTest.java | 10 +- .../expression/datetime/TimeStampAddTest.java | 241 ++-- .../datetime/TimeStampDiffTest.java | 242 ++-- .../expression/datetime/TimestampTest.java | 139 ++- .../expression/datetime/ToSecondsTest.java | 31 +- .../datetime/UnixTimeStampTest.java | 57 +- .../datetime/UnixTwoWayConversionTest.java | 21 +- .../sql/expression/datetime/WeekdayTest.java | 111 +- .../sql/expression/datetime/YearweekTest.java | 95 +- .../function/BuiltinFunctionNameTest.java | 4 +- .../BuiltinFunctionRepositoryTest.java | 108 +- .../function/DefaultFunctionResolverTest.java | 92 +- .../function/FunctionDSLDefineTest.java | 7 +- .../function/FunctionDSLTestBase.java | 54 +- .../function/FunctionDSLimplTestBase.java | 21 +- .../function/FunctionDSLimplTwoArgTest.java | 1 - ...nctionDSLimplWithPropertiesNoArgsTest.java | 2 +- ...nctionDSLimplWithPropertiesOneArgTest.java | 5 +- ...nctionDSLimplWithPropertiesTwoArgTest.java | 4 +- ...ctionDSLimplWithPropertiesTwoArgsTest.java | 5 +- .../FunctionDSLnullMissingHandlingTest.java | 50 +- .../function/FunctionPropertiesTest.java | 21 +- .../function/FunctionSignatureTest.java | 7 +- .../function/OpenSearchFunctionsTest.java | 266 ++-- .../RelevanceFunctionResolverTest.java | 13 +- .../function/WideningTypeRuleTest.java | 67 +- .../arthmetic/ArithmeticFunctionTest.java | 157 +-- .../arthmetic/MathematicalFunctionTest.java | 1058 
+++++----------- .../convert/TypeCastOperatorTest.java | 10 +- .../BinaryPredicateOperatorTest.java | 246 ++-- .../predicate/UnaryPredicateOperatorTest.java | 113 +- .../expression/parse/GrokExpressionTest.java | 119 +- .../parse/PatternsExpressionTest.java | 61 +- .../expression/parse/RegexExpressionTest.java | 90 +- .../system/SystemFunctionsTest.java | 39 +- .../sql/expression/text/TextFunctionTest.java | 184 ++- .../window/CurrentRowWindowFrameTest.java | 69 +- .../AggregateWindowFunctionTest.java | 19 +- .../window/frame/PeerRowsWindowFrameTest.java | 178 +-- .../ranking/RankingWindowFunctionTest.java | 119 +- .../sql/monitor/AlwaysHealthyMonitorTest.java | 1 - .../sql/planner/DefaultImplementorTest.java | 194 ++- .../sql/planner/PlanContextTest.java | 3 +- .../opensearch/sql/planner/PlannerTest.java | 52 +- .../planner/logical/LogicalDedupeTest.java | 17 +- .../sql/planner/logical/LogicalEvalTest.java | 9 +- .../logical/LogicalPlanNodeVisitorTest.java | 119 +- .../planner/logical/LogicalRelationTest.java | 5 +- .../sql/planner/logical/LogicalSortTest.java | 1 - .../optimizer/LogicalPlanOptimizerTest.java | 251 ++-- .../optimizer/pattern/PatternsTest.java | 8 +- .../physical/AggregationOperatorTest.java | 1001 ++++++++------- .../planner/physical/DedupeOperatorTest.java | 13 +- .../planner/physical/EvalOperatorTest.java | 16 +- .../planner/physical/FilterOperatorTest.java | 36 +- .../planner/physical/LimitOperatorTest.java | 37 +- .../planner/physical/NestedOperatorTest.java | 410 +++--- .../physical/PhysicalPlanNodeVisitorTest.java | 64 +- .../planner/physical/PhysicalPlanTest.java | 53 +- .../physical/PhysicalPlanTestBase.java | 375 +++--- .../planner/physical/ProjectOperatorTest.java | 133 +- .../physical/RareTopNOperatorTest.java | 100 +- .../planner/physical/RemoveOperatorTest.java | 4 +- .../planner/physical/RenameOperatorTest.java | 35 +- .../planner/physical/SortOperatorTest.java | 7 +- .../planner/physical/ValuesOperatorTest.java | 12 +- 
.../planner/physical/WindowOperatorTest.java | 209 +++- .../physical/collector/RoundingTest.java | 7 +- .../datasource/DataSourceTableScanTest.java | 31 +- .../datasource/DataSourceTableTest.java | 16 +- .../assigner/SlidingWindowAssignerTest.java | 30 +- .../assigner/TumblingWindowAssignerTest.java | 16 +- .../sql/storage/TableScanOperatorTest.java | 56 +- .../bindingtuple/BindingTupleTest.java | 18 +- .../storage/write/TableWriteOperatorTest.java | 55 +- .../opensearch/sql/utils/ComparisonUtil.java | 52 +- .../sql/utils/DateTimeUtilsTest.java | 11 +- .../opensearch/sql/utils/MatcherUtils.java | 13 +- .../sql/utils/SystemIndexUtilsTest.java | 5 +- .../opensearch/sql/utils/TestOperator.java | 9 +- .../sql/executor/DefaultExecutionEngine.java | 8 +- .../sql/executor/DefaultQueryManager.java | 4 +- 133 files changed, 5589 insertions(+), 5640 deletions(-) diff --git a/build.gradle b/build.gradle index 71f94636b5..0c58fccfeb 100644 --- a/build.gradle +++ b/build.gradle @@ -84,21 +84,7 @@ repositories { spotless { java { target fileTree('.') { - include 'core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java', - 'core/src/test/java/org/opensearch/sql/data/**/*.java', - 'core/src/test/java/org/opensearch/sql/config/**/*.java', - 'core/src/test/java/org/opensearch/sql/analysis/**/*.java', - 'core/src/main/java/org/opensearch/sql/planner/**/*.java', - 'core/src/main/java/org/opensearch/sql/storage/**/*.java', - 'core/src/main/java/org/opensearch/sql/utils/**/*.java', - 'core/src/main/java/org/opensearch/sql/monitor/**/*.java', - 'core/src/main/java/org/opensearch/sql/expression/**/*.java', - 'core/src/main/java/org/opensearch/sql/executor/**/*.java', - 'core/src/main/java/org/opensearch/sql/exception/**/*.java', - 'core/src/main/java/org/opensearch/sql/analysis/**/*.java', - 'core/src/test/java/org/opensearch/sql/data/**/*.java', - 'core/src/test/java/org/opensearch/sql/datasource/**/*.java', - 'core/src/test/java/org/opensearch/sql/ast/**/*.java' + include 
'core/**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/core/build.gradle b/core/build.gradle index cf7f0b7a1c..0e563b274e 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -38,6 +38,7 @@ repositories { // spotless https://github.com/opensearch-project/sql/issues/1101 checkstyleTest.ignoreFailures = true checkstyleMain.ignoreFailures = true +checkstyleTestFixtures.ignoreFailures = true pitest { targetClasses = ['org.opensearch.sql.*'] diff --git a/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java b/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java index 4ceb387076..4f3056b0f7 100644 --- a/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java +++ b/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.dsl; import java.util.Arrays; @@ -63,9 +62,7 @@ import org.opensearch.sql.ast.tree.UnresolvedPlan; import org.opensearch.sql.ast.tree.Values; -/** - * Class of static methods to create specific node instances. - */ +/** Class of static methods to create specific node instances. */ @UtilityClass public class AstDSL { @@ -132,8 +129,9 @@ public static UnresolvedPlan rename(UnresolvedPlan input, Map... maps) { /** * Initialize Values node by rows of literals. - * @param values rows in which each row is a list of literal values - * @return Values node + * + * @param values rows in which each row is a list of literal values + * @return Values node */ @SafeVarargs public UnresolvedPlan values(List... values) { @@ -249,6 +247,8 @@ public static Function function(String funcName, UnresolvedExpression... funcArg } /** + * + * *
    * CASE
    *    WHEN search_condition THEN result_expr
@@ -257,12 +257,13 @@ public static Function function(String funcName, UnresolvedExpression... funcArg * END *
*/ - public UnresolvedExpression caseWhen(UnresolvedExpression elseClause, - When... whenClauses) { + public UnresolvedExpression caseWhen(UnresolvedExpression elseClause, When... whenClauses) { return caseWhen(null, elseClause, whenClauses); } /** + * + * *
    * CASE case_value_expr
    *     WHEN compare_expr THEN result_expr
@@ -271,9 +272,8 @@ public UnresolvedExpression caseWhen(UnresolvedExpression elseClause,
    * END
    * 
*/ - public UnresolvedExpression caseWhen(UnresolvedExpression caseValueExpr, - UnresolvedExpression elseClause, - When... whenClauses) { + public UnresolvedExpression caseWhen( + UnresolvedExpression caseValueExpr, UnresolvedExpression elseClause, When... whenClauses) { return new Case(caseValueExpr, Arrays.asList(whenClauses), elseClause); } @@ -285,19 +285,20 @@ public When when(UnresolvedExpression condition, UnresolvedExpression result) { return new When(condition, result); } - public UnresolvedExpression highlight(UnresolvedExpression fieldName, - java.util.Map arguments) { + public UnresolvedExpression highlight( + UnresolvedExpression fieldName, java.util.Map arguments) { return new HighlightFunction(fieldName, arguments); } - public UnresolvedExpression score(UnresolvedExpression relevanceQuery, - Literal relevanceFieldWeight) { + public UnresolvedExpression score( + UnresolvedExpression relevanceQuery, Literal relevanceFieldWeight) { return new ScoreFunction(relevanceQuery, relevanceFieldWeight); } - public UnresolvedExpression window(UnresolvedExpression function, - List partitionByList, - List> sortList) { + public UnresolvedExpression window( + UnresolvedExpression function, + List partitionByList, + List> sortList) { return new WindowFunction(function, partitionByList, sortList); } @@ -332,9 +333,10 @@ public static UnresolvedExpression compare( return new Compare(operator, left, right); } - public static UnresolvedExpression between(UnresolvedExpression value, - UnresolvedExpression lowerBound, - UnresolvedExpression upperBound) { + public static UnresolvedExpression between( + UnresolvedExpression value, + UnresolvedExpression lowerBound, + UnresolvedExpression upperBound) { return new Between(value, lowerBound, upperBound); } @@ -402,9 +404,7 @@ public static List defaultFieldsArgs() { return exprList(argument("exclude", booleanLiteral(false))); } - /** - * Default Stats Command Args. - */ + /** Default Stats Command Args. 
*/ public static List defaultStatsArgs() { return exprList( argument("partitions", intLiteral(1)), @@ -413,9 +413,7 @@ public static List defaultStatsArgs() { argument("dedupsplit", booleanLiteral(false))); } - /** - * Default Dedup Command Args. - */ + /** Default Dedup Command Args. */ public static List defaultDedupArgs() { return exprList( argument("number", intLiteral(1)), @@ -451,9 +449,12 @@ public static List defaultTopArgs() { return exprList(argument("noOfResults", intLiteral(10))); } - public static RareTopN rareTopN(UnresolvedPlan input, CommandType commandType, - List noOfResults, List groupList, - Field... fields) { + public static RareTopN rareTopN( + UnresolvedPlan input, + CommandType commandType, + List noOfResults, + List groupList, + Field... fields) { return new RareTopN(input, commandType, noOfResults, Arrays.asList(fields), groupList) .attach(input); } @@ -462,11 +463,12 @@ public static Limit limit(UnresolvedPlan input, Integer limit, Integer offset) { return new Limit(limit, offset).attach(input); } - public static Parse parse(UnresolvedPlan input, ParseMethod parseMethod, - UnresolvedExpression sourceField, - Literal pattern, - java.util.Map arguments) { + public static Parse parse( + UnresolvedPlan input, + ParseMethod parseMethod, + UnresolvedExpression sourceField, + Literal pattern, + java.util.Map arguments) { return new Parse(parseMethod, sourceField, pattern, arguments, input); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java b/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java index 73c6e3782a..852b61cfa8 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java @@ -75,17 +75,19 @@ public Optional first() { } /** + *
    * Get rest parts of the qualified name. Assume that there must be remaining parts so caller is
-   * responsible for the check (first() or size() must be called first).
- * For example:
- * {@code
- *   QualifiedName name = ...
- *   Optional first = name.first();
- *   if (first.isPresent()) {
- *   name.rest() ...
- *   }
+ * responsible for the check (first() or size() must be called first). + * For example: + * {@code + * QualifiedName name = ... + * Optional first = name.first(); + * if (first.isPresent()) { + * name.rest() ... + * } * } * @return rest part(s) + *
*/ public QualifiedName rest() { return QualifiedName.of(parts.subList(1, parts.size())); diff --git a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java index f5ac4d493b..f332867645 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java @@ -29,8 +29,9 @@ public int compareTo(ExprValue other) { } /** - * The customize equals logic. + * The customize equals logic.
* The table below list the NULL and MISSING handling logic. + * * * * diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java index c22b423c7d..d808af49b1 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java @@ -34,8 +34,7 @@ public ExprTimeValue(String time) { this.time = LocalTime.parse(time, DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL); } catch (DateTimeParseException e) { throw new SemanticCheckException( - String.format( - "time:%s in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", time)); + String.format("time:%s in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", time)); } } diff --git a/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java b/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java index c9b5c29157..723d09956d 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java +++ b/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -13,6 +12,7 @@ /** * The definition of widening type rule for expression value. + * *
A
* * @@ -31,8 +31,8 @@ public class WideningTypeRule { public static final int TYPE_EQUAL = 0; /** - * The widening distance is calculated from the leaf to root. - * e.g. distance(INTEGER, FLOAT) = 2, but distance(FLOAT, INTEGER) = IMPOSSIBLE_WIDENING + * The widening distance is calculated from the leaf to root. e.g. distance(INTEGER, FLOAT) = 2, + * but distance(FLOAT, INTEGER) = IMPOSSIBLE_WIDENING * * @param type1 widen from type * @param type2 widen to type @@ -50,14 +50,15 @@ private static int distance(ExprType type1, ExprType type2, int distance) { } else { return type1.getParent().stream() .map(parentOfType1 -> distance(parentOfType1, type2, distance + 1)) - .reduce(Math::min).get(); + .reduce(Math::min) + .get(); } } /** * The max type among two types. The max is defined as follow if type1 could widen to type2, then - * max is type2, vice versa if type1 couldn't widen to type2 and type2 could't widen to type1, then - * throw {@link ExpressionEvaluationException}. + * max is type2, vice versa if type1 couldn't widen to type2 and type2 could't widen to type1, + * then throw {@link ExpressionEvaluationException}. 
* * @param type1 type1 * @param type2 type2 diff --git a/core/src/test/java/org/opensearch/sql/executor/ExplainTest.java b/core/src/test/java/org/opensearch/sql/executor/ExplainTest.java index 7d438c870d..897347f22d 100644 --- a/core/src/test/java/org/opensearch/sql/executor/ExplainTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/ExplainTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.executor; import static java.util.Collections.emptyList; @@ -68,36 +67,30 @@ void can_explain_project_filter_table_scan() { DSL.equal(ref("balance", INTEGER), literal(10000)), DSL.greater(ref("age", INTEGER), literal(30))); NamedExpression[] projectList = { - named("full_name", ref("full_name", STRING), "name"), - named("age", ref("age", INTEGER)) + named("full_name", ref("full_name", STRING), "name"), named("age", ref("age", INTEGER)) }; - PhysicalPlan plan = - project( - filter( - tableScan, - filterExpr), - projectList); + PhysicalPlan plan = project(filter(tableScan, filterExpr), projectList); assertEquals( new ExplainResponse( new ExplainResponseNode( "ProjectOperator", Map.of("fields", "[name, age]"), - singletonList(new ExplainResponseNode( - "FilterOperator", - Map.of("conditions", "and(=(balance, 10000), >(age, 30))"), - singletonList(tableScan.explainNode()))))), + singletonList( + new ExplainResponseNode( + "FilterOperator", + Map.of("conditions", "and(=(balance, 10000), >(age, 30))"), + singletonList(tableScan.explainNode()))))), explain.apply(plan)); } @Test void can_explain_aggregations() { List aggExprs = List.of(ref("balance", DOUBLE)); - List aggList = List.of( - named("avg(balance)", DSL.avg(aggExprs.toArray(new Expression[0])))); - List groupByList = List.of( - named("state", ref("state", STRING))); + List aggList = + List.of(named("avg(balance)", DSL.avg(aggExprs.toArray(new Expression[0])))); + List groupByList = List.of(named("state", ref("state", STRING))); PhysicalPlan plan = agg(new FakeTableScan(), 
aggList, groupByList); assertEquals( @@ -120,11 +113,7 @@ void can_explain_rare_top_n() { new ExplainResponse( new ExplainResponseNode( "RareTopNOperator", - Map.of( - "commandType", TOP, - "noOfResults", 10, - "fields", "[state]", - "groupBy", "[]"), + Map.of("commandType", TOP, "noOfResults", 10, "fields", "[state]", "groupBy", "[]"), singletonList(tableScan.explainNode()))), explain.apply(plan)); } @@ -132,22 +121,27 @@ void can_explain_rare_top_n() { @Test void can_explain_window() { List partitionByList = List.of(DSL.ref("state", STRING)); - List> sortList = List.of( - ImmutablePair.of(DEFAULT_ASC, ref("age", INTEGER))); + List> sortList = + List.of(ImmutablePair.of(DEFAULT_ASC, ref("age", INTEGER))); - PhysicalPlan plan = window(tableScan, named(DSL.rank()), - new WindowDefinition(partitionByList, sortList)); + PhysicalPlan plan = + window(tableScan, named(DSL.rank()), new WindowDefinition(partitionByList, sortList)); assertEquals( new ExplainResponse( new ExplainResponseNode( "WindowOperator", Map.of( - "function", "rank()", - "definition", Map.of( - "partitionBy", "[state]", - "sortList", Map.of( - "age", Map.of( + "function", + "rank()", + "definition", + Map.of( + "partitionBy", + "[state]", + "sortList", + Map.of( + "age", + Map.of( "sortOrder", "ASC", "nullOrder", "NULL_FIRST")))), singletonList(tableScan.explainNode()))), @@ -157,60 +151,61 @@ void can_explain_window() { @Test void can_explain_other_operators() { ReferenceExpression[] removeList = {ref("state", STRING)}; - Map renameMapping = Map.of( - ref("state", STRING), ref("s", STRING)); - Pair evalExprs = ImmutablePair.of( - ref("age", INTEGER), DSL.add(ref("age", INTEGER), literal(2))); + Map renameMapping = + Map.of(ref("state", STRING), ref("s", STRING)); + Pair evalExprs = + ImmutablePair.of(ref("age", INTEGER), DSL.add(ref("age", INTEGER), literal(2))); Expression[] dedupeList = {ref("age", INTEGER)}; - Pair sortList = ImmutablePair.of( - DEFAULT_ASC, ref("age", INTEGER)); + Pair sortList = 
ImmutablePair.of(DEFAULT_ASC, ref("age", INTEGER)); List values = List.of(literal("WA"), literal(30)); PhysicalPlan plan = remove( rename( - eval( - dedupe( - sort( - values(values), - sortList), - dedupeList), - evalExprs), - renameMapping), - removeList); + eval(dedupe(sort(values(values), sortList), dedupeList), evalExprs), renameMapping), + removeList); assertEquals( new ExplainResponse( new ExplainResponseNode( "RemoveOperator", Map.of("removeList", "[state]"), - singletonList(new ExplainResponseNode( - "RenameOperator", - Map.of("mapping", Map.of("state", "s")), - singletonList(new ExplainResponseNode( - "EvalOperator", - Map.of("expressions", Map.of("age", "+(age, 2)")), - singletonList(new ExplainResponseNode( - "DedupeOperator", - Map.of( - "dedupeList", "[age]", - "allowedDuplication", 1, - "keepEmpty", false, - "consecutive", false), - singletonList(new ExplainResponseNode( - "SortOperator", - Map.of( - "sortList", Map.of( - "age", Map.of( - "sortOrder", "ASC", - "nullOrder", "NULL_FIRST"))), - singletonList(new ExplainResponseNode( - "ValuesOperator", - Map.of("values", List.of(values)), - emptyList()))))))))))) - ), - explain.apply(plan) - ); + singletonList( + new ExplainResponseNode( + "RenameOperator", + Map.of("mapping", Map.of("state", "s")), + singletonList( + new ExplainResponseNode( + "EvalOperator", + Map.of("expressions", Map.of("age", "+(age, 2)")), + singletonList( + new ExplainResponseNode( + "DedupeOperator", + Map.of( + "dedupeList", + "[age]", + "allowedDuplication", + 1, + "keepEmpty", + false, + "consecutive", + false), + singletonList( + new ExplainResponseNode( + "SortOperator", + Map.of( + "sortList", + Map.of( + "age", + Map.of( + "sortOrder", "ASC", + "nullOrder", "NULL_FIRST"))), + singletonList( + new ExplainResponseNode( + "ValuesOperator", + Map.of("values", List.of(values)), + emptyList())))))))))))), + explain.apply(plan)); } @Test @@ -222,15 +217,13 @@ void can_explain_limit() { "LimitOperator", Map.of("limit", 10, 
"offset", 5), singletonList(tableScan.explainNode()))), - explain.apply(plan) - ); + explain.apply(plan)); } @Test void can_explain_nested() { Set nestedOperatorArgs = Set.of("message.info", "message"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.info")); + Map> groupedFieldsByPath = Map.of("message", List.of("message.info")); PhysicalPlan plan = nested(tableScan, nestedOperatorArgs, groupedFieldsByPath); assertEquals( @@ -239,8 +232,7 @@ void can_explain_nested() { "NestedOperator", Map.of("nested", Set.of("message.info", "message")), singletonList(tableScan.explainNode()))), - explain.apply(plan) - ); + explain.apply(plan)); } private static class FakeTableScan extends TableScanOperator { @@ -262,14 +254,11 @@ public String toString() { /** Used to ignore table scan which is duplicate but required for each operator test. */ public ExplainResponseNode explainNode() { return new ExplainResponseNode( - "FakeTableScan", - Map.of("request", "Fake DSL request"), - emptyList()); + "FakeTableScan", Map.of("request", "Fake DSL request"), emptyList()); } public String explain() { return "explain"; } } - } diff --git a/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java b/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java index 1510b304e6..f6b66b4e77 100644 --- a/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java @@ -37,73 +37,52 @@ class QueryServiceTest { private QueryService queryService; - @Mock - private ExecutionEngine executionEngine; + @Mock private ExecutionEngine executionEngine; - @Mock - private Analyzer analyzer; + @Mock private Analyzer analyzer; - @Mock - private Planner planner; + @Mock private Planner planner; - @Mock - private UnresolvedPlan ast; + @Mock private UnresolvedPlan ast; - @Mock - private LogicalPlan logicalPlan; + @Mock private LogicalPlan logicalPlan; - @Mock - private PhysicalPlan plan; + @Mock 
private PhysicalPlan plan; - @Mock - private ExecutionEngine.Schema schema; + @Mock private ExecutionEngine.Schema schema; - @Mock - private PlanContext planContext; + @Mock private PlanContext planContext; - @Mock - private Split split; + @Mock private Split split; @Test public void executeWithoutContext() { - queryService() - .executeSuccess() - .handledByOnResponse(); + queryService().executeSuccess().handledByOnResponse(); } @Test public void executeWithContext() { - queryService() - .executeSuccess(split) - .handledByOnResponse(); + queryService().executeSuccess(split).handledByOnResponse(); } @Test public void testExplainShouldPass() { - queryService() - .explainSuccess() - .handledByExplainOnResponse(); + queryService().explainSuccess().handledByExplainOnResponse(); } @Test public void testExecuteWithExceptionShouldBeCaughtByHandler() { - queryService() - .executeFail() - .handledByOnFailure(); + queryService().executeFail().handledByOnFailure(); } @Test public void explainWithIllegalQueryShouldBeCaughtByHandler() { - queryService() - .explainFail() - .handledByExplainOnFailure(); + queryService().explainFail().handledByExplainOnFailure(); } @Test public void analyzeExceptionShouldBeCached() { - queryService() - .analyzeFail() - .handledByOnFailure(); + queryService().analyzeFail().handledByOnFailure(); } Helper queryService() { @@ -130,13 +109,14 @@ Helper executeSuccess() { Helper executeSuccess(Split split) { this.split = Optional.ofNullable(split); doAnswer( - invocation -> { - ResponseListener listener = invocation.getArgument(2); - listener.onResponse( - new ExecutionEngine.QueryResponse(schema, Collections.emptyList(), - Cursor.None)); - return null; - }) + invocation -> { + ResponseListener listener = + invocation.getArgument(2); + listener.onResponse( + new ExecutionEngine.QueryResponse( + schema, Collections.emptyList(), Cursor.None)); + return null; + }) .when(executionEngine) .execute(any(), any(), any()); 
lenient().when(planContext.getSplit()).thenReturn(this.split); @@ -145,9 +125,7 @@ Helper executeSuccess(Split split) { } Helper analyzeFail() { - doThrow(new IllegalStateException("analyze exception")) - .when(analyzer) - .analyze(any(), any()); + doThrow(new IllegalStateException("analyze exception")).when(analyzer).analyze(any(), any()); return this; } @@ -162,14 +140,14 @@ Helper executeFail() { Helper explainSuccess() { doAnswer( - invocation -> { - ResponseListener listener = - invocation.getArgument(1); - listener.onResponse( - new ExecutionEngine.ExplainResponse( - new ExecutionEngine.ExplainResponseNode("test"))); - return null; - }) + invocation -> { + ResponseListener listener = + invocation.getArgument(1); + listener.onResponse( + new ExecutionEngine.ExplainResponse( + new ExecutionEngine.ExplainResponseNode("test"))); + return null; + }) .when(executionEngine) .explain(any(), any()); @@ -184,36 +162,37 @@ Helper explainFail() { return this; } - void handledByOnResponse() { - ResponseListener responseListener = new ResponseListener<>() { - @Override - public void onResponse(ExecutionEngine.QueryResponse pplQueryResponse) { - assertNotNull(pplQueryResponse); - } - - @Override - public void onFailure(Exception e) { - fail(); - } - }; + ResponseListener responseListener = + new ResponseListener<>() { + @Override + public void onResponse(ExecutionEngine.QueryResponse pplQueryResponse) { + assertNotNull(pplQueryResponse); + } + + @Override + public void onFailure(Exception e) { + fail(); + } + }; split.ifPresentOrElse( split -> queryService.executePlan(logicalPlan, planContext, responseListener), () -> queryService.execute(ast, responseListener)); } void handledByOnFailure() { - ResponseListener responseListener = new ResponseListener<>() { - @Override - public void onResponse(ExecutionEngine.QueryResponse pplQueryResponse) { - fail(); - } - - @Override - public void onFailure(Exception e) { - assertTrue(e instanceof IllegalStateException); - } - }; + 
ResponseListener responseListener = + new ResponseListener<>() { + @Override + public void onResponse(ExecutionEngine.QueryResponse pplQueryResponse) { + fail(); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e instanceof IllegalStateException); + } + }; split.ifPresentOrElse( split -> queryService.executePlan(logicalPlan, planContext, responseListener), () -> queryService.execute(ast, responseListener)); @@ -250,6 +229,5 @@ public void onFailure(Exception e) { } }); } - } } diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/CommandPlanTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/CommandPlanTest.java index aa300cb0da..aff5a0d867 100644 --- a/core/src/test/java/org/opensearch/sql/executor/execution/CommandPlanTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/execution/CommandPlanTest.java @@ -47,8 +47,7 @@ public void execute_without_error() { public void execute_with_error() { QueryService qs = mock(QueryService.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)); ResponseListener listener = mock(ResponseListener.class); - doThrow(new RuntimeException()) - .when(qs).executePlan(any(LogicalPlan.class), any(), any()); + doThrow(new RuntimeException()).when(qs).executePlan(any(LogicalPlan.class), any(), any()); new CommandPlan(QueryId.queryId(), mock(UnresolvedPlan.class), qs, listener).execute(); @@ -62,9 +61,12 @@ public void explain_not_supported() { ResponseListener listener = mock(ResponseListener.class); ResponseListener explainListener = mock(ResponseListener.class); - var exception = assertThrows(Throwable.class, () -> - new CommandPlan(QueryId.queryId(), mock(UnresolvedPlan.class), qs, listener) - .explain(explainListener)); + var exception = + assertThrows( + Throwable.class, + () -> + new CommandPlan(QueryId.queryId(), mock(UnresolvedPlan.class), qs, listener) + .explain(explainListener)); assertEquals("CommandPlan does not support explain", exception.getMessage()); 
verify(listener, never()).onResponse(any()); diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/ExplainPlanTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/ExplainPlanTest.java index 54b4f24db0..cdb7f9dcb8 100644 --- a/core/src/test/java/org/opensearch/sql/executor/execution/ExplainPlanTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/execution/ExplainPlanTest.java @@ -25,14 +25,11 @@ @ExtendWith(MockitoExtension.class) public class ExplainPlanTest { - @Mock - private QueryId queryId; + @Mock private QueryId queryId; - @Mock - private QueryPlan queryPlan; + @Mock private QueryPlan queryPlan; - @Mock - private ResponseListener explainListener; + @Mock private ResponseListener explainListener; @Test public void execute() { diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/IntervalTriggerExecutionTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/IntervalTriggerExecutionTest.java index e0638ba88f..9eb99d37e3 100644 --- a/core/src/test/java/org/opensearch/sql/executor/execution/IntervalTriggerExecutionTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/execution/IntervalTriggerExecutionTest.java @@ -18,16 +18,12 @@ public class IntervalTriggerExecutionTest { @Test void executeTaskWithInterval() { - triggerTask(2) - .taskRun(1) - .aroundInterval(); + triggerTask(2).taskRun(1).aroundInterval(); } @Test void continueExecuteIfTaskRunningLongerThanInterval() { - triggerTask(1) - .taskRun(2) - .aroundTaskRuntime(); + triggerTask(1).taskRun(2).aroundTaskRuntime(); } Helper triggerTask(long interval) { diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java index 2d346e4c2a..5aed9acc63 100644 --- a/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java +++ 
b/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java @@ -41,20 +41,15 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class QueryPlanFactoryTest { - @Mock - private UnresolvedPlan plan; + @Mock private UnresolvedPlan plan; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private ResponseListener queryListener; + @Mock private ResponseListener queryListener; - @Mock - private ResponseListener explainListener; + @Mock private ResponseListener explainListener; - @Mock - private ExecutionEngine.QueryResponse queryResponse; + @Mock private ExecutionEngine.QueryResponse queryResponse; private QueryPlanFactory factory; @@ -81,14 +76,11 @@ public void create_from_explain_should_success() { @Test public void create_from_cursor_should_success() { - AbstractPlan queryExecution = factory.create("", false, - queryListener, explainListener); - AbstractPlan explainExecution = factory.create("", true, - queryListener, explainListener); + AbstractPlan queryExecution = factory.create("", false, queryListener, explainListener); + AbstractPlan explainExecution = factory.create("", true, queryListener, explainListener); assertAll( () -> assertTrue(queryExecution instanceof QueryPlan), - () -> assertTrue(explainExecution instanceof ExplainPlan) - ); + () -> assertTrue(explainExecution instanceof ExplainPlan)); } @Test @@ -96,8 +88,9 @@ public void create_from_query_without_query_listener_should_throw_exception() { Statement query = new Query(plan, 0); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, () -> factory.create( - query, Optional.empty(), Optional.empty())); + assertThrows( + IllegalArgumentException.class, + () -> factory.create(query, Optional.empty(), Optional.empty())); assertEquals("[BUG] query listener must be not null", exception.getMessage()); } @@ -106,8 +99,9 @@ public void 
create_from_explain_without_explain_listener_should_throw_exception( Statement query = new Explain(new Query(plan, 0)); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, () -> factory.create( - query, Optional.empty(), Optional.empty())); + assertThrows( + IllegalArgumentException.class, + () -> factory.create(query, Optional.empty(), Optional.empty())); assertEquals("[BUG] explain listener must be not null", exception.getMessage()); } @@ -143,9 +137,9 @@ public void create_query_with_fetch_size_which_cannot_be_paged() { when(plan.accept(any(CanPaginateVisitor.class), any())).thenReturn(Boolean.FALSE); factory = new QueryPlanFactory(queryService); Statement query = new Query(plan, 10); - assertThrows(UnsupportedCursorRequestException.class, - () -> factory.create(query, - Optional.of(queryListener), Optional.empty())); + assertThrows( + UnsupportedCursorRequestException.class, + () -> factory.create(query, Optional.of(queryListener), Optional.empty())); } @Test diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanTest.java index a0a98e2be7..57ff5c70e9 100644 --- a/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanTest.java @@ -34,20 +34,15 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class QueryPlanTest { - @Mock - private QueryId queryId; + @Mock private QueryId queryId; - @Mock - private UnresolvedPlan plan; + @Mock private UnresolvedPlan plan; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private ResponseListener explainListener; + @Mock private ResponseListener explainListener; - @Mock - private ResponseListener queryListener; + @Mock private ResponseListener queryListener; @Test public void execute_no_page_size() { @@ -67,53 +62,62 @@ public void 
explain_no_page_size() { @Test public void can_execute_paginated_plan() { - var listener = new ResponseListener() { - @Override - public void onResponse(ExecutionEngine.QueryResponse response) { - assertNotNull(response); - } - - @Override - public void onFailure(Exception e) { - fail(); - } - }; - var plan = new QueryPlan(QueryId.queryId(), mock(UnresolvedPlan.class), 10, - queryService, listener); + var listener = + new ResponseListener() { + @Override + public void onResponse(ExecutionEngine.QueryResponse response) { + assertNotNull(response); + } + + @Override + public void onFailure(Exception e) { + fail(); + } + }; + var plan = + new QueryPlan(QueryId.queryId(), mock(UnresolvedPlan.class), 10, queryService, listener); plan.execute(); } @Test // Same as previous test, but with incomplete QueryService public void can_handle_error_while_executing_plan() { - var listener = new ResponseListener() { - @Override - public void onResponse(ExecutionEngine.QueryResponse response) { - fail(); - } - - @Override - public void onFailure(Exception e) { - assertNotNull(e); - } - }; - var plan = new QueryPlan(QueryId.queryId(), mock(UnresolvedPlan.class), 10, - new QueryService(null, new DefaultExecutionEngine(), null), listener); + var listener = + new ResponseListener() { + @Override + public void onResponse(ExecutionEngine.QueryResponse response) { + fail(); + } + + @Override + public void onFailure(Exception e) { + assertNotNull(e); + } + }; + var plan = + new QueryPlan( + QueryId.queryId(), + mock(UnresolvedPlan.class), + 10, + new QueryService(null, new DefaultExecutionEngine(), null), + listener); plan.execute(); } @Test public void explain_is_not_supported_for_pagination() { - new QueryPlan(null, null, 0, null, null).explain(new ResponseListener<>() { - @Override - public void onResponse(ExecutionEngine.ExplainResponse response) { - fail(); - } - - @Override - public void onFailure(Exception e) { - assertTrue(e instanceof NotImplementedException); - } - }); + new 
QueryPlan(null, null, 0, null, null) + .explain( + new ResponseListener<>() { + @Override + public void onResponse(ExecutionEngine.ExplainResponse response) { + fail(); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e instanceof NotImplementedException); + } + }); } } diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/StreamingQueryPlanTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/StreamingQueryPlanTest.java index 7357e99d18..2e8666aea4 100644 --- a/core/src/test/java/org/opensearch/sql/executor/execution/StreamingQueryPlanTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/execution/StreamingQueryPlanTest.java @@ -49,9 +49,7 @@ class StreamingQueryPlanTest { @Test void executionSuccess() throws InterruptedException { - streamingQuery() - .streamingSource() - .shouldSuccess(); + streamingQuery().streamingSource().shouldSuccess(); } @Test @@ -70,9 +68,7 @@ void failIfNoStreamingSource() throws InterruptedException { @Test void taskExecutionShouldNotCallListener() throws InterruptedException { - streamingQuery() - .streamingSource() - .taskExecutionShouldNotCallListener(); + streamingQuery().streamingSource().taskExecutionShouldNotCallListener(); } Helper streamingQuery() { diff --git a/core/src/test/java/org/opensearch/sql/executor/pagination/CanPaginateVisitorTest.java b/core/src/test/java/org/opensearch/sql/executor/pagination/CanPaginateVisitorTest.java index 003967d6aa..5f2ba86c2f 100644 --- a/core/src/test/java/org/opensearch/sql/executor/pagination/CanPaginateVisitorTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/pagination/CanPaginateVisitorTest.java @@ -121,8 +121,7 @@ public void allow_query_with_select_fields_and_from() { @Test // select x public void allow_query_without_from() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", intLiteral(1))); + var plan = project(values(List.of(intLiteral(1))), alias("1", intLiteral(1))); 
assertTrue(plan.accept(visitor, null)); } @@ -130,106 +129,124 @@ public void allow_query_without_from() { public void visitField() { // test combinations of acceptable and not acceptable args for coverage assertAll( - () -> assertFalse(project(relation("dummy"), - field(map("1", "2"), argument("name", intLiteral(0)))) - .accept(visitor, null)), - () -> assertFalse(project(relation("dummy"), - field("field", new Argument("", new Literal(1, DataType.INTEGER) { - @Override - public List getChild() { - return List.of(map("1", "2")); - } - }))) - .accept(visitor, null)) - ); + () -> + assertFalse( + project(relation("dummy"), field(map("1", "2"), argument("name", intLiteral(0)))) + .accept(visitor, null)), + () -> + assertFalse( + project( + relation("dummy"), + field( + "field", + new Argument( + "", + new Literal(1, DataType.INTEGER) { + @Override + public List getChild() { + return List.of(map("1", "2")); + } + }))) + .accept(visitor, null))); } @Test public void visitAlias() { // test combinations of acceptable and not acceptable args for coverage assertAll( - () -> assertFalse(project(relation("dummy"), - alias("pew", map("1", "2"), "pew")) - .accept(visitor, null)), - () -> assertFalse(project(relation("dummy"), new Alias("pew", field("pew")) { - @Override - public List getChild() { - return List.of(map("1", "2")); - } - }) - .accept(visitor, null)) - ); + () -> + assertFalse( + project(relation("dummy"), alias("pew", map("1", "2"), "pew")) + .accept(visitor, null)), + () -> + assertFalse( + project( + relation("dummy"), + new Alias("pew", field("pew")) { + @Override + public List getChild() { + return List.of(map("1", "2")); + } + }) + .accept(visitor, null))); } @Test // select a = b public void visitEqualTo() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", equalTo(intLiteral(1), intLiteral(1)))); + var plan = + project(values(List.of(intLiteral(1))), alias("1", equalTo(intLiteral(1), intLiteral(1)))); assertTrue(plan.accept(visitor, null)); 
} @Test // select interval public void visitInterval() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", intervalLiteral(intLiteral(1), DataType.INTEGER, "days"))); + var plan = + project( + values(List.of(intLiteral(1))), + alias("1", intervalLiteral(intLiteral(1), DataType.INTEGER, "days"))); assertTrue(plan.accept(visitor, null)); } @Test // select a != b public void visitCompare() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", compare("!=", intLiteral(1), intLiteral(1)))); + var plan = + project( + values(List.of(intLiteral(1))), + alias("1", compare("!=", intLiteral(1), intLiteral(1)))); assertTrue(plan.accept(visitor, null)); } @Test // select NOT a public void visitNot() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", not(booleanLiteral(true)))); + var plan = project(values(List.of(intLiteral(1))), alias("1", not(booleanLiteral(true)))); assertTrue(plan.accept(visitor, null)); } @Test // select a OR b public void visitOr() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", or(booleanLiteral(true), booleanLiteral(false)))); + var plan = + project( + values(List.of(intLiteral(1))), + alias("1", or(booleanLiteral(true), booleanLiteral(false)))); assertTrue(plan.accept(visitor, null)); } @Test // select a AND b public void visitAnd() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", and(booleanLiteral(true), booleanLiteral(false)))); + var plan = + project( + values(List.of(intLiteral(1))), + alias("1", and(booleanLiteral(true), booleanLiteral(false)))); assertTrue(plan.accept(visitor, null)); } @Test // select a XOR b public void visitXor() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", xor(booleanLiteral(true), booleanLiteral(false)))); + var plan = + project( + values(List.of(intLiteral(1))), + alias("1", xor(booleanLiteral(true), booleanLiteral(false)))); assertTrue(plan.accept(visitor, null)); } @Test // select f() public void visitFunction() { 
- var plan = project(values(List.of(intLiteral(1))), - function("func")); + var plan = project(values(List.of(intLiteral(1))), function("func")); assertTrue(plan.accept(visitor, null)); } @Test // select nested() ... public void visitNested() { - var plan = project(values(List.of(intLiteral(1))), - function("nested")); + var plan = project(values(List.of(intLiteral(1))), function("nested")); assertFalse(plan.accept(visitor, null)); } @@ -238,30 +255,37 @@ public void visitNested() { public void visitIn() { // test combinations of acceptable and not acceptable args for coverage assertAll( - () -> assertTrue(project(values(List.of(intLiteral(1))), alias("1", in(field("a")))) - .accept(visitor, null)), - () -> assertFalse(project(values(List.of(intLiteral(1))), - alias("1", in(field("a"), map("1", "2")))) - .accept(visitor, null)), - () -> assertFalse(project(values(List.of(intLiteral(1))), - alias("1", in(map("1", "2"), field("a")))) - .accept(visitor, null)) - ); + () -> + assertTrue( + project(values(List.of(intLiteral(1))), alias("1", in(field("a")))) + .accept(visitor, null)), + () -> + assertFalse( + project(values(List.of(intLiteral(1))), alias("1", in(field("a"), map("1", "2")))) + .accept(visitor, null)), + () -> + assertFalse( + project(values(List.of(intLiteral(1))), alias("1", in(map("1", "2"), field("a")))) + .accept(visitor, null))); } @Test // select a BETWEEN 1 AND 2 public void visitBetween() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", between(field("a"), intLiteral(1), intLiteral(2)))); + var plan = + project( + values(List.of(intLiteral(1))), + alias("1", between(field("a"), intLiteral(1), intLiteral(2)))); assertTrue(plan.accept(visitor, null)); } @Test // select a CASE 1 WHEN 2 public void visitCase() { - var plan = project(values(List.of(intLiteral(1))), - alias("1", caseWhen(intLiteral(1), when(intLiteral(3), intLiteral(4))))); + var plan = + project( + values(List.of(intLiteral(1))), + alias("1", caseWhen(intLiteral(1), 
when(intLiteral(3), intLiteral(4))))); assertTrue(plan.accept(visitor, null)); } @@ -270,21 +294,33 @@ public void visitCase() { public void visitCast() { // test combinations of acceptable and not acceptable args for coverage assertAll( - () -> assertTrue(project(values(List.of(intLiteral(1))), - alias("1", cast(intLiteral(2), stringLiteral("int")))) - .accept(visitor, null)), - () -> assertFalse(project(values(List.of(intLiteral(1))), - alias("1", cast(intLiteral(2), new Literal(1, DataType.INTEGER) { - @Override - public List getChild() { - return List.of(map("1", "2")); - } - }))) - .accept(visitor, null)), - () -> assertFalse(project(values(List.of(intLiteral(1))), - alias("1", cast(map("1", "2"), stringLiteral("int")))) - .accept(visitor, null)) - ); + () -> + assertTrue( + project( + values(List.of(intLiteral(1))), + alias("1", cast(intLiteral(2), stringLiteral("int")))) + .accept(visitor, null)), + () -> + assertFalse( + project( + values(List.of(intLiteral(1))), + alias( + "1", + cast( + intLiteral(2), + new Literal(1, DataType.INTEGER) { + @Override + public List getChild() { + return List.of(map("1", "2")); + } + }))) + .accept(visitor, null)), + () -> + assertFalse( + project( + values(List.of(intLiteral(1))), + alias("1", cast(map("1", "2"), stringLiteral("int")))) + .accept(visitor, null))); } @Test @@ -305,20 +341,22 @@ public void reject_query_with_eval() { // simple_query_string(["Tags" ^ 1.5, "Title", "Body" 4.2], "taste") // and Tags like "% % %" and Title like "%"; public void accept_query_with_highlight_and_relevance_func() { - var plan = project( - filter( - relation("beer.stackexchange"), - and( + var plan = + project( + filter( + relation("beer.stackexchange"), and( - function("like", qualifiedName("Tags"), stringLiteral("% % %")), - function("like", qualifiedName("Title"), stringLiteral("%"))), - function("simple_query_string", - unresolvedArg("fields", - new RelevanceFieldList(Map.of("Title", 1.0F, "Body", 4.2F, "Tags", 1.5F))), - 
unresolvedArg("query", - stringLiteral("taste"))))), - alias("highlight(\"Body\")", - highlight(stringLiteral("Body"), Map.of()))); + and( + function("like", qualifiedName("Tags"), stringLiteral("% % %")), + function("like", qualifiedName("Title"), stringLiteral("%"))), + function( + "simple_query_string", + unresolvedArg( + "fields", + new RelevanceFieldList( + Map.of("Title", 1.0F, "Body", 4.2F, "Tags", 1.5F))), + unresolvedArg("query", stringLiteral("taste"))))), + alias("highlight(\"Body\")", highlight(stringLiteral("Body"), Map.of()))); assertTrue(plan.accept(visitor, null)); } @@ -339,12 +377,13 @@ public void reject_query_with_offset() { // test added for coverage only @Test public void visitLimit() { - var visitor = new CanPaginateVisitor() { - @Override - public Boolean visitRelation(Relation node, Object context) { - return Boolean.FALSE; - } - }; + var visitor = + new CanPaginateVisitor() { + @Override + public Boolean visitRelation(Relation node, Object context) { + return Boolean.FALSE; + } + }; var plan = project(limit(relation("dummy"), 0, 0), allFields()); assertFalse(plan.accept(visitor, null)); } @@ -352,8 +391,7 @@ public Boolean visitRelation(Relation node, Object context) { @Test // select * from y where z public void allow_query_with_where() { - var plan = project(filter(relation("dummy"), - booleanLiteral(true)), allFields()); + var plan = project(filter(relation("dummy"), booleanLiteral(true)), allFields()); assertTrue(plan.accept(visitor, null)); } @@ -367,28 +405,29 @@ public void allow_query_with_order_by_with_column_references_only() { @Test // select * from y order by func(z) public void reject_query_with_order_by_with_an_expression() { - var plan = project(sort(relation("dummy"), field(function("func"))), - allFields()); + var plan = project(sort(relation("dummy"), field(function("func"))), allFields()); assertFalse(plan.accept(visitor, null)); } // test added for coverage only @Test public void visitSort() { - CanPaginateVisitor 
visitor = new CanPaginateVisitor() { - @Override - public Boolean visitRelation(Relation node, Object context) { - return Boolean.FALSE; - } - }; + CanPaginateVisitor visitor = + new CanPaginateVisitor() { + @Override + public Boolean visitRelation(Relation node, Object context) { + return Boolean.FALSE; + } + }; var plan = project(sort(relation("dummy"), field("1")), allFields()); assertFalse(plan.accept(visitor, null)); - visitor = new CanPaginateVisitor() { - @Override - public Boolean visitField(Field node, Object context) { - return Boolean.FALSE; - } - }; + visitor = + new CanPaginateVisitor() { + @Override + public Boolean visitField(Field node, Object context) { + return Boolean.FALSE; + } + }; plan = project(sort(relation("dummy"), field("1")), allFields()); assertFalse(plan.accept(visitor, null)); } @@ -396,31 +435,38 @@ public Boolean visitField(Field node, Object context) { @Test // select * from y group by z public void reject_query_with_group_by() { - var plan = project(agg( - relation("dummy"), List.of(), List.of(), List.of(field("1")), List.of()), - allFields()); + var plan = + project( + agg(relation("dummy"), List.of(), List.of(), List.of(field("1")), List.of()), + allFields()); assertFalse(plan.accept(visitor, null)); } @Test // select agg(x) from y public void reject_query_with_aggregation_function() { - var plan = project(agg( - relation("dummy"), - List.of(alias("agg", aggregate("func", field("pewpew")))), - List.of(), List.of(), List.of()), - allFields()); + var plan = + project( + agg( + relation("dummy"), + List.of(alias("agg", aggregate("func", field("pewpew")))), + List.of(), + List.of(), + List.of()), + allFields()); assertFalse(plan.accept(visitor, null)); } @Test // select window(x) from y public void reject_query_with_window_function() { - var plan = project(relation("dummy"), - alias("pewpew", - window( - aggregate("func", field("pewpew")), - List.of(qualifiedName("1")), List.of()))); + var plan = + project( + relation("dummy"), + 
alias( + "pewpew", + window( + aggregate("func", field("pewpew")), List.of(qualifiedName("1")), List.of()))); assertFalse(plan.accept(visitor, null)); } @@ -449,12 +495,13 @@ public void reject_project_when_relation_has_child() { // test combinations of acceptable and not acceptable args for coverage public void visitFilter() { assertAll( - () -> assertTrue(project(filter(relation("dummy"), booleanLiteral(true))) - .accept(visitor, null)), - () -> assertFalse(project(filter(relation("dummy"), map("1", "2"))) - .accept(visitor, null)), - () -> assertFalse(project(filter(tableFunction(List.of("1", "2")), booleanLiteral(true))) - .accept(visitor, null)) - ); + () -> + assertTrue( + project(filter(relation("dummy"), booleanLiteral(true))).accept(visitor, null)), + () -> assertFalse(project(filter(relation("dummy"), map("1", "2"))).accept(visitor, null)), + () -> + assertFalse( + project(filter(tableFunction(List.of("1", "2")), booleanLiteral(true))) + .accept(visitor, null))); } } diff --git a/core/src/test/java/org/opensearch/sql/executor/pagination/PlanSerializerTest.java b/core/src/test/java/org/opensearch/sql/executor/pagination/PlanSerializerTest.java index 8211a3bc12..495a7db80c 100644 --- a/core/src/test/java/org/opensearch/sql/executor/pagination/PlanSerializerTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/pagination/PlanSerializerTest.java @@ -44,9 +44,14 @@ void setUp() { } @ParameterizedTest - @ValueSource(strings = {"pewpew", "asdkfhashdfjkgakgfwuigfaijkb", "ajdhfgajklghadfjkhgjkadhgad" - + "kadfhgadhjgfjklahdgqheygvskjfbvgsdklgfuirehiluANUIfgauighbahfuasdlhfnhaughsdlfhaughaggf" - + "and_some_other_funny_stuff_which_could_be_generated_while_sleeping_on_the_keyboard"}) + @ValueSource( + strings = { + "pewpew", + "asdkfhashdfjkgakgfwuigfaijkb", + "ajdhfgajklghadfjkhgjkadhgad" + + "kadfhgadhjgfjklahdgqheygvskjfbvgsdklgfuirehiluANUIfgauighbahfuasdlhfnhaughsdlfhaughaggf" + + 
"and_some_other_funny_stuff_which_could_be_generated_while_sleeping_on_the_keyboard" + }) void serialize_deserialize_str(String input) { var compressed = serialize(input); assertEquals(input, deserialize(compressed)); @@ -98,8 +103,7 @@ void deserialize_throws() { // from gzip - damaged header () -> assertThrows(Throwable.class, () -> deserialize("00")), // from HashCode::fromString - () -> assertThrows(Throwable.class, () -> deserialize("000")) - ); + () -> assertThrows(Throwable.class, () -> deserialize("000"))); } @Test @@ -109,8 +113,7 @@ void convertToCursor_returns_no_cursor_if_cant_serialize() { plan.setThrowNoCursorOnWrite(true); assertAll( () -> assertThrows(NoCursorException.class, () -> serialize(plan)), - () -> assertEquals(Cursor.None, planCache.convertToCursor(plan)) - ); + () -> assertEquals(Cursor.None, planCache.convertToCursor(plan))); } @Test @@ -122,14 +125,14 @@ void convertToCursor_returns_no_cursor_if_plan_is_not_paginate() { @Test void convertToPlan_throws_cursor_has_no_prefix() { - assertThrows(UnsupportedOperationException.class, () -> - planCache.convertToPlan("abc")); + assertThrows(UnsupportedOperationException.class, () -> planCache.convertToPlan("abc")); } @Test void convertToPlan_throws_if_failed_to_deserialize() { - assertThrows(UnsupportedOperationException.class, () -> - planCache.convertToPlan("n:" + serialize(mock(Serializable.class)))); + assertThrows( + UnsupportedOperationException.class, + () -> planCache.convertToPlan("n:" + serialize(mock(Serializable.class)))); } @Test @@ -144,8 +147,8 @@ void serialize_and_deserialize() { @Test void convertToCursor_and_convertToPlan() { var plan = new TestOperator(100500); - var roundTripPlan = (SerializablePlan) - planCache.convertToPlan(planCache.convertToCursor(plan).toString()); + var roundTripPlan = + (SerializablePlan) planCache.convertToPlan(planCache.convertToCursor(plan).toString()); assertEquals(plan, roundTripPlan); assertNotSame(plan, roundTripPlan); } @@ -158,8 +161,8 @@ 
void resolveObject() { objectOutput.writeObject("Hello, world!"); objectOutput.flush(); - var cds = planCache.getCursorDeserializationStream( - new ByteArrayInputStream(output.toByteArray())); + var cds = + planCache.getCursorDeserializationStream(new ByteArrayInputStream(output.toByteArray())); assertEquals(storageEngine, cds.resolveObject("engine")); var object = new Object(); assertSame(object, cds.resolveObject(object)); diff --git a/core/src/test/java/org/opensearch/sql/executor/streaming/DefaultMetadataLogTest.java b/core/src/test/java/org/opensearch/sql/executor/streaming/DefaultMetadataLogTest.java index 4d8c4f3e93..17ea253e2a 100644 --- a/core/src/test/java/org/opensearch/sql/executor/streaming/DefaultMetadataLogTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/streaming/DefaultMetadataLogTest.java @@ -8,7 +8,6 @@ package org.opensearch.sql.executor.streaming; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; diff --git a/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java b/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java index 589b093c2f..bd0c8ed3e1 100644 --- a/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java @@ -45,19 +45,12 @@ void executedSuccess() { @Test void executedFailed() { - streamingQuery() - .addData() - .executeFailed() - .latestOffsetLogShouldBe(0L) - .noCommittedLog(); + streamingQuery().addData().executeFailed().latestOffsetLogShouldBe(0L).noCommittedLog(); } @Test void noDataInSource() { - streamingQuery() - .neverProcess() - .noOffsetLog() - .noCommittedLog(); + streamingQuery().neverProcess().noOffsetLog().noCommittedLog(); } @Test @@ -170,8 +163,7 @@ Helper 
executeSuccess(Long... offsets) { ResponseListener listener = invocation.getArgument(2); listener.onResponse( - new ExecutionEngine.QueryResponse(null, Collections.emptyList(), - Cursor.None)); + new ExecutionEngine.QueryResponse(null, Collections.emptyList(), Cursor.None)); PlanContext planContext = invocation.getArgument(1); assertTrue(planContext.getSplit().isPresent()); @@ -257,8 +249,8 @@ public Optional getLatestOffset() { public Batch getBatch(Optional start, Offset end) { return new Batch( new TestOffsetSplit( - start.map(v -> v.getOffset() + 1).orElse(0L), Long.min(offset.get(), - end.getOffset()))); + start.map(v -> v.getOffset() + 1).orElse(0L), + Long.min(offset.get(), end.getOffset()))); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java b/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java index 47fe9dad0f..2f3e855430 100644 --- a/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -31,70 +30,69 @@ class ExpressionNodeVisitorTest { @Test void should_return_null_by_default() { - ExpressionNodeVisitor visitor = new ExpressionNodeVisitor(){}; + ExpressionNodeVisitor visitor = new ExpressionNodeVisitor() {}; assertNull(new HighlightExpression(DSL.literal("Title")).accept(visitor, null)); assertNull(literal(10).accept(visitor, null)); assertNull(ref("name", STRING).accept(visitor, null)); assertNull(named("bool", literal(true)).accept(visitor, null)); assertNull(DSL.abs(literal(-10)).accept(visitor, null)); assertNull(DSL.sum(literal(10)).accept(visitor, null)); - assertNull(named("avg", new AvgAggregator(Collections.singletonList(ref("age", INTEGER)), - INTEGER)).accept(visitor, null)); + assertNull( + 
named("avg", new AvgAggregator(Collections.singletonList(ref("age", INTEGER)), INTEGER)) + .accept(visitor, null)); assertNull(new CaseClause(ImmutableList.of(), null).accept(visitor, null)); assertNull(new WhenClause(literal("test"), literal(10)).accept(visitor, null)); assertNull(DSL.namedArgument("field", literal("message")).accept(visitor, null)); assertNull(DSL.span(ref("age", INTEGER), literal(1), "").accept(visitor, null)); - assertNull(DSL.regex(ref("name", STRING), DSL.literal("(?\\d+)"), DSL.literal("group")) - .accept(visitor, null)); + assertNull( + DSL.regex(ref("name", STRING), DSL.literal("(?\\d+)"), DSL.literal("group")) + .accept(visitor, null)); } @Test void can_visit_all_types_of_expression_node() { Expression expr = DSL.regex( - DSL.castString( - DSL.sum( - DSL.add( - ref("balance", INTEGER), - literal(10)) - )), + DSL.castString(DSL.sum(DSL.add(ref("balance", INTEGER), literal(10)))), DSL.literal("(?\\d+)"), DSL.literal("group")); - Expression actual = expr.accept(new ExpressionNodeVisitor() { - @Override - public Expression visitLiteral(LiteralExpression node, Object context) { - return node; - } + Expression actual = + expr.accept( + new ExpressionNodeVisitor() { + @Override + public Expression visitLiteral(LiteralExpression node, Object context) { + return node; + } - @Override - public Expression visitReference(ReferenceExpression node, Object context) { - return node; - } + @Override + public Expression visitReference(ReferenceExpression node, Object context) { + return node; + } - @Override - public Expression visitParse(ParseExpression node, Object context) { - return node; - } + @Override + public Expression visitParse(ParseExpression node, Object context) { + return node; + } - @Override - public Expression visitFunction(FunctionExpression node, Object context) { - return DSL.add(visitArguments(node.getArguments(), context)); - } + @Override + public Expression visitFunction(FunctionExpression node, Object context) { + return 
DSL.add(visitArguments(node.getArguments(), context)); + } - @Override - public Expression visitAggregator(Aggregator node, Object context) { - return DSL.sum(visitArguments(node.getArguments(), context)); - } + @Override + public Expression visitAggregator(Aggregator node, Object context) { + return DSL.sum(visitArguments(node.getArguments(), context)); + } - private Expression[] visitArguments(List arguments, Object context) { - return arguments.stream() - .map(arg -> arg.accept(this, context)) - .toArray(Expression[]::new); - } - }, null); + private Expression[] visitArguments(List arguments, Object context) { + return arguments.stream() + .map(arg -> arg.accept(this, context)) + .toArray(Expression[]::new); + } + }, + null); assertEquals(expr, actual); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/ExpressionTestBase.java b/core/src/test/java/org/opensearch/sql/expression/ExpressionTestBase.java index 8ce7a52394..fd886cdda3 100644 --- a/core/src/test/java/org/opensearch/sql/expression/ExpressionTestBase.java +++ b/core/src/test/java/org/opensearch/sql/expression/ExpressionTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import static org.opensearch.sql.config.TestConfig.BOOL_TYPE_MISSING_VALUE_FIELD; diff --git a/core/src/test/java/org/opensearch/sql/expression/HighlightExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/HighlightExpressionTest.java index 41f3bad030..bc6b3628b0 100644 --- a/core/src/test/java/org/opensearch/sql/expression/HighlightExpressionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/HighlightExpressionTest.java @@ -18,13 +18,13 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.expression.env.Environment; - public class HighlightExpressionTest extends ExpressionTestBase { @Test public void single_highlight_test() { - Environment hlTuple = ExprValueUtils.tupleValue( - 
ImmutableMap.of("_highlight.Title", "result value")).bindingTuples(); + Environment hlTuple = + ExprValueUtils.tupleValue(ImmutableMap.of("_highlight.Title", "result value")) + .bindingTuples(); HighlightExpression expr = new HighlightExpression(DSL.literal("Title")); ExprValue resultVal = expr.valueOf(hlTuple); @@ -34,8 +34,9 @@ public void single_highlight_test() { @Test public void missing_highlight_test() { - Environment hlTuple = ExprValueUtils.tupleValue( - ImmutableMap.of("_highlight.Title", "result value")).bindingTuples(); + Environment hlTuple = + ExprValueUtils.tupleValue(ImmutableMap.of("_highlight.Title", "result value")) + .bindingTuples(); HighlightExpression expr = new HighlightExpression(DSL.literal("invalid")); ExprValue resultVal = expr.valueOf(hlTuple); @@ -52,8 +53,8 @@ public void missing_highlight_wildcard_test() { builder.put("_highlight", ExprTupleValue.fromExprValueMap(hlBuilder.build())); HighlightExpression hlExpr = new HighlightExpression(DSL.literal("invalid*")); - ExprValue resultVal = hlExpr.valueOf( - ExprTupleValue.fromExprValueMap(builder.build()).bindingTuples()); + ExprValue resultVal = + hlExpr.valueOf(ExprTupleValue.fromExprValueMap(builder.build()).bindingTuples()); assertTrue(resultVal.isMissing()); } @@ -67,20 +68,23 @@ public void highlight_all_test() { builder.put("_highlight", ExprTupleValue.fromExprValueMap(hlBuilder.build())); HighlightExpression hlExpr = new HighlightExpression(DSL.literal("T*")); - ExprValue resultVal = hlExpr.valueOf( - ExprTupleValue.fromExprValueMap(builder.build()).bindingTuples()); + ExprValue resultVal = + hlExpr.valueOf(ExprTupleValue.fromExprValueMap(builder.build()).bindingTuples()); assertEquals(STRUCT, resultVal.type()); - assertTrue(resultVal.tupleValue().containsValue( - ExprValueUtils.stringValue("correct result value"))); - assertFalse(resultVal.tupleValue().containsValue( - ExprValueUtils.stringValue("secondary correct result value"))); + assertTrue( + 
resultVal.tupleValue().containsValue(ExprValueUtils.stringValue("correct result value"))); + assertFalse( + resultVal + .tupleValue() + .containsValue(ExprValueUtils.stringValue("secondary correct result value"))); } @Test public void do_nothing_with_missing_value() { - Environment hlTuple = ExprValueUtils.tupleValue( - ImmutableMap.of("NonHighlightField", "ResultValue")).bindingTuples(); + Environment hlTuple = + ExprValueUtils.tupleValue(ImmutableMap.of("NonHighlightField", "ResultValue")) + .bindingTuples(); HighlightExpression expr = new HighlightExpression(DSL.literal("*")); ExprValue resultVal = expr.valueOf(hlTuple); @@ -96,13 +100,13 @@ public void highlight_wildcard_test() { builder.put("_highlight", ExprTupleValue.fromExprValueMap(hlBuilder.build())); HighlightExpression hlExpr = new HighlightExpression(DSL.literal("T*")); - ExprValue resultVal = hlExpr.valueOf( - ExprTupleValue.fromExprValueMap(builder.build()).bindingTuples()); + ExprValue resultVal = + hlExpr.valueOf(ExprTupleValue.fromExprValueMap(builder.build()).bindingTuples()); assertEquals(STRUCT, resultVal.type()); - assertTrue(resultVal.tupleValue().containsValue( - ExprValueUtils.stringValue("correct result value"))); - assertFalse(resultVal.tupleValue().containsValue( - ExprValueUtils.stringValue("incorrect result value"))); + assertTrue( + resultVal.tupleValue().containsValue(ExprValueUtils.stringValue("correct result value"))); + assertFalse( + resultVal.tupleValue().containsValue(ExprValueUtils.stringValue("incorrect result value"))); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java index 38f1ce3ca9..915952cca8 100644 --- a/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; 
import static org.junit.jupiter.api.Assertions.assertEquals; @@ -55,11 +54,12 @@ void name_a_span_expression() { @Test void name_a_parse_expression() { ParseExpression parse = - DSL.regex(DSL.ref("string_value", STRING), DSL.literal("(?\\w{2})\\w"), + DSL.regex( + DSL.ref("string_value", STRING), + DSL.literal("(?\\w{2})\\w"), DSL.literal("group")); NamedExpression named = DSL.named(parse); assertEquals(parse, named.getDelegated()); assertEquals(parse.getIdentifier().valueOf().stringValue(), named.getName()); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/ReferenceExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/ReferenceExpressionTest.java index 46aae069bb..da8c15d19f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/ReferenceExpressionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/ReferenceExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -54,13 +53,13 @@ public void resolve_value() { assertEquals(doubleValue(1d), DSL.ref("double_value", DOUBLE).valueOf(valueEnv())); assertEquals(booleanValue(true), DSL.ref("boolean_value", BOOLEAN).valueOf(valueEnv())); assertEquals(stringValue("str"), DSL.ref("string_value", STRING).valueOf(valueEnv())); - assertEquals(tupleValue(ImmutableMap.of("str", 1)), - DSL.ref("struct_value", STRUCT).valueOf(valueEnv())); - assertEquals(collectionValue(ImmutableList.of(1)), - DSL.ref("array_value", ARRAY).valueOf(valueEnv())); + assertEquals( + tupleValue(ImmutableMap.of("str", 1)), DSL.ref("struct_value", STRUCT).valueOf(valueEnv())); + assertEquals( + collectionValue(ImmutableList.of(1)), DSL.ref("array_value", ARRAY).valueOf(valueEnv())); assertEquals(LITERAL_NULL, DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN).valueOf(valueEnv())); - assertEquals(LITERAL_MISSING, - DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN).valueOf(valueEnv())); 
+ assertEquals( + LITERAL_MISSING, DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN).valueOf(valueEnv())); } @Test @@ -138,6 +137,9 @@ public void array_with_multiple_path_value() { } /** + * + * + *
    * {
    *   "name": "bob smith"
    *   "project.year": 1990,
@@ -157,39 +159,31 @@ public void array_with_multiple_path_value() {
    *     { "info": "Only first index of array used" }
    *   ]
    * }
+   * 
*/ private ExprTupleValue tuple() { ExprValue address = - ExprValueUtils.tupleValue(ImmutableMap.of("state", "WA", "city", "seattle", "project" - + ".year", 1990)); - ExprValue project = - ExprValueUtils.tupleValue(ImmutableMap.of("year", 2020)); - ExprValue addressLocal = - ExprValueUtils.tupleValue(ImmutableMap.of("state", "WA")); + ExprValueUtils.tupleValue( + ImmutableMap.of("state", "WA", "city", "seattle", "project" + ".year", 1990)); + ExprValue project = ExprValueUtils.tupleValue(ImmutableMap.of("year", 2020)); + ExprValue addressLocal = ExprValueUtils.tupleValue(ImmutableMap.of("state", "WA")); ExprValue messageCollectionValue = new ExprCollectionValue( ImmutableList.of( ExprValueUtils.tupleValue( - ImmutableMap.of( - "info", stringValue("First message in array") - ) - ), + ImmutableMap.of("info", stringValue("First message in array"))), ExprValueUtils.tupleValue( - ImmutableMap.of( - "info", stringValue("Only first index of array used") - ) - ) - ) - ); - - ExprTupleValue tuple = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "name", new ExprStringValue("bob smith"), - "project.year", new ExprIntegerValue(1990), - "project", project, - "address", address, - "address.local", addressLocal, - "message", messageCollectionValue - )); + ImmutableMap.of("info", stringValue("Only first index of array used"))))); + + ExprTupleValue tuple = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "name", new ExprStringValue("bob smith"), + "project.year", new ExprIntegerValue(1990), + "project", project, + "address", address, + "address.local", addressLocal, + "message", messageCollectionValue)); return tuple; } } diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/AggregationTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/AggregationTest.java index 7742e6c4d0..f1a3a9d948 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/AggregationTest.java +++ 
b/core/src/test/java/org/opensearch/sql/expression/aggregation/AggregationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import com.google.common.collect.ImmutableList; @@ -98,26 +97,46 @@ public class AggregationTest extends ExpressionTestBase { protected static List tuples_with_duplicates = Arrays.asList( - ExprValueUtils.tupleValue(ImmutableMap.of( - "integer_value", 1, - "double_value", 4d, - "struct_value", ImmutableMap.of("str", 1), - "array_value", ImmutableList.of(1))), - ExprValueUtils.tupleValue(ImmutableMap.of( - "integer_value", 1, - "double_value", 3d, - "struct_value", ImmutableMap.of("str", 1), - "array_value", ImmutableList.of(1))), - ExprValueUtils.tupleValue(ImmutableMap.of( - "integer_value", 2, - "double_value", 2d, - "struct_value", ImmutableMap.of("str", 2), - "array_value", ImmutableList.of(2))), - ExprValueUtils.tupleValue(ImmutableMap.of( - "integer_value", 3, - "double_value", 1d, - "struct_value", ImmutableMap.of("str1", 1), - "array_value", ImmutableList.of(1, 2)))); + ExprValueUtils.tupleValue( + ImmutableMap.of( + "integer_value", + 1, + "double_value", + 4d, + "struct_value", + ImmutableMap.of("str", 1), + "array_value", + ImmutableList.of(1))), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "integer_value", + 1, + "double_value", + 3d, + "struct_value", + ImmutableMap.of("str", 1), + "array_value", + ImmutableList.of(1))), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "integer_value", + 2, + "double_value", + 2d, + "struct_value", + ImmutableMap.of("str", 2), + "array_value", + ImmutableList.of(2))), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "integer_value", + 3, + "double_value", + 1d, + "struct_value", + ImmutableMap.of("str1", 1), + "array_value", + ImmutableList.of(1, 2)))); protected static List tuples_with_null_and_missing = Arrays.asList( diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java 
b/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java index b3b0052bc3..f465a6477e 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -37,16 +36,23 @@ public void avg_field_expression() { @Test public void avg_arithmetic_expression() { - ExprValue result = aggregation(DSL.avg( - DSL.multiply(DSL.ref("integer_value", INTEGER), - DSL.literal(ExprValueUtils.integerValue(10)))), tuples); + ExprValue result = + aggregation( + DSL.avg( + DSL.multiply( + DSL.ref("integer_value", INTEGER), + DSL.literal(ExprValueUtils.integerValue(10)))), + tuples); assertEquals(25.0, result.value()); } @Test public void filtered_avg() { - ExprValue result = aggregation(DSL.avg(DSL.ref("integer_value", INTEGER)) - .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), tuples); + ExprValue result = + aggregation( + DSL.avg(DSL.ref("integer_value", INTEGER)) + .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), + tuples); assertEquals(3.0, result.value()); } @@ -128,16 +134,17 @@ public void avg_timestamp() { @Test public void valueOf() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> DSL.avg(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.avg(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); assertEquals("can't evaluate on aggregator: avg", exception.getMessage()); } @Test public void avg_on_unsupported_type() { var aggregator = new AvgAggregator(List.of(DSL.ref("string", STRING)), STRING); - var exception = 
assertThrows(IllegalArgumentException.class, - () -> aggregator.create()); + var exception = assertThrows(IllegalArgumentException.class, () -> aggregator.create()); assertEquals("avg aggregation over STRING type is not supported", exception.getMessage()); } @@ -149,9 +156,12 @@ public void test_to_string() { @Test public void test_nested_to_string() { - Aggregator avgAggregator = DSL.avg(DSL.multiply(DSL.ref("integer_value", INTEGER), - DSL.literal(ExprValueUtils.integerValue(10)))); - assertEquals(String.format("avg(*(%s, %d))", DSL.ref("integer_value", INTEGER), 10), + Aggregator avgAggregator = + DSL.avg( + DSL.multiply( + DSL.ref("integer_value", INTEGER), DSL.literal(ExprValueUtils.integerValue(10)))); + assertEquals( + String.format("avg(*(%s, %d))", DSL.ref("integer_value", INTEGER), 10), avgAggregator.toString()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java index fd27529a70..50bd3fedfe 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -72,9 +71,13 @@ public void count_datetime_field_expression() { @Test public void count_arithmetic_expression() { - ExprValue result = aggregation(DSL.count( - DSL.multiply(DSL.ref("integer_value", INTEGER), - DSL.literal(ExprValueUtils.integerValue(10)))), tuples); + ExprValue result = + aggregation( + DSL.count( + DSL.multiply( + DSL.ref("integer_value", INTEGER), + DSL.literal(ExprValueUtils.integerValue(10)))), + tuples); assertEquals(4, result.value()); } @@ -104,51 +107,56 @@ public void count_array_field_expression() { @Test public void filtered_count() { - 
ExprValue result = aggregation(DSL.count(DSL.ref("integer_value", INTEGER)) - .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), tuples); + ExprValue result = + aggregation( + DSL.count(DSL.ref("integer_value", INTEGER)) + .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), + tuples); assertEquals(3, result.value()); } @Test public void distinct_count() { - ExprValue result = aggregation(DSL.distinctCount(DSL.ref("integer_value", INTEGER)), - tuples_with_duplicates); + ExprValue result = + aggregation(DSL.distinctCount(DSL.ref("integer_value", INTEGER)), tuples_with_duplicates); assertEquals(3, result.value()); } @Test public void filtered_distinct_count() { - ExprValue result = aggregation(DSL.distinctCount(DSL.ref("integer_value", INTEGER)) - .condition(DSL.greater(DSL.ref("double_value", DOUBLE), DSL.literal(1d))), - tuples_with_duplicates); + ExprValue result = + aggregation( + DSL.distinctCount(DSL.ref("integer_value", INTEGER)) + .condition(DSL.greater(DSL.ref("double_value", DOUBLE), DSL.literal(1d))), + tuples_with_duplicates); assertEquals(2, result.value()); } @Test public void distinct_count_map() { - ExprValue result = aggregation(DSL.distinctCount(DSL.ref("struct_value", STRUCT)), - tuples_with_duplicates); + ExprValue result = + aggregation(DSL.distinctCount(DSL.ref("struct_value", STRUCT)), tuples_with_duplicates); assertEquals(3, result.value()); } @Test public void distinct_count_array() { - ExprValue result = aggregation(DSL.distinctCount(DSL.ref("array_value", ARRAY)), - tuples_with_duplicates); + ExprValue result = + aggregation(DSL.distinctCount(DSL.ref("array_value", ARRAY)), tuples_with_duplicates); assertEquals(3, result.value()); } @Test public void count_with_missing() { - ExprValue result = aggregation(DSL.count(DSL.ref("integer_value", INTEGER)), - tuples_with_null_and_missing); + ExprValue result = + aggregation(DSL.count(DSL.ref("integer_value", INTEGER)), tuples_with_null_and_missing); 
assertEquals(2, result.value()); } @Test public void count_with_null() { - ExprValue result = aggregation(DSL.count(DSL.ref("double_value", DOUBLE)), - tuples_with_null_and_missing); + ExprValue result = + aggregation(DSL.count(DSL.ref("double_value", DOUBLE)), tuples_with_null_and_missing); assertEquals(2, result.value()); } @@ -166,8 +174,10 @@ public void count_literal_with_null_and_missing() { @Test public void valueOf() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> DSL.count(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.count(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); assertEquals("can't evaluate on aggregator: count", exception.getMessage()); } @@ -183,7 +193,8 @@ public void test_to_string() { @Test public void test_nested_to_string() { Aggregator countAggregator = DSL.count(DSL.abs(DSL.ref("integer_value", INTEGER))); - assertEquals(String.format("count(abs(%s))", DSL.ref("integer_value", INTEGER)), + assertEquals( + String.format("count(abs(%s))", DSL.ref("integer_value", INTEGER)), countAggregator.toString()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java index 6886622704..c6cd380ad5 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -83,16 +82,23 @@ public void test_max_timestamp() { @Test public void test_max_arithmetic_expression() { - ExprValue result = aggregation( - DSL.max(DSL.add(DSL.ref("integer_value", INTEGER), - 
DSL.literal(ExprValueUtils.integerValue(0)))), tuples); + ExprValue result = + aggregation( + DSL.max( + DSL.add( + DSL.ref("integer_value", INTEGER), + DSL.literal(ExprValueUtils.integerValue(0)))), + tuples); assertEquals(4, result.value()); } @Test public void filtered_max() { - ExprValue result = aggregation(DSL.max(DSL.ref("integer_value", INTEGER)) - .condition(DSL.less(DSL.ref("integer_value", INTEGER), DSL.literal(4))), tuples); + ExprValue result = + aggregation( + DSL.max(DSL.ref("integer_value", INTEGER)) + .condition(DSL.less(DSL.ref("integer_value", INTEGER), DSL.literal(4))), + tuples); assertEquals(3, result.value()); } @@ -119,8 +125,10 @@ public void test_max_all_missing_or_null() { @Test public void test_value_of() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> DSL.max(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.max(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); assertEquals("can't evaluate on aggregator: max", exception.getMessage()); } @@ -132,9 +140,12 @@ public void test_to_string() { @Test public void test_nested_to_string() { - Aggregator maxAggregator = DSL.max(DSL.add(DSL.ref("integer_value", INTEGER), - DSL.literal(ExprValueUtils.integerValue(10)))); - assertEquals(String.format("max(+(%s, %d))", DSL.ref("integer_value", INTEGER), 10), + Aggregator maxAggregator = + DSL.max( + DSL.add( + DSL.ref("integer_value", INTEGER), DSL.literal(ExprValueUtils.integerValue(10)))); + assertEquals( + String.format("max(+(%s, %d))", DSL.ref("integer_value", INTEGER), 10), maxAggregator.toString()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java index 1437f4dfda..1aee0f3a6c 100644 --- 
a/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -83,16 +82,23 @@ public void test_min_timestamp() { @Test public void test_min_arithmetic_expression() { - ExprValue result = aggregation( - DSL.min(DSL.add(DSL.ref("integer_value", INTEGER), - DSL.literal(ExprValueUtils.integerValue(0)))), tuples); + ExprValue result = + aggregation( + DSL.min( + DSL.add( + DSL.ref("integer_value", INTEGER), + DSL.literal(ExprValueUtils.integerValue(0)))), + tuples); assertEquals(1, result.value()); } @Test public void filtered_min() { - ExprValue result = aggregation(DSL.min(DSL.ref("integer_value", INTEGER)) - .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), tuples); + ExprValue result = + aggregation( + DSL.min(DSL.ref("integer_value", INTEGER)) + .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), + tuples); assertEquals(2, result.value()); } @@ -119,8 +125,10 @@ public void test_min_all_missing_or_null() { @Test public void test_value_of() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> DSL.min(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.min(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); assertEquals("can't evaluate on aggregator: min", exception.getMessage()); } @@ -132,9 +140,12 @@ public void test_to_string() { @Test public void test_nested_to_string() { - Aggregator minAggregator = DSL.min(DSL.add(DSL.ref("integer_value", INTEGER), - DSL.literal(ExprValueUtils.integerValue(10)))); - assertEquals(String.format("min(+(%s, %d))", 
DSL.ref("integer_value", INTEGER), 10), + Aggregator minAggregator = + DSL.min( + DSL.add( + DSL.ref("integer_value", INTEGER), DSL.literal(ExprValueUtils.integerValue(10)))); + assertEquals( + String.format("min(+(%s, %d))", DSL.ref("integer_value", INTEGER), 10), minAggregator.toString()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/StdDevAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/StdDevAggregatorTest.java index fe4923d4df..ceb76815dc 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/StdDevAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/StdDevAggregatorTest.java @@ -40,14 +40,11 @@ @ExtendWith(MockitoExtension.class) public class StdDevAggregatorTest extends AggregationTest { - @Mock - Expression expression; + @Mock Expression expression; - @Mock - ExprValue tupleValue; + @Mock ExprValue tupleValue; - @Mock - BindingTuple tuple; + @Mock BindingTuple tuple; @Test public void stddev_sample_field_expression() { diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/SumAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/SumAggregatorTest.java index 676306041e..eb5a18f248 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/SumAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/SumAggregatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -53,9 +52,13 @@ public void sum_double_field_expression() { @Test public void sum_arithmetic_expression() { - ExprValue result = aggregation(DSL.sum( - DSL.multiply(DSL.ref("integer_value", INTEGER), - DSL.literal(ExprValueUtils.integerValue(10)))), tuples); + ExprValue result = + aggregation( + DSL.sum( + DSL.multiply( + DSL.ref("integer_value", INTEGER), + 
DSL.literal(ExprValueUtils.integerValue(10)))), + tuples); assertEquals(100, result.value()); } @@ -64,19 +67,23 @@ public void sum_string_field_expression() { SumAggregator sumAggregator = new SumAggregator(ImmutableList.of(DSL.ref("string_value", STRING)), ExprCoreType.STRING); SumState sumState = sumAggregator.create(); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> sumAggregator - .iterate( - ExprValueUtils.tupleValue(ImmutableMap.of("string_value", "m")).bindingTuples(), - sumState) - ); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + sumAggregator.iterate( + ExprValueUtils.tupleValue(ImmutableMap.of("string_value", "m")).bindingTuples(), + sumState)); assertEquals("unexpected type [STRING] in sum aggregation", exception.getMessage()); } @Test public void filtered_sum() { - ExprValue result = aggregation(DSL.sum(DSL.ref("integer_value", INTEGER)) - .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), tuples); + ExprValue result = + aggregation( + DSL.sum(DSL.ref("integer_value", INTEGER)) + .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), + tuples); assertEquals(9, result.value()); } @@ -103,8 +110,10 @@ public void sum_with_all_missing_or_null() { @Test public void valueOf() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> DSL.sum(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.sum(DSL.ref("double_value", DOUBLE)).valueOf(valueEnv())); assertEquals("can't evaluate on aggregator: sum", exception.getMessage()); } @@ -116,9 +125,12 @@ public void test_to_string() { @Test public void test_nested_to_string() { - Aggregator sumAggregator = DSL.sum(DSL.multiply(DSL.ref("integer_value", INTEGER), - 
DSL.literal(ExprValueUtils.integerValue(10)))); - assertEquals(String.format("sum(*(%s, %d))", DSL.ref("integer_value", INTEGER), 10), + Aggregator sumAggregator = + DSL.sum( + DSL.multiply( + DSL.ref("integer_value", INTEGER), DSL.literal(ExprValueUtils.integerValue(10)))); + assertEquals( + String.format("sum(*(%s, %d))", DSL.ref("integer_value", INTEGER), 10), sumAggregator.toString()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/TakeAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/TakeAggregatorTest.java index 6d9aac4957..2409ec49d2 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/TakeAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/TakeAggregatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -35,15 +34,18 @@ public void take_string_field_expression_with_large_size() { @Test public void filtered_take() { ExprValue result = - aggregation(DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)) - .condition(DSL.equal(DSL.ref("string_value", STRING), DSL.literal("m"))), tuples); + aggregation( + DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)) + .condition(DSL.equal(DSL.ref("string_value", STRING), DSL.literal("m"))), + tuples); assertEquals(ImmutableList.of("m", "m"), result.value()); } @Test public void test_take_null() { ExprValue result = - aggregation(DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), + aggregation( + DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), tuples_with_null_and_missing); assertEquals(ImmutableList.of("m", "f"), result.value()); } @@ -51,7 +53,8 @@ public void test_take_null() { @Test public void test_take_missing() { ExprValue result = - aggregation(DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), + aggregation( + 
DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), tuples_with_null_and_missing); assertEquals(ImmutableList.of("m", "f"), result.value()); } @@ -59,22 +62,27 @@ public void test_take_missing() { @Test public void test_take_all_missing_or_null() { ExprValue result = - aggregation(DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), + aggregation( + DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), tuples_with_all_null_or_missing); assertEquals(ImmutableList.of(), result.value()); } @Test public void test_take_with_invalid_size() { - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, - () -> aggregation(DSL.take(DSL.ref("string_value", STRING), DSL.literal(0)), tuples)); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> aggregation(DSL.take(DSL.ref("string_value", STRING), DSL.literal(0)), tuples)); assertEquals("size must be greater than 0", exception.getMessage()); } @Test public void test_value_of() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)).valueOf(valueEnv())); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)).valueOf(valueEnv())); assertEquals("can't evaluate on aggregator: take", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/conditional/ConditionalFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/conditional/ConditionalFunctionTest.java index ae8b714dd8..33654a71cf 100644 --- a/core/src/test/java/org/opensearch/sql/expression/conditional/ConditionalFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/conditional/ConditionalFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.expression.conditional; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -29,12 +28,9 @@ @ExtendWith(MockitoExtension.class) public class ConditionalFunctionTest extends ExpressionTestBase { - @Mock - Environment env; + @Mock Environment env; - /** - * Arguments for case test. - */ + /** Arguments for case test. */ private static Stream caseArguments() { Stream.Builder builder = Stream.builder(); return builder @@ -46,28 +42,25 @@ private static Stream caseArguments() { @ParameterizedTest(name = "case {0} when {1} then {2} when {3} then {4} else {5}") @MethodSource("caseArguments") - void case_value(int value, - int cond1, int result1, - int cond2, int result2, - int defaultVal) throws Exception { - Callable expect = () -> { - if (cond1 == value) { - return result1; - } else if (cond2 == value) { - return result2; - } else { - return defaultVal; - } - }; + void case_value(int value, int cond1, int result1, int cond2, int result2, int defaultVal) + throws Exception { + Callable expect = + () -> { + if (cond1 == value) { + return result1; + } else if (cond2 == value) { + return result2; + } else { + return defaultVal; + } + }; - Expression cases = DSL.cases( - DSL.literal(defaultVal), - DSL.when(DSL.equal(DSL.literal(cond1), DSL.literal(value)), DSL.literal(result1)), - DSL.when(DSL.equal(DSL.literal(cond2), DSL.literal(value)), DSL.literal(result2))); + Expression cases = + DSL.cases( + DSL.literal(defaultVal), + DSL.when(DSL.equal(DSL.literal(cond1), DSL.literal(value)), DSL.literal(result1)), + DSL.when(DSL.equal(DSL.literal(cond2), DSL.literal(value)), DSL.literal(result2))); - assertEquals( - new ExprIntegerValue(expect.call()), - cases.valueOf(env)); + assertEquals(new ExprIntegerValue(expect.call()), cases.valueOf(env)); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/conditional/cases/CaseClauseTest.java b/core/src/test/java/org/opensearch/sql/expression/conditional/cases/CaseClauseTest.java index 
3c95c4f461..61ff7ef022 100644 --- a/core/src/test/java/org/opensearch/sql/expression/conditional/cases/CaseClauseTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/conditional/cases/CaseClauseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.conditional.cases; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -29,8 +28,7 @@ @ExtendWith(MockitoExtension.class) class CaseClauseTest extends ExpressionTestBase { - @Mock - private WhenClause whenClause; + @Mock private WhenClause whenClause; @Test void should_return_when_clause_result_if_matched() { @@ -93,8 +91,7 @@ void should_return_all_result_types_including_default() { CaseClause caseClause = new CaseClause(ImmutableList.of(whenClause), defaultResult); assertEquals( - ImmutableList.of(ExprCoreType.INTEGER, ExprCoreType.STRING), - caseClause.allResultTypes()); + ImmutableList.of(ExprCoreType.INTEGER, ExprCoreType.STRING), caseClause.allResultTypes()); } @Test @@ -104,9 +101,6 @@ void should_return_all_result_types_excluding_null_result() { when(defaultResult.type()).thenReturn(ExprCoreType.UNDEFINED); CaseClause caseClause = new CaseClause(ImmutableList.of(whenClause), defaultResult); - assertEquals( - ImmutableList.of(), - caseClause.allResultTypes()); + assertEquals(ImmutableList.of(), caseClause.allResultTypes()); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/conditional/cases/WhenClauseTest.java b/core/src/test/java/org/opensearch/sql/expression/conditional/cases/WhenClauseTest.java index a13f072510..e03c851694 100644 --- a/core/src/test/java/org/opensearch/sql/expression/conditional/cases/WhenClauseTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/conditional/cases/WhenClauseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.conditional.cases; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -59,5 +58,4 @@ void 
should_use_result_expression_type() { WhenClause whenClause = new WhenClause(DSL.literal(true), DSL.literal(30)); assertEquals(ExprCoreType.INTEGER, whenClause.type()); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java index e917e2ee62..eed83f4fa9 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java @@ -74,38 +74,73 @@ public void time_limited_by_24_hours() { private static Stream getTestData() { return Stream.of( // DATETIME and TIME/DATE/DATETIME/TIMESTAMP - Arguments.of(LocalDateTime.of(1961, 4, 12, 9, 7), LocalTime.of(1, 48), - LocalDateTime.of(1961, 4, 12, 10, 55), LocalDateTime.of(1961, 4, 12, 7, 19)), - Arguments.of(LocalDateTime.of(1961, 4, 12, 9, 7), LocalDate.of(2000, 1, 1), - LocalDateTime.of(1961, 4, 12, 9, 7), LocalDateTime.of(1961, 4, 12, 9, 7)), - Arguments.of(LocalDateTime.of(1961, 4, 12, 9, 7), LocalDateTime.of(1235, 5, 6, 1, 48), - LocalDateTime.of(1961, 4, 12, 10, 55), LocalDateTime.of(1961, 4, 12, 7, 19)), - Arguments.of(LocalDateTime.of(1961, 4, 12, 9, 7), Instant.ofEpochSecond(42), - LocalDateTime.of(1961, 4, 12, 9, 7, 42), LocalDateTime.of(1961, 4, 12, 9, 6, 18)), + Arguments.of( + LocalDateTime.of(1961, 4, 12, 9, 7), + LocalTime.of(1, 48), + LocalDateTime.of(1961, 4, 12, 10, 55), + LocalDateTime.of(1961, 4, 12, 7, 19)), + Arguments.of( + LocalDateTime.of(1961, 4, 12, 9, 7), + LocalDate.of(2000, 1, 1), + LocalDateTime.of(1961, 4, 12, 9, 7), + LocalDateTime.of(1961, 4, 12, 9, 7)), + Arguments.of( + LocalDateTime.of(1961, 4, 12, 9, 7), + LocalDateTime.of(1235, 5, 6, 1, 48), + LocalDateTime.of(1961, 4, 12, 10, 55), + LocalDateTime.of(1961, 4, 12, 7, 19)), + Arguments.of( + LocalDateTime.of(1961, 4, 12, 9, 7), + Instant.ofEpochSecond(42), + LocalDateTime.of(1961, 4, 12, 9, 
7, 42), + LocalDateTime.of(1961, 4, 12, 9, 6, 18)), // DATE and TIME/DATE/DATETIME/TIMESTAMP - Arguments.of(LocalDate.of(1961, 4, 12), LocalTime.of(9, 7), - LocalDateTime.of(1961, 4, 12, 9, 7), LocalDateTime.of(1961, 4, 11, 14, 53)), - Arguments.of(LocalDate.of(1961, 4, 12), LocalDate.of(2000, 1, 1), - LocalDateTime.of(1961, 4, 12, 0, 0), LocalDateTime.of(1961, 4, 12, 0, 0)), - Arguments.of(LocalDate.of(1961, 4, 12), LocalDateTime.of(1235, 5, 6, 1, 48), - LocalDateTime.of(1961, 4, 12, 1, 48), LocalDateTime.of(1961, 4, 11, 22, 12)), - Arguments.of(LocalDate.of(1961, 4, 12), Instant.ofEpochSecond(42), - LocalDateTime.of(1961, 4, 12, 0, 0, 42), LocalDateTime.of(1961, 4, 11, 23, 59, 18)), + Arguments.of( + LocalDate.of(1961, 4, 12), + LocalTime.of(9, 7), + LocalDateTime.of(1961, 4, 12, 9, 7), + LocalDateTime.of(1961, 4, 11, 14, 53)), + Arguments.of( + LocalDate.of(1961, 4, 12), + LocalDate.of(2000, 1, 1), + LocalDateTime.of(1961, 4, 12, 0, 0), + LocalDateTime.of(1961, 4, 12, 0, 0)), + Arguments.of( + LocalDate.of(1961, 4, 12), + LocalDateTime.of(1235, 5, 6, 1, 48), + LocalDateTime.of(1961, 4, 12, 1, 48), + LocalDateTime.of(1961, 4, 11, 22, 12)), + Arguments.of( + LocalDate.of(1961, 4, 12), + Instant.ofEpochSecond(42), + LocalDateTime.of(1961, 4, 12, 0, 0, 42), + LocalDateTime.of(1961, 4, 11, 23, 59, 18)), // TIMESTAMP and TIME/DATE/DATETIME/TIMESTAMP - Arguments.of(Instant.ofEpochSecond(42), LocalTime.of(9, 7), - LocalDateTime.of(1970, 1, 1, 9, 7, 42), LocalDateTime.of(1969, 12, 31, 14, 53, 42)), - Arguments.of(Instant.ofEpochSecond(42), LocalDate.of(1961, 4, 12), - LocalDateTime.of(1970, 1, 1, 0, 0, 42), LocalDateTime.of(1970, 1, 1, 0, 0, 42)), - Arguments.of(Instant.ofEpochSecond(42), LocalDateTime.of(1961, 4, 12, 9, 7), - LocalDateTime.of(1970, 1, 1, 9, 7, 42), LocalDateTime.of(1969, 12, 31, 14, 53, 42)), - Arguments.of(Instant.ofEpochSecond(42), Instant.ofEpochMilli(42), + Arguments.of( + Instant.ofEpochSecond(42), + LocalTime.of(9, 7), + LocalDateTime.of(1970, 1, 
1, 9, 7, 42), + LocalDateTime.of(1969, 12, 31, 14, 53, 42)), + Arguments.of( + Instant.ofEpochSecond(42), + LocalDate.of(1961, 4, 12), + LocalDateTime.of(1970, 1, 1, 0, 0, 42), + LocalDateTime.of(1970, 1, 1, 0, 0, 42)), + Arguments.of( + Instant.ofEpochSecond(42), + LocalDateTime.of(1961, 4, 12, 9, 7), + LocalDateTime.of(1970, 1, 1, 9, 7, 42), + LocalDateTime.of(1969, 12, 31, 14, 53, 42)), + Arguments.of( + Instant.ofEpochSecond(42), + Instant.ofEpochMilli(42), LocalDateTime.of(1970, 1, 1, 0, 0, 42, 42000000), - LocalDateTime.of(1970, 1, 1, 0, 0, 41, 958000000)) - ); + LocalDateTime.of(1970, 1, 1, 0, 0, 41, 958000000))); } /** * Check that `ADDTIME` and `SUBTIME` functions result value and type. + * * @param arg1 First argument. * @param arg2 Second argument. * @param addTimeExpectedResult Expected result for `ADDTIME`. @@ -113,9 +148,11 @@ private static Stream getTestData() { */ @ParameterizedTest @MethodSource("getTestData") - public void return_datetime_when_first_arg_is_not_time(Temporal arg1, Temporal arg2, - LocalDateTime addTimeExpectedResult, - LocalDateTime subTimeExpectedResult) { + public void return_datetime_when_first_arg_is_not_time( + Temporal arg1, + Temporal arg2, + LocalDateTime addTimeExpectedResult, + LocalDateTime subTimeExpectedResult) { var res = addtime(arg1, arg2); assertEquals(DATETIME, res.type()); assertEquals(addTimeExpectedResult, res.datetimeValue()); diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java index 7bc788c9a7..17ff4f67ab 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -20,150 +19,162 @@ class ConvertTZTest extends 
ExpressionTestBase { @Test public void invalidDate() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2021-04-31 10:00:00")), - DSL.literal("+00:00"), - DSL.literal("+00:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2021-04-31 10:00:00")), + DSL.literal("+00:00"), + DSL.literal("+00:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void conversionFromNoOffset() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+00:00"), - DSL.literal("+10:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+00:00"), + DSL.literal("+10:00")); assertEquals(DATETIME, expr.type()); assertEquals(new ExprDatetimeValue("2008-05-16 08:00:00"), expr.valueOf()); } @Test public void conversionToInvalidInput3Over() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+00:00"), - DSL.literal("+16:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+00:00"), + DSL.literal("+16:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void conversionToInvalidInput3Under() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+00:00"), - DSL.literal("-16:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+00:00"), + DSL.literal("-16:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void conversionFromPositiveToPositive() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+15:00"), - DSL.literal("+01:00")); + FunctionExpression expr = + DSL.convert_tz( + 
DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+15:00"), + DSL.literal("+01:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidInput2Under() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("-15:00"), - DSL.literal("+01:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("-15:00"), + DSL.literal("+01:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidInput3Over() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("-12:00"), - DSL.literal("+15:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("-12:00"), + DSL.literal("+15:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void conversionToPositiveEdge() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+00:00"), - DSL.literal("+14:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+00:00"), + DSL.literal("+14:00")); assertEquals(DATETIME, expr.type()); assertEquals(new ExprDatetimeValue("2008-05-16 12:00:00"), expr.valueOf()); } @Test public void conversionToNegativeEdge() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+00:01"), - DSL.literal("-13:59")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+00:01"), + DSL.literal("-13:59")); assertEquals(DATETIME, expr.type()); assertEquals(new ExprDatetimeValue("2008-05-15 08:00:00"), expr.valueOf()); } @Test public void invalidInput2() { - FunctionExpression expr = 
DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+)()"), - DSL.literal("+12:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+)()"), + DSL.literal("+12:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidInput3() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2008-05-15 22:00:00")), - DSL.literal("+00:00"), - DSL.literal("test")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.literal("+00:00"), + DSL.literal("test")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidInput1() { - FunctionExpression expr = DSL.convert_tz( - DSL.literal("test"), - DSL.literal("+00:00"), - DSL.literal("+00:00")); + FunctionExpression expr = + DSL.convert_tz(DSL.literal("test"), DSL.literal("+00:00"), DSL.literal("+00:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidDateFeb30() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2021-02-30 10:00:00")), - DSL.literal("+00:00"), - DSL.literal("+00:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2021-02-30 10:00:00")), + DSL.literal("+00:00"), + DSL.literal("+00:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidDateApril31() { - FunctionExpression expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2021-04-31 10:00:00")), - DSL.literal("+00:00"), - DSL.literal("+00:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2021-04-31 10:00:00")), + DSL.literal("+00:00"), + DSL.literal("+00:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidMonth13() { - FunctionExpression 
expr = DSL.convert_tz(DSL.datetime( - DSL.literal("2021-13-03 10:00:00")), - DSL.literal("+00:00"), - DSL.literal("+00:00")); + FunctionExpression expr = + DSL.convert_tz( + DSL.datetime(DSL.literal("2021-13-03 10:00:00")), + DSL.literal("+00:00"), + DSL.literal("+00:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java index 973b168355..52db0a17e5 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java @@ -35,8 +35,8 @@ public void adddate_returns_datetime_when_args_are_time_and_time_interval() { @Test public void date_add_returns_datetime_when_args_are_time_and_time_interval() { - var res = date_add(LocalTime.of(10, 20, 30), - Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); + var res = + date_add(LocalTime.of(10, 20, 30), Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); assertEquals(DATETIME, res.type()); assertEquals(LocalTime.of(11, 23, 12).atDate(today()), res.datetimeValue()); } @@ -50,8 +50,8 @@ public void adddate_time_limited_by_24_hours() { @Test public void date_add_time_limited_by_24_hours() { - var res = date_add(LocalTime.of(10, 20, 30), - Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); + var res = + date_add(LocalTime.of(10, 20, 30), Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); assertEquals(DATETIME, res.type()); assertEquals(LocalTime.of(7, 10, 37), res.datetimeValue().toLocalTime()); } @@ -108,8 +108,9 @@ public void adddate_returns_datetime_when_first_arg_is_datetime() { @Test public void date_add_returns_datetime_when_first_arg_is_timestamp() { - var res = date_add(LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), - Duration.ofMinutes(108)); + var res = + 
date_add( + LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), Duration.ofMinutes(108)); assertEquals(DATETIME, res.type()); assertEquals(LocalDateTime.of(1961, 4, 12, 10, 55), res.datetimeValue()); } @@ -127,10 +128,13 @@ public void adddate_has_second_signature_but_not_date_add() { var res = adddate(LocalDateTime.of(1961, 4, 12, 9, 7), 100500); assertEquals(DATETIME, res.type()); - var exception = assertThrows(ExpressionEvaluationException.class, - () -> date_add(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); - assertEquals("date_add function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," - + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", + var exception = + assertThrows( + ExpressionEvaluationException.class, + () -> date_add(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); + assertEquals( + "date_add function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," + + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java index 72c1ceba03..a630758456 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java @@ -40,33 +40,26 @@ private static Stream getTestData() { Arguments.of(timeSample1, timeSample2, 0L), Arguments.of(timeSample1, dateNow, 0L), Arguments.of(timeSample1, LocalDateTime.now(), 0L), - Arguments.of(timeSample1, - Instant.now().plusMillis(TimeZone.getDefault().getRawOffset()), 0L), - Arguments.of(dateSample1, timeSample1, - -DAYS.between(dateSample1, dateNow)), - Arguments.of(dateSample1, dateSample3, - -DAYS.between(dateSample1, dateSample3)), - Arguments.of(dateSample1, dateTimeSample1, - -DAYS.between(dateSample1, dateSample2)), - Arguments.of(dateSample1, Instant.ofEpochSecond(42), - 
-DAYS.between(dateSample1, epochStart)), - Arguments.of(dateTimeSample1, LocalTime.now(), - -DAYS.between(dateSample2, dateNow)), - Arguments.of(dateTimeSample1, dateSample3, - -DAYS.between(dateSample2, dateSample3)), - Arguments.of(dateTimeSample1, dateTimeSample2, - -DAYS.between(dateSample2, dateSample3)), - Arguments.of(dateTimeSample1, Instant.ofEpochSecond(0), - -DAYS.between(dateSample2, epochStart)), - Arguments.of(Instant.ofEpochSecond(0), LocalTime.MAX, - -DAYS.between(epochStart, dateNow)), - Arguments.of(Instant.ofEpochSecond(0), dateSample3, - -DAYS.between(epochStart, dateSample3)), - Arguments.of(Instant.ofEpochSecond(0), dateTimeSample2, - -DAYS.between(epochStart, dateSample3)), - Arguments.of(Instant.ofEpochSecond(0), Instant.now(), - -DAYS.between(epochStart, LocalDateTime.now(ZoneId.of("UTC")))) - ); + Arguments.of( + timeSample1, Instant.now().plusMillis(TimeZone.getDefault().getRawOffset()), 0L), + Arguments.of(dateSample1, timeSample1, -DAYS.between(dateSample1, dateNow)), + Arguments.of(dateSample1, dateSample3, -DAYS.between(dateSample1, dateSample3)), + Arguments.of(dateSample1, dateTimeSample1, -DAYS.between(dateSample1, dateSample2)), + Arguments.of( + dateSample1, Instant.ofEpochSecond(42), -DAYS.between(dateSample1, epochStart)), + Arguments.of(dateTimeSample1, LocalTime.now(), -DAYS.between(dateSample2, dateNow)), + Arguments.of(dateTimeSample1, dateSample3, -DAYS.between(dateSample2, dateSample3)), + Arguments.of(dateTimeSample1, dateTimeSample2, -DAYS.between(dateSample2, dateSample3)), + Arguments.of( + dateTimeSample1, Instant.ofEpochSecond(0), -DAYS.between(dateSample2, epochStart)), + Arguments.of(Instant.ofEpochSecond(0), LocalTime.MAX, -DAYS.between(epochStart, dateNow)), + Arguments.of(Instant.ofEpochSecond(0), dateSample3, -DAYS.between(epochStart, dateSample3)), + Arguments.of( + Instant.ofEpochSecond(0), dateTimeSample2, -DAYS.between(epochStart, dateSample3)), + Arguments.of( + Instant.ofEpochSecond(0), + Instant.now(), 
+ -DAYS.between(epochStart, LocalDateTime.now(ZoneId.of("UTC"))))); } @ParameterizedTest diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java index 37c62313db..460e12384b 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java @@ -35,8 +35,8 @@ public void subdate_returns_datetime_when_args_are_time_and_time_interval() { @Test public void date_sub_returns_datetime_when_args_are_time_and_time_interval() { - var res = date_sub(LocalTime.of(10, 20, 30), - Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); + var res = + date_sub(LocalTime.of(10, 20, 30), Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); assertEquals(DATETIME, res.type()); assertEquals(LocalTime.of(9, 17, 48).atDate(today()), res.datetimeValue()); } @@ -50,8 +50,8 @@ public void subdate_time_limited_by_24_hours() { @Test public void date_sub_time_limited_by_24_hours() { - var res = date_sub(LocalTime.of(10, 20, 30), - Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); + var res = + date_sub(LocalTime.of(10, 20, 30), Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); assertEquals(DATETIME, res.type()); assertEquals(LocalTime.of(13, 30, 23), res.datetimeValue().toLocalTime()); } @@ -108,8 +108,9 @@ public void subdate_returns_datetime_when_first_arg_is_datetime() { @Test public void date_sub_returns_datetime_when_first_arg_is_timestamp() { - var res = date_sub(LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), - Duration.ofMinutes(108)); + var res = + date_sub( + LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), Duration.ofMinutes(108)); assertEquals(DATETIME, res.type()); assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), res.datetimeValue()); } @@ -127,10 +128,13 @@ public void 
subdate_has_second_signature_but_not_date_sub() { var res = subdate(LocalDateTime.of(1961, 4, 12, 9, 7), 100500); assertEquals(DATETIME, res.type()); - var exception = assertThrows(ExpressionEvaluationException.class, - () -> date_sub(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); - assertEquals("date_sub function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," - + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", + var exception = + assertThrows( + ExpressionEvaluationException.class, + () -> date_sub(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); + assertEquals( + "date_sub function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," + + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java index f8abfe7580..c2a6129626 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_YEAR; @@ -51,79 +50,81 @@ class DateTimeFunctionTest extends ExpressionTestBase { - final List dateFormatTesters = ImmutableList.of( - new DateFormatTester("1998-01-31 13:14:15.012345", - ImmutableList.of("%H","%I","%k","%l","%i","%p","%r","%S","%T"," %M", - "%W","%D","%Y","%y","%a","%b","%j","%m","%d","%h","%s","%w","%f", - "%q","%"), - ImmutableList.of("13","01","13","1","14","PM","01:14:15 PM","15","13:14:15"," January", - "Saturday","31st","1998","98","Sat","Jan","031","01","31","01","15","6","012345", - "q","%") - ), - new DateFormatTester("1999-12-01", - ImmutableList.of("%D"), - ImmutableList.of("1st") - ), - new DateFormatTester("1999-12-02", - 
ImmutableList.of("%D"), - ImmutableList.of("2nd") - ), - new DateFormatTester("1999-12-03", - ImmutableList.of("%D"), - ImmutableList.of("3rd") - ), - new DateFormatTester("1999-12-04", - ImmutableList.of("%D"), - ImmutableList.of("4th") - ), - new DateFormatTester("1999-12-11", - ImmutableList.of("%D"), - ImmutableList.of("11th") - ), - new DateFormatTester("1999-12-12", - ImmutableList.of("%D"), - ImmutableList.of("12th") - ), - new DateFormatTester("1999-12-13", - ImmutableList.of("%D"), - ImmutableList.of("13th") - ), - new DateFormatTester("1999-12-31", - ImmutableList.of("%x","%v","%X","%V","%u","%U"), - ImmutableList.of("1999", "52", "1999", "52", "52", "52") - ), - new DateFormatTester("2000-01-01", - ImmutableList.of("%x","%v","%X","%V","%u","%U"), - ImmutableList.of("1999", "52", "1999", "52", "0", "0") - ), - new DateFormatTester("1998-12-31", - ImmutableList.of("%x","%v","%X","%V","%u","%U"), - ImmutableList.of("1998", "52", "1998", "52", "52", "52") - ), - new DateFormatTester("1999-01-01", - ImmutableList.of("%x","%v","%X","%V","%u","%U"), - ImmutableList.of("1998", "52", "1998", "52", "0", "0") - ), - new DateFormatTester("2020-01-04", - ImmutableList.of("%x","%X"), - ImmutableList.of("2020", "2019") - ), - new DateFormatTester("2008-12-31", - ImmutableList.of("%v","%V","%u","%U"), - ImmutableList.of("53","52","53","52") - ), - new DateFormatTester("1998-01-31 13:14:15.012345", - ImmutableList.of("%Y-%m-%dT%TZ"), - ImmutableList.of("1998-01-31T13:14:15Z") - ), - new DateFormatTester("1998-01-31 13:14:15.012345", - ImmutableList.of("%Y-%m-%da %T a"), - ImmutableList.of("1998-01-31PM 13:14:15 PM") - ), - new DateFormatTester("1998-01-31 13:14:15.012345", - ImmutableList.of("%Y-%m-%db %T b"), - ImmutableList.of("1998-01-31b 13:14:15 b")) - ); + final List dateFormatTesters = + ImmutableList.of( + new DateFormatTester( + "1998-01-31 13:14:15.012345", + ImmutableList.of( + "%H", + "%I", "%k", "%l", "%i", "%p", "%r", "%S", "%T", " %M", "%W", "%D", "%Y", 
"%y", + "%a", "%b", "%j", "%m", "%d", "%h", "%s", "%w", "%f", "%q", "%"), + ImmutableList.of( + "13", + "01", + "13", + "1", + "14", + "PM", + "01:14:15 PM", + "15", + "13:14:15", + " January", + "Saturday", + "31st", + "1998", + "98", + "Sat", + "Jan", + "031", + "01", + "31", + "01", + "15", + "6", + "012345", + "q", + "%")), + new DateFormatTester("1999-12-01", ImmutableList.of("%D"), ImmutableList.of("1st")), + new DateFormatTester("1999-12-02", ImmutableList.of("%D"), ImmutableList.of("2nd")), + new DateFormatTester("1999-12-03", ImmutableList.of("%D"), ImmutableList.of("3rd")), + new DateFormatTester("1999-12-04", ImmutableList.of("%D"), ImmutableList.of("4th")), + new DateFormatTester("1999-12-11", ImmutableList.of("%D"), ImmutableList.of("11th")), + new DateFormatTester("1999-12-12", ImmutableList.of("%D"), ImmutableList.of("12th")), + new DateFormatTester("1999-12-13", ImmutableList.of("%D"), ImmutableList.of("13th")), + new DateFormatTester( + "1999-12-31", + ImmutableList.of("%x", "%v", "%X", "%V", "%u", "%U"), + ImmutableList.of("1999", "52", "1999", "52", "52", "52")), + new DateFormatTester( + "2000-01-01", + ImmutableList.of("%x", "%v", "%X", "%V", "%u", "%U"), + ImmutableList.of("1999", "52", "1999", "52", "0", "0")), + new DateFormatTester( + "1998-12-31", + ImmutableList.of("%x", "%v", "%X", "%V", "%u", "%U"), + ImmutableList.of("1998", "52", "1998", "52", "52", "52")), + new DateFormatTester( + "1999-01-01", + ImmutableList.of("%x", "%v", "%X", "%V", "%u", "%U"), + ImmutableList.of("1998", "52", "1998", "52", "0", "0")), + new DateFormatTester( + "2020-01-04", ImmutableList.of("%x", "%X"), ImmutableList.of("2020", "2019")), + new DateFormatTester( + "2008-12-31", + ImmutableList.of("%v", "%V", "%u", "%U"), + ImmutableList.of("53", "52", "53", "52")), + new DateFormatTester( + "1998-01-31 13:14:15.012345", + ImmutableList.of("%Y-%m-%dT%TZ"), + ImmutableList.of("1998-01-31T13:14:15Z")), + new DateFormatTester( + "1998-01-31 13:14:15.012345", + 
ImmutableList.of("%Y-%m-%da %T a"), + ImmutableList.of("1998-01-31PM 13:14:15 PM")), + new DateFormatTester( + "1998-01-31 13:14:15.012345", + ImmutableList.of("%Y-%m-%db %T b"), + ImmutableList.of("1998-01-31b 13:14:15 b"))); @AllArgsConstructor private class DateFormatTester { @@ -196,8 +197,8 @@ public void dayName() { @Test public void dayOfMonth() { - FunctionExpression expression = DSL.dayofmonth( - functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); + FunctionExpression expression = + DSL.dayofmonth(functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); assertEquals(INTEGER, expression.type()); assertEquals("dayofmonth(DATE '2020-08-07')", expression.toString()); assertEquals(integerValue(7), eval(expression)); @@ -215,23 +216,21 @@ private void testDayOfMonthWithUnderscores(FunctionExpression dateExpression, in @Test public void dayOfMonthWithUnderscores() { - FunctionExpression expression1 = DSL.dayofmonth( - functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); + FunctionExpression expression1 = + DSL.dayofmonth(functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); FunctionExpression expression2 = DSL.dayofmonth(functionProperties, DSL.literal("2020-07-08")); assertAll( () -> testDayOfMonthWithUnderscores(expression1, 7), () -> assertEquals("dayofmonth(DATE '2020-08-07')", expression1.toString()), - () -> testDayOfMonthWithUnderscores(expression2, 8), - () -> assertEquals("dayofmonth(\"2020-07-08\")", expression2.toString()) - ); + () -> assertEquals("dayofmonth(\"2020-07-08\")", expression2.toString())); } @Test public void testDayOfMonthWithTimeType() { - FunctionExpression expression = DSL.day_of_month( - functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); + FunctionExpression expression = + DSL.day_of_month(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); assertEquals(INTEGER, eval(expression).type()); assertEquals( @@ -241,40 +240,35 @@ public void 
testDayOfMonthWithTimeType() { } private void testInvalidDayOfMonth(String date) { - FunctionExpression expression = DSL.day_of_month( - functionProperties, DSL.literal(new ExprDateValue(date))); + FunctionExpression expression = + DSL.day_of_month(functionProperties, DSL.literal(new ExprDateValue(date))); eval(expression); } @Test public void dayOfMonthWithUnderscoresLeapYear() { - //Feb. 29 of a leap year - testDayOfMonthWithUnderscores(DSL.day_of_month( - functionProperties, DSL.literal("2020-02-29")), 29); + // Feb. 29 of a leap year + testDayOfMonthWithUnderscores( + DSL.day_of_month(functionProperties, DSL.literal("2020-02-29")), 29); - //Feb. 29 of a non-leap year + // Feb. 29 of a non-leap year assertThrows(SemanticCheckException.class, () -> testInvalidDayOfMonth("2021-02-29")); } @Test public void dayOfMonthWithUnderscoresInvalidArguments() { assertAll( - //40th day of the month - () -> assertThrows( - SemanticCheckException.class, () -> testInvalidDayOfMonth("2021-02-40")), - //13th month of the year - () -> assertThrows( - SemanticCheckException.class, () -> testInvalidDayOfMonth("2021-13-40")), - //incorrect format - () -> assertThrows( - SemanticCheckException.class, () -> testInvalidDayOfMonth("asdfasdfasdf")) - ); - } - - private void dayOfWeekQuery( - FunctionExpression dateExpression, - int dayOfWeek, - String testExpr) { + // 40th day of the month + () -> assertThrows(SemanticCheckException.class, () -> testInvalidDayOfMonth("2021-02-40")), + // 13th month of the year + () -> assertThrows(SemanticCheckException.class, () -> testInvalidDayOfMonth("2021-13-40")), + // incorrect format + () -> + assertThrows( + SemanticCheckException.class, () -> testInvalidDayOfMonth("asdfasdfasdf"))); + } + + private void dayOfWeekQuery(FunctionExpression dateExpression, int dayOfWeek, String testExpr) { assertEquals(INTEGER, dateExpression.type()); assertEquals(integerValue(dayOfWeek), eval(dateExpression)); assertEquals(testExpr, dateExpression.toString()); @@ 
-282,34 +276,23 @@ private void dayOfWeekQuery( @Test public void dayOfWeek() { - FunctionExpression expression1 = DSL.dayofweek( - functionProperties, - DSL.literal(new ExprDateValue("2020-08-07"))); - FunctionExpression expression2 = DSL.dayofweek( - functionProperties, - DSL.literal(new ExprDateValue("2020-08-09"))); - FunctionExpression expression3 = DSL.dayofweek( - functionProperties, - DSL.literal("2020-08-09")); - FunctionExpression expression4 = DSL.dayofweek( - functionProperties, - DSL.literal("2020-08-09 01:02:03")); + FunctionExpression expression1 = + DSL.dayofweek(functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); + FunctionExpression expression2 = + DSL.dayofweek(functionProperties, DSL.literal(new ExprDateValue("2020-08-09"))); + FunctionExpression expression3 = DSL.dayofweek(functionProperties, DSL.literal("2020-08-09")); + FunctionExpression expression4 = + DSL.dayofweek(functionProperties, DSL.literal("2020-08-09 01:02:03")); assertAll( () -> dayOfWeekQuery(expression1, 6, "dayofweek(DATE '2020-08-07')"), - () -> dayOfWeekQuery(expression2, 1, "dayofweek(DATE '2020-08-09')"), - () -> dayOfWeekQuery(expression3, 1, "dayofweek(\"2020-08-09\")"), - - () -> dayOfWeekQuery(expression4, 1, "dayofweek(\"2020-08-09 01:02:03\")") - ); + () -> dayOfWeekQuery(expression4, 1, "dayofweek(\"2020-08-09 01:02:03\")")); } private void dayOfWeekWithUnderscoresQuery( - FunctionExpression dateExpression, - int dayOfWeek, - String testExpr) { + FunctionExpression dateExpression, int dayOfWeek, String testExpr) { assertEquals(INTEGER, dateExpression.type()); assertEquals(integerValue(dayOfWeek), eval(dateExpression)); assertEquals(testExpr, dateExpression.toString()); @@ -317,88 +300,80 @@ private void dayOfWeekWithUnderscoresQuery( @Test public void dayOfWeekWithUnderscores() { - FunctionExpression expression1 = DSL.day_of_week( - functionProperties, - DSL.literal(new ExprDateValue("2020-08-07"))); - FunctionExpression expression2 = DSL.day_of_week( 
- functionProperties, - DSL.literal(new ExprDateValue("2020-08-09"))); - FunctionExpression expression3 = DSL.day_of_week( - functionProperties, - DSL.literal("2020-08-09")); - FunctionExpression expression4 = DSL.day_of_week( - functionProperties, - DSL.literal("2020-08-09 01:02:03")); + FunctionExpression expression1 = + DSL.day_of_week(functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); + FunctionExpression expression2 = + DSL.day_of_week(functionProperties, DSL.literal(new ExprDateValue("2020-08-09"))); + FunctionExpression expression3 = DSL.day_of_week(functionProperties, DSL.literal("2020-08-09")); + FunctionExpression expression4 = + DSL.day_of_week(functionProperties, DSL.literal("2020-08-09 01:02:03")); assertAll( () -> dayOfWeekWithUnderscoresQuery(expression1, 6, "day_of_week(DATE '2020-08-07')"), - () -> dayOfWeekWithUnderscoresQuery(expression2, 1, "day_of_week(DATE '2020-08-09')"), - () -> dayOfWeekWithUnderscoresQuery(expression3, 1, "day_of_week(\"2020-08-09\")"), - - () -> dayOfWeekWithUnderscoresQuery( - expression4, 1, "day_of_week(\"2020-08-09 01:02:03\")") - ); + () -> + dayOfWeekWithUnderscoresQuery(expression4, 1, "day_of_week(\"2020-08-09 01:02:03\")")); } @Test public void testDayOfWeekWithTimeType() { - FunctionExpression expression = DSL.day_of_week( - functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); + FunctionExpression expression = + DSL.day_of_week(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); assertAll( () -> assertEquals(INTEGER, eval(expression).type()), - () -> assertEquals(( - LocalDate.now( - functionProperties.getQueryStartClock()).getDayOfWeek().getValue() % 7) + 1, - eval(expression).integerValue()), - () -> assertEquals("day_of_week(TIME '12:23:34')", expression.toString()) - ); + () -> + assertEquals( + (LocalDate.now(functionProperties.getQueryStartClock()).getDayOfWeek().getValue() + % 7) + + 1, + eval(expression).integerValue()), + () -> assertEquals("day_of_week(TIME 
'12:23:34')", expression.toString())); } private void testInvalidDayOfWeek(String date) { - FunctionExpression expression = DSL.day_of_week( - functionProperties, DSL.literal(new ExprDateValue(date))); + FunctionExpression expression = + DSL.day_of_week(functionProperties, DSL.literal(new ExprDateValue(date))); eval(expression); } @Test public void dayOfWeekWithUnderscoresLeapYear() { assertAll( - //Feb. 29 of a leap year - () -> dayOfWeekWithUnderscoresQuery(DSL.day_of_week( - functionProperties, - DSL.literal("2020-02-29")), 7, "day_of_week(\"2020-02-29\")"), - //day after Feb. 29 of a leap year - () -> dayOfWeekWithUnderscoresQuery(DSL.day_of_week( - functionProperties, - DSL.literal("2020-03-01")), 1, "day_of_week(\"2020-03-01\")"), - //Feb. 28 of a non-leap year - () -> dayOfWeekWithUnderscoresQuery(DSL.day_of_week( - functionProperties, - DSL.literal("2021-02-28")), 1, "day_of_week(\"2021-02-28\")"), - //Feb. 29 of a non-leap year - () -> assertThrows( - SemanticCheckException.class, () -> testInvalidDayOfWeek("2021-02-29")) - ); + // Feb. 29 of a leap year + () -> + dayOfWeekWithUnderscoresQuery( + DSL.day_of_week(functionProperties, DSL.literal("2020-02-29")), + 7, + "day_of_week(\"2020-02-29\")"), + // day after Feb. 29 of a leap year + () -> + dayOfWeekWithUnderscoresQuery( + DSL.day_of_week(functionProperties, DSL.literal("2020-03-01")), + 1, + "day_of_week(\"2020-03-01\")"), + // Feb. 28 of a non-leap year + () -> + dayOfWeekWithUnderscoresQuery( + DSL.day_of_week(functionProperties, DSL.literal("2021-02-28")), + 1, + "day_of_week(\"2021-02-28\")"), + // Feb. 
29 of a non-leap year + () -> assertThrows(SemanticCheckException.class, () -> testInvalidDayOfWeek("2021-02-29"))); } @Test public void dayOfWeekWithUnderscoresInvalidArgument() { assertAll( - //40th day of the month - () -> assertThrows(SemanticCheckException.class, - () -> testInvalidDayOfWeek("2021-02-40")), + // 40th day of the month + () -> assertThrows(SemanticCheckException.class, () -> testInvalidDayOfWeek("2021-02-40")), - //13th month of the year - () -> assertThrows(SemanticCheckException.class, - () -> testInvalidDayOfWeek("2021-13-29")), + // 13th month of the year + () -> assertThrows(SemanticCheckException.class, () -> testInvalidDayOfWeek("2021-13-29")), - //incorrect format - () -> assertThrows(SemanticCheckException.class, - () -> testInvalidDayOfWeek("asdfasdf")) - ); + // incorrect format + () -> assertThrows(SemanticCheckException.class, () -> testInvalidDayOfWeek("asdfasdf"))); } @Test @@ -421,40 +396,26 @@ public void dayOfYear() { private static Stream getTestDataForDayOfYear() { return Stream.of( - Arguments.of(DSL.literal( - new ExprDateValue("2020-08-07")), - "day_of_year(DATE '2020-08-07')", - 220), - Arguments.of(DSL.literal( - new ExprDatetimeValue("2020-08-07 12:23:34")), + Arguments.of( + DSL.literal(new ExprDateValue("2020-08-07")), "day_of_year(DATE '2020-08-07')", 220), + Arguments.of( + DSL.literal(new ExprDatetimeValue("2020-08-07 12:23:34")), "day_of_year(DATETIME '2020-08-07 12:23:34')", 220), - Arguments.of(DSL.literal( - new ExprTimestampValue("2020-08-07 12:23:34")), + Arguments.of( + DSL.literal(new ExprTimestampValue("2020-08-07 12:23:34")), "day_of_year(TIMESTAMP '2020-08-07 12:23:34')", 220), - Arguments.of(DSL.literal( - "2020-08-07"), - "day_of_year(\"2020-08-07\")", - 220), - Arguments.of(DSL.literal( - "2020-08-07 01:02:03"), - "day_of_year(\"2020-08-07 01:02:03\")", - 220) - ); + Arguments.of(DSL.literal("2020-08-07"), "day_of_year(\"2020-08-07\")", 220), + Arguments.of( + DSL.literal("2020-08-07 01:02:03"), 
"day_of_year(\"2020-08-07 01:02:03\")", 220)); } @ParameterizedTest(name = "{0}") @MethodSource("getTestDataForDayOfYear") public void dayOfYearWithUnderscores( - LiteralExpression arg, - String expectedString, - int expectedResult) { - validateStringFormat( - DSL.day_of_year(functionProperties, arg), - expectedString, - expectedResult - ); + LiteralExpression arg, String expectedString, int expectedResult) { + validateStringFormat(DSL.day_of_year(functionProperties, arg), expectedString, expectedResult); } @Test @@ -466,98 +427,81 @@ public void testDayOfYearWithTimeType() { } public void dayOfYearWithUnderscoresQuery(String date, int dayOfYear) { - FunctionExpression expression = DSL.day_of_year( - functionProperties, - DSL.literal(new ExprDateValue(date))); + FunctionExpression expression = + DSL.day_of_year(functionProperties, DSL.literal(new ExprDateValue(date))); assertAll( () -> assertEquals(INTEGER, expression.type()), - () -> assertEquals(integerValue(dayOfYear), eval(expression)) - ); + () -> assertEquals(integerValue(dayOfYear), eval(expression))); } @Test public void dayOfYearWithUnderscoresDifferentArgumentFormats() { - FunctionExpression expression1 = DSL.day_of_year( - functionProperties, - DSL.literal(new ExprDateValue("2020-08-07"))); - FunctionExpression expression2 = DSL.day_of_year( - functionProperties, - DSL.literal("2020-08-07")); - FunctionExpression expression3 = DSL.day_of_year( - functionProperties, - DSL.literal("2020-08-07 01:02:03")); + FunctionExpression expression1 = + DSL.day_of_year(functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); + FunctionExpression expression2 = DSL.day_of_year(functionProperties, DSL.literal("2020-08-07")); + FunctionExpression expression3 = + DSL.day_of_year(functionProperties, DSL.literal("2020-08-07 01:02:03")); assertAll( () -> dayOfYearWithUnderscoresQuery("2020-08-07", 220), () -> assertEquals("day_of_year(DATE '2020-08-07')", expression1.toString()), - () -> 
dayOfYearWithUnderscoresQuery("2020-08-07", 220), - () -> assertEquals("day_of_year(\"2020-08-07\")", expression2.toString()), - + () -> assertEquals("day_of_year(\"2020-08-07\")", expression2.toString()), () -> dayOfYearWithUnderscoresQuery("2020-08-07 01:02:03", 220), - () -> assertEquals("day_of_year(\"2020-08-07 01:02:03\")", expression3.toString()) - ); + () -> assertEquals("day_of_year(\"2020-08-07 01:02:03\")", expression3.toString())); } @Test public void dayOfYearWithUnderscoresCornerCaseDates() { assertAll( - //31st of December during non leap year (should be 365) + // 31st of December during non leap year (should be 365) () -> dayOfYearWithUnderscoresQuery("2019-12-31", 365), - //Year 1200 + // Year 1200 () -> dayOfYearWithUnderscoresQuery("1200-02-28", 59), - //Year 4000 - () -> dayOfYearWithUnderscoresQuery("4000-02-28", 59) - ); + // Year 4000 + () -> dayOfYearWithUnderscoresQuery("4000-02-28", 59)); } @Test public void dayOfYearWithUnderscoresLeapYear() { assertAll( - //28th of Feb + // 28th of Feb () -> dayOfYearWithUnderscoresQuery("2020-02-28", 59), - //29th of Feb during leap year + // 29th of Feb during leap year () -> dayOfYearWithUnderscoresQuery("2020-02-29 23:59:59", 60), () -> dayOfYearWithUnderscoresQuery("2020-02-29", 60), - //1st of March during leap year + // 1st of March during leap year () -> dayOfYearWithUnderscoresQuery("2020-03-01 00:00:00", 61), () -> dayOfYearWithUnderscoresQuery("2020-03-01", 61), - //1st of March during non leap year + // 1st of March during non leap year () -> dayOfYearWithUnderscoresQuery("2019-03-01", 60), - //31st of December during leap year (should be 366) - () -> dayOfYearWithUnderscoresQuery("2020-12-31", 366) - ); + // 31st of December during leap year (should be 366) + () -> dayOfYearWithUnderscoresQuery("2020-12-31", 366)); } private void invalidDayOfYearQuery(String date) { - FunctionExpression expression = DSL.day_of_year( - functionProperties, - DSL.literal(new ExprDateValue(date))); + 
FunctionExpression expression = + DSL.day_of_year(functionProperties, DSL.literal(new ExprDateValue(date))); eval(expression); } @Test public void invalidDayOfYearArgument() { assertAll( - //29th of Feb non-leapyear - () -> assertThrows( - SemanticCheckException.class, - () -> invalidDayOfYearQuery("2019-02-29")), + // 29th of Feb non-leapyear + () -> assertThrows(SemanticCheckException.class, () -> invalidDayOfYearQuery("2019-02-29")), - //13th month - () -> assertThrows( - SemanticCheckException.class, - () -> invalidDayOfYearQuery("2019-13-15")), + // 13th month + () -> assertThrows(SemanticCheckException.class, () -> invalidDayOfYearQuery("2019-13-15")), - //incorrect format for type - () -> assertThrows( - SemanticCheckException.class, - () -> invalidDayOfYearQuery("asdfasdfasdf")) - ); + // incorrect format for type + () -> + assertThrows( + SemanticCheckException.class, () -> invalidDayOfYearQuery("asdfasdfasdf"))); } @Test @@ -589,13 +533,11 @@ private static Stream getTestDataForGetFormat() { Arguments.of("TIMESTAMP", "JIS", "%Y-%m-%d %H:%i:%s"), Arguments.of("TIMESTAMP", "ISO", "%Y-%m-%d %H:%i:%s"), Arguments.of("TIMESTAMP", "EUR", "%Y-%m-%d %H.%i.%s"), - Arguments.of("TIMESTAMP", "INTERNAL", "%Y%m%d%H%i%s") - ); + Arguments.of("TIMESTAMP", "INTERNAL", "%Y%m%d%H%i%s")); } - private void getFormatQuery(LiteralExpression argType, - LiteralExpression namedFormat, - String expectedResult) { + private void getFormatQuery( + LiteralExpression argType, LiteralExpression namedFormat, String expectedResult) { FunctionExpression expr = DSL.get_format(argType, namedFormat); assertEquals(STRING, expr.type()); assertEquals(expectedResult, eval(expr).stringValue()); @@ -603,20 +545,13 @@ private void getFormatQuery(LiteralExpression argType, @ParameterizedTest(name = "{0}{1}") @MethodSource("getTestDataForGetFormat") - public void testGetFormat(String arg, - String format, - String expectedResult) { - getFormatQuery( - DSL.literal(arg), - DSL.literal(new 
ExprStringValue(format)), - expectedResult); + public void testGetFormat(String arg, String format, String expectedResult) { + getFormatQuery(DSL.literal(arg), DSL.literal(new ExprStringValue(format)), expectedResult); } @Test public void testGetFormatInvalidFormat() { - FunctionExpression expr = DSL.get_format( - DSL.literal("DATE"), - DSL.literal("1SA")); + FunctionExpression expr = DSL.get_format(DSL.literal("DATE"), DSL.literal("1SA")); assertEquals(nullValue(), eval(expr)); } @@ -655,25 +590,20 @@ private void testInvalidMinuteOfDay(String date) { @Test public void invalidMinuteOfDay() { - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("2022-12-14 12:23:3400")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("2022-12-14 12:2300:34")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("2022-12-14 1200:23:34")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("2022-12-1400 12:23:34")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("2022-1200-14 12:23:34")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("12:23:3400")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("12:2300:34")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("1200:23:34")); - assertThrows(SemanticCheckException.class, - () -> testInvalidMinuteOfDay("asdfasdfasdf")); - + assertThrows( + SemanticCheckException.class, () -> testInvalidMinuteOfDay("2022-12-14 12:23:3400")); + assertThrows( + SemanticCheckException.class, () -> testInvalidMinuteOfDay("2022-12-14 12:2300:34")); + assertThrows( + SemanticCheckException.class, () -> testInvalidMinuteOfDay("2022-12-14 1200:23:34")); + assertThrows( + SemanticCheckException.class, () -> testInvalidMinuteOfDay("2022-12-1400 12:23:34")); + assertThrows( + SemanticCheckException.class, () -> testInvalidMinuteOfDay("2022-1200-14 
12:23:34")); + assertThrows(SemanticCheckException.class, () -> testInvalidMinuteOfDay("12:23:3400")); + assertThrows(SemanticCheckException.class, () -> testInvalidMinuteOfDay("12:2300:34")); + assertThrows(SemanticCheckException.class, () -> testInvalidMinuteOfDay("1200:23:34")); + assertThrows(SemanticCheckException.class, () -> testInvalidMinuteOfDay("asdfasdfasdf")); } private void hourOfDayQuery(FunctionExpression dateExpression, int hour) { @@ -685,28 +615,23 @@ private void hourOfDayQuery(FunctionExpression dateExpression, int hour) { public void hourOfDay() { FunctionExpression expression1 = DSL.hour_of_day(DSL.literal(new ExprTimeValue("01:02:03"))); FunctionExpression expression2 = DSL.hour_of_day(DSL.literal("01:02:03")); - FunctionExpression expression3 = DSL.hour_of_day( - DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03"))); - FunctionExpression expression4 = DSL.hour_of_day( - DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); + FunctionExpression expression3 = + DSL.hour_of_day(DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03"))); + FunctionExpression expression4 = + DSL.hour_of_day(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); FunctionExpression expression5 = DSL.hour_of_day(DSL.literal("2020-08-17 01:02:03")); assertAll( () -> hourOfDayQuery(expression1, 1), () -> assertEquals("hour_of_day(TIME '01:02:03')", expression1.toString()), - () -> hourOfDayQuery(expression2, 1), () -> assertEquals("hour_of_day(\"01:02:03\")", expression2.toString()), - () -> hourOfDayQuery(expression3, 1), () -> assertEquals("hour_of_day(TIMESTAMP '2020-08-17 01:02:03')", expression3.toString()), - () -> hourOfDayQuery(expression4, 1), () -> assertEquals("hour_of_day(DATETIME '2020-08-17 01:02:03')", expression4.toString()), - () -> hourOfDayQuery(expression5, 1), - () -> assertEquals("hour_of_day(\"2020-08-17 01:02:03\")", expression5.toString()) - ); + () -> assertEquals("hour_of_day(\"2020-08-17 01:02:03\")", 
expression5.toString())); } private void invalidHourOfDayQuery(String time) { @@ -717,24 +642,20 @@ private void invalidHourOfDayQuery(String time) { @Test public void hourOfDayInvalidArguments() { assertAll( - //Invalid Seconds + // Invalid Seconds () -> assertThrows(SemanticCheckException.class, () -> invalidHourOfDayQuery("12:23:61")), - //Invalid Minutes + // Invalid Minutes () -> assertThrows(SemanticCheckException.class, () -> invalidHourOfDayQuery("12:61:34")), - //Invalid Hours + // Invalid Hours () -> assertThrows(SemanticCheckException.class, () -> invalidHourOfDayQuery("25:23:34")), - //incorrect format - () -> assertThrows(SemanticCheckException.class, () -> invalidHourOfDayQuery("asdfasdf")) - ); - + // incorrect format + () -> assertThrows(SemanticCheckException.class, () -> invalidHourOfDayQuery("asdfasdf"))); } private void checkForExpectedDay( - FunctionExpression functionExpression, - String expectedDay, - String testExpr) { + FunctionExpression functionExpression, String expectedDay, String testExpr) { assertEquals(DATE, functionExpression.type()); assertEquals(new ExprDateValue(expectedDay), eval(functionExpression)); assertEquals(testExpr, functionExpression.toString()); @@ -743,9 +664,9 @@ private void checkForExpectedDay( private static Stream getTestDataForLastDay() { return Stream.of( Arguments.of(new ExprDateValue("2017-01-20"), "2017-01-31", "last_day(DATE '2017-01-20')"), - //Leap year + // Leap year Arguments.of(new ExprDateValue("2020-02-20"), "2020-02-29", "last_day(DATE '2020-02-20')"), - //Non leap year + // Non leap year Arguments.of(new ExprDateValue("2017-02-20"), "2017-02-28", "last_day(DATE '2017-02-20')"), Arguments.of(new ExprDateValue("2017-03-20"), "2017-03-31", "last_day(DATE '2017-03-20')"), Arguments.of(new ExprDateValue("2017-04-20"), "2017-04-30", "last_day(DATE '2017-04-20')"), @@ -756,8 +677,7 @@ private static Stream getTestDataForLastDay() { Arguments.of(new ExprDateValue("2017-09-20"), "2017-09-30", "last_day(DATE 
'2017-09-20')"), Arguments.of(new ExprDateValue("2017-10-20"), "2017-10-31", "last_day(DATE '2017-10-20')"), Arguments.of(new ExprDateValue("2017-11-20"), "2017-11-30", "last_day(DATE '2017-11-20')"), - Arguments.of(new ExprDateValue("2017-12-20"), "2017-12-31", "last_day(DATE '2017-12-20')") - ); + Arguments.of(new ExprDateValue("2017-12-20"), "2017-12-31", "last_day(DATE '2017-12-20')")); } @ParameterizedTest(name = "{2}") @@ -766,40 +686,37 @@ public void testLastDay(ExprValue testedDateTime, String expectedResult, String checkForExpectedDay( DSL.last_day(functionProperties, DSL.literal(testedDateTime)), expectedResult, - expectedQuery - ); + expectedQuery); } @Test public void testLastDayWithTimeType() { - FunctionExpression expression = DSL.last_day( - functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); + FunctionExpression expression = + DSL.last_day(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); LocalDate expected = LocalDate.now(functionProperties.getQueryStartClock()); LocalDate result = eval(expression).dateValue(); - assertAll( () -> assertEquals((expected.lengthOfMonth()), result.getDayOfMonth()), - () -> assertEquals("last_day(TIME '12:23:34')", expression.toString()) - ); + () -> assertEquals("last_day(TIME '12:23:34')", expression.toString())); } private void lastDay(String date) { - FunctionExpression expression = DSL.day_of_week( - functionProperties, DSL.literal(new ExprDateValue(date))); + FunctionExpression expression = + DSL.day_of_week(functionProperties, DSL.literal(new ExprDateValue(date))); eval(expression); } @Test public void testLastDayInvalidArgument() { - assertThrows(SemanticCheckException.class, () -> lastDay("asdfasdf")); + assertThrows(SemanticCheckException.class, () -> lastDay("asdfasdf")); } @Test public void microsecond() { - FunctionExpression expression = DSL - .microsecond(DSL.literal(new ExprTimeValue("01:02:03.123456"))); + FunctionExpression expression = + DSL.microsecond(DSL.literal(new 
ExprTimeValue("01:02:03.123456"))); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(123456), eval(expression)); assertEquals("microsecond(TIME '01:02:03.123456')", expression.toString()); @@ -904,20 +821,14 @@ private void minuteOfHourQuery(FunctionExpression dateExpression, int minute, St assertAll( () -> assertEquals(INTEGER, dateExpression.type()), () -> assertEquals(integerValue(minute), eval(dateExpression)), - () -> assertEquals(testExpr, dateExpression.toString()) - ); + () -> assertEquals(testExpr, dateExpression.toString())); } private static Stream getTestDataForMinuteOfHour() { return Stream.of( Arguments.of( - DSL.literal(new ExprTimeValue("01:02:03")), - 2, - "minute_of_hour(TIME '01:02:03')"), - Arguments.of( - DSL.literal("01:02:03"), - 2, - "minute_of_hour(\"01:02:03\")"), + DSL.literal(new ExprTimeValue("01:02:03")), 2, "minute_of_hour(TIME '01:02:03')"), + Arguments.of(DSL.literal("01:02:03"), 2, "minute_of_hour(\"01:02:03\")"), Arguments.of( DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03")), 2, @@ -927,10 +838,7 @@ private static Stream getTestDataForMinuteOfHour() { 2, "minute_of_hour(DATETIME '2020-08-17 01:02:03')"), Arguments.of( - DSL.literal("2020-08-17 01:02:03"), - 2, - "minute_of_hour(\"2020-08-17 01:02:03\")") - ); + DSL.literal("2020-08-17 01:02:03"), 2, "minute_of_hour(\"2020-08-17 01:02:03\")")); } @ParameterizedTest(name = "{2}") @@ -947,29 +855,23 @@ private void invalidMinuteOfHourQuery(String time) { @Test public void minuteOfHourInvalidArguments() { assertAll( - //Invalid Seconds - () -> assertThrows( - SemanticCheckException.class, - () -> invalidMinuteOfHourQuery("12:23:61")), + // Invalid Seconds + () -> + assertThrows(SemanticCheckException.class, () -> invalidMinuteOfHourQuery("12:23:61")), - //Invalid Minutes - () -> assertThrows( - SemanticCheckException.class, - () -> invalidMinuteOfHourQuery("12:61:34")), + // Invalid Minutes + () -> + assertThrows(SemanticCheckException.class, () -> 
invalidMinuteOfHourQuery("12:61:34")), - //Invalid Hours - () -> assertThrows( - SemanticCheckException.class, - () -> invalidMinuteOfHourQuery("25:23:34")), + // Invalid Hours + () -> + assertThrows(SemanticCheckException.class, () -> invalidMinuteOfHourQuery("25:23:34")), - //incorrect format - () -> assertThrows( - SemanticCheckException.class, - () -> invalidMinuteOfHourQuery("asdfasdf")) - ); + // incorrect format + () -> + assertThrows(SemanticCheckException.class, () -> invalidMinuteOfHourQuery("asdfasdf"))); } - @Test public void month() { FunctionExpression expression = DSL.month(DSL.literal(new ExprDateValue("2020-08-07"))); @@ -991,9 +893,7 @@ public void month() { private static Stream getTestDataForMonthOfYear() { return Stream.of( Arguments.of( - DSL.literal(new ExprDateValue("2020-08-07")), - "month_of_year(DATE '2020-08-07')", - 8), + DSL.literal(new ExprDateValue("2020-08-07")), "month_of_year(DATE '2020-08-07')", 8), Arguments.of( DSL.literal(new ExprDatetimeValue("2020-08-07 12:23:34")), "month_of_year(DATETIME '2020-08-07 12:23:34')", @@ -1002,25 +902,16 @@ private static Stream getTestDataForMonthOfYear() { DSL.literal(new ExprTimestampValue("2020-08-07 12:23:34")), "month_of_year(TIMESTAMP '2020-08-07 12:23:34')", 8), + Arguments.of(DSL.literal("2020-08-07"), "month_of_year(\"2020-08-07\")", 8), Arguments.of( - DSL.literal("2020-08-07"), - "month_of_year(\"2020-08-07\")", - 8), - Arguments.of( - DSL.literal("2020-08-07 01:02:03"), - "month_of_year(\"2020-08-07 01:02:03\")", - 8) - ); + DSL.literal("2020-08-07 01:02:03"), "month_of_year(\"2020-08-07 01:02:03\")", 8)); } @ParameterizedTest(name = "{0}") @MethodSource("getTestDataForMonthOfYear") public void monthOfYear(LiteralExpression arg, String expectedString, int expectedResult) { validateStringFormat( - DSL.month_of_year(functionProperties, arg), - expectedString, - expectedResult - ); + DSL.month_of_year(functionProperties, arg), expectedString, expectedResult); } @Test @@ -1032,41 
+923,34 @@ public void testMonthOfYearWithTimeType() { } private void invalidDatesQuery(String date) throws SemanticCheckException { - FunctionExpression expression = DSL.month_of_year( - functionProperties, - DSL.literal(new ExprDateValue(date))); + FunctionExpression expression = + DSL.month_of_year(functionProperties, DSL.literal(new ExprDateValue(date))); eval(expression); } @Test public void monthOfYearInvalidDates() { assertAll( - () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-01-50")), - () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-02-29")), - () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-02-31")), - () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-13-05")) - ); + () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-01-50")), + () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-02-29")), + () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-02-31")), + () -> assertThrows(SemanticCheckException.class, () -> invalidDatesQuery("2019-13-05"))); } @Test public void monthOfYearAlternateArgumentSyntaxes() { - FunctionExpression expression = DSL.month_of_year( - functionProperties, - DSL.literal(new ExprDateValue("2020-08-07"))); + FunctionExpression expression = + DSL.month_of_year(functionProperties, DSL.literal(new ExprDateValue("2020-08-07"))); assertEquals(INTEGER, expression.type()); assertEquals("month_of_year(DATE '2020-08-07')", expression.toString()); assertEquals(integerValue(8), eval(expression)); - expression = DSL.month_of_year( - functionProperties, - DSL.literal("2020-08-07")); + expression = DSL.month_of_year(functionProperties, DSL.literal("2020-08-07")); assertEquals(INTEGER, expression.type()); assertEquals("month_of_year(\"2020-08-07\")", expression.toString()); assertEquals(integerValue(8), eval(expression)); - expression = 
DSL.month_of_year( - functionProperties, - DSL.literal("2020-08-07 01:02:03")); + expression = DSL.month_of_year(functionProperties, DSL.literal("2020-08-07 01:02:03")); assertEquals(INTEGER, expression.type()); assertEquals("month_of_year(\"2020-08-07 01:02:03\")", expression.toString()); assertEquals(integerValue(8), eval(expression)); @@ -1117,15 +1001,13 @@ private static Stream getTestDataForSecToTime() { Arguments.of(-169200, "01:00:00"), Arguments.of(3600, "01:00:00"), Arguments.of(90000, "01:00:00"), - Arguments.of(176400, "01:00:00") - ); + Arguments.of(176400, "01:00:00")); } @ParameterizedTest(name = "{0}") @MethodSource("getTestDataForSecToTime") public void testSecToTime(int seconds, String expected) { - FunctionExpression expr = DSL.sec_to_time( - DSL.literal(new ExprIntegerValue(seconds))); + FunctionExpression expr = DSL.sec_to_time(DSL.literal(new ExprIntegerValue(seconds))); assertEquals(TIME, expr.type()); assertEquals(new ExprTimeValue(expected), eval(expr)); @@ -1137,8 +1019,7 @@ private static Stream getTestDataForSecToTimeWithDecimal() { Arguments.of(1.00123, "00:00:01.00123"), Arguments.of(1.001023, "00:00:01.001023"), Arguments.of(1.000000042, "00:00:01.000000042"), - Arguments.of(3.14, "00:00:03.14") - ); + Arguments.of(3.14, "00:00:03.14")); } @ParameterizedTest(name = "{0}") @@ -1187,28 +1068,18 @@ private void secondOfMinuteQuery(FunctionExpression dateExpression, int second, private static Stream getTestDataForSecondOfMinute() { return Stream.of( Arguments.of( - DSL.literal(new ExprTimeValue("01:02:03")), - 3, - "second_of_minute(TIME '01:02:03')"), - Arguments.of( - DSL.literal("01:02:03"), - 3, - "second_of_minute(\"01:02:03\")"), + DSL.literal(new ExprTimeValue("01:02:03")), 3, "second_of_minute(TIME '01:02:03')"), + Arguments.of(DSL.literal("01:02:03"), 3, "second_of_minute(\"01:02:03\")"), Arguments.of( - DSL.literal("2020-08-17 01:02:03"), - 3, - "second_of_minute(\"2020-08-17 01:02:03\")"), + DSL.literal("2020-08-17 01:02:03"), 
3, "second_of_minute(\"2020-08-17 01:02:03\")"), Arguments.of( - DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03")), 3, "second_of_minute(TIMESTAMP '2020-08-17 01:02:03')"), Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03")), 3, - "second_of_minute(DATETIME '2020-08-17 01:02:03')") - ); + "second_of_minute(DATETIME '2020-08-17 01:02:03')")); } @ParameterizedTest(name = "{2}") @@ -1225,19 +1096,22 @@ private void invalidSecondOfMinuteQuery(String time) { @Test public void secondOfMinuteInvalidArguments() { assertAll( - //Invalid Seconds - () -> assertThrows(SemanticCheckException.class, - () -> invalidSecondOfMinuteQuery("12:23:61")), - //Invalid Minutes - () -> assertThrows(SemanticCheckException.class, - () -> invalidSecondOfMinuteQuery("12:61:34")), - //Invalid Hours - () -> assertThrows(SemanticCheckException.class, - () -> invalidSecondOfMinuteQuery("25:23:34")), - //incorrect format - () -> assertThrows(SemanticCheckException.class, - () -> invalidSecondOfMinuteQuery("asdfasdf")) - ); + // Invalid Seconds + () -> + assertThrows( + SemanticCheckException.class, () -> invalidSecondOfMinuteQuery("12:23:61")), + // Invalid Minutes + () -> + assertThrows( + SemanticCheckException.class, () -> invalidSecondOfMinuteQuery("12:61:34")), + // Invalid Hours + () -> + assertThrows( + SemanticCheckException.class, () -> invalidSecondOfMinuteQuery("25:23:34")), + // incorrect format + () -> + assertThrows( + SemanticCheckException.class, () -> invalidSecondOfMinuteQuery("asdfasdf"))); } @Test @@ -1304,35 +1178,32 @@ public void timestamp() { } private void weekQuery(String date, int mode, int expectedResult) { - FunctionExpression expression = DSL - .week(functionProperties, DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); + FunctionExpression expression = + DSL.week(functionProperties, DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); assertEquals(INTEGER, expression.type()); assertEquals(String.format("week(DATE '%s', %d)", 
date, mode), expression.toString()); assertEquals(integerValue(expectedResult), eval(expression)); } private void weekOfYearUnderscoresQuery(String date, int mode, int expectedResult) { - FunctionExpression expression = DSL - .week_of_year( - functionProperties, - DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); + FunctionExpression expression = + DSL.week_of_year( + functionProperties, DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); assertEquals(INTEGER, expression.type()); assertEquals(String.format("week_of_year(DATE '%s', %d)", date, mode), expression.toString()); assertEquals(integerValue(expectedResult), eval(expression)); } private void weekOfYearQuery(String date, int mode, int expectedResult) { - FunctionExpression expression = DSL - .weekofyear( - functionProperties, - DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); + FunctionExpression expression = + DSL.weekofyear(functionProperties, DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); assertEquals(INTEGER, expression.type()); assertEquals(String.format("weekofyear(DATE '%s', %d)", date, mode), expression.toString()); assertEquals(integerValue(expectedResult), eval(expression)); } private static Stream getTestDataForWeek() { - //Test the behavior of different modes passed into the 'week_of_year' function + // Test the behavior of different modes passed into the 'week_of_year' function return Stream.of( Arguments.of("2019-01-05", 0, 0), Arguments.of("2019-01-05", 1, 1), @@ -1342,7 +1213,6 @@ private static Stream getTestDataForWeek() { Arguments.of("2019-01-05", 5, 0), Arguments.of("2019-01-05", 6, 1), Arguments.of("2019-01-05", 7, 53), - Arguments.of("2019-01-06", 0, 1), Arguments.of("2019-01-06", 1, 1), Arguments.of("2019-01-06", 2, 1), @@ -1351,7 +1221,6 @@ private static Stream getTestDataForWeek() { Arguments.of("2019-01-06", 5, 0), Arguments.of("2019-01-06", 6, 2), Arguments.of("2019-01-06", 7, 53), - Arguments.of("2019-01-07", 0, 1), Arguments.of("2019-01-07", 1, 
2), Arguments.of("2019-01-07", 2, 1), @@ -1360,11 +1229,9 @@ private static Stream getTestDataForWeek() { Arguments.of("2019-01-07", 5, 1), Arguments.of("2019-01-07", 6, 2), Arguments.of("2019-01-07", 7, 1), - Arguments.of("2000-01-01", 0, 0), Arguments.of("2000-01-01", 2, 52), - Arguments.of("1999-12-31", 0, 52) - ); + Arguments.of("1999-12-31", 0, 52)); } @ParameterizedTest(name = "{1}{2}") @@ -1376,145 +1243,130 @@ public void testWeek(String date, int mode, int expected) { } private void validateStringFormat( - FunctionExpression expr, - String expectedString, - int expectedResult) { + FunctionExpression expr, String expectedString, int expectedResult) { assertAll( () -> assertEquals(INTEGER, expr.type()), () -> assertEquals(expectedString, expr.toString()), - () -> assertEquals(integerValue(expectedResult), eval(expr)) - ); + () -> assertEquals(integerValue(expectedResult), eval(expr))); } private static Stream getTestDataForWeekFormats() { return Stream.of( - Arguments.of(DSL.literal(new ExprDateValue("2019-01-05")), - "DATE '2019-01-05'", - 0), - Arguments.of(DSL.literal(new ExprDatetimeValue("2019-01-05 01:02:03")), + Arguments.of(DSL.literal(new ExprDateValue("2019-01-05")), "DATE '2019-01-05'", 0), + Arguments.of( + DSL.literal(new ExprDatetimeValue("2019-01-05 01:02:03")), "DATETIME '2019-01-05 01:02:03'", 0), - Arguments.of(DSL.literal(new ExprTimestampValue("2019-01-05 01:02:03")), - "TIMESTAMP '2019-01-05 01:02:03'", - 0), Arguments.of( - DSL.literal("2019-01-05"), - "\"2019-01-05\"", + DSL.literal(new ExprTimestampValue("2019-01-05 01:02:03")), + "TIMESTAMP '2019-01-05 01:02:03'", 0), - Arguments.of( - DSL.literal("2019-01-05 00:01:00"), - "\"2019-01-05 00:01:00\"", - 0) - ); + Arguments.of(DSL.literal("2019-01-05"), "\"2019-01-05\"", 0), + Arguments.of(DSL.literal("2019-01-05 00:01:00"), "\"2019-01-05 00:01:00\"", 0)); } @ParameterizedTest(name = "{0}") @MethodSource("getTestDataForWeekFormats") public void testWeekFormats( - LiteralExpression arg, 
- String expectedString, - Integer expectedInteger) { + LiteralExpression arg, String expectedString, Integer expectedInteger) { validateStringFormat( DSL.week(functionProperties, arg), - String.format("week(%s)", expectedString), expectedInteger); + String.format("week(%s)", expectedString), + expectedInteger); validateStringFormat( DSL.week_of_year(functionProperties, arg), - String.format("week_of_year(%s)", expectedString), expectedInteger); + String.format("week_of_year(%s)", expectedString), + expectedInteger); validateStringFormat( DSL.weekofyear(functionProperties, arg), - String.format("weekofyear(%s)", expectedString), expectedInteger); + String.format("weekofyear(%s)", expectedString), + expectedInteger); } @Test public void testWeekOfYearWithTimeType() { assertAll( - () -> validateStringFormat( - DSL.week( - functionProperties, - DSL.literal(new ExprTimeValue("12:23:34")), - DSL.literal(0)), - "week(TIME '12:23:34', 0)", - LocalDate.now(functionProperties.getQueryStartClock()).get(ALIGNED_WEEK_OF_YEAR)), - - () -> validateStringFormat( - DSL.week_of_year(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))), - "week_of_year(TIME '12:23:34')", - LocalDate.now(functionProperties.getQueryStartClock()).get(ALIGNED_WEEK_OF_YEAR)), - - () -> validateStringFormat( - DSL.weekofyear(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))), - "weekofyear(TIME '12:23:34')", - LocalDate.now(functionProperties.getQueryStartClock()).get(ALIGNED_WEEK_OF_YEAR)) - ); + () -> + validateStringFormat( + DSL.week( + functionProperties, DSL.literal(new ExprTimeValue("12:23:34")), DSL.literal(0)), + "week(TIME '12:23:34', 0)", + LocalDate.now(functionProperties.getQueryStartClock()).get(ALIGNED_WEEK_OF_YEAR)), + () -> + validateStringFormat( + DSL.week_of_year(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))), + "week_of_year(TIME '12:23:34')", + LocalDate.now(functionProperties.getQueryStartClock()).get(ALIGNED_WEEK_OF_YEAR)), + () -> + 
validateStringFormat( + DSL.weekofyear(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))), + "weekofyear(TIME '12:23:34')", + LocalDate.now(functionProperties.getQueryStartClock()).get(ALIGNED_WEEK_OF_YEAR))); } @Test public void modeInUnsupportedFormat() { - FunctionExpression expression1 = DSL - .week(functionProperties, DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(8)); + FunctionExpression expression1 = + DSL.week(functionProperties, DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(8)); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> eval(expression1)); - assertEquals("mode:8 is invalid, please use mode value between 0-7", - exception.getMessage()); + assertEquals("mode:8 is invalid, please use mode value between 0-7", exception.getMessage()); - FunctionExpression expression2 = DSL - .week(functionProperties, DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(-1)); + FunctionExpression expression2 = + DSL.week(functionProperties, DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(-1)); exception = assertThrows(SemanticCheckException.class, () -> eval(expression2)); - assertEquals("mode:-1 is invalid, please use mode value between 0-7", - exception.getMessage()); + assertEquals("mode:-1 is invalid, please use mode value between 0-7", exception.getMessage()); } @Test public void testInvalidWeekOfYear() { assertAll( - //Test for WeekOfYear - //test invalid month - () -> assertThrows( - SemanticCheckException.class, - () -> weekOfYearQuery("2019-13-05 01:02:03", 0, 0)), - //test invalid day - () -> assertThrows( - SemanticCheckException.class, - () -> weekOfYearQuery("2019-01-50 01:02:03", 0, 0)), - //test invalid leap year - () -> assertThrows( - SemanticCheckException.class, - () -> weekOfYearQuery("2019-02-29 01:02:03", 0, 0)), - - //Test for Week_Of_Year - //test invalid month - () -> assertThrows( - SemanticCheckException.class, - () -> weekOfYearUnderscoresQuery("2019-13-05 
01:02:03", 0, 0)), - //test invalid day - () -> assertThrows( - SemanticCheckException.class, - () -> weekOfYearUnderscoresQuery("2019-01-50 01:02:03", 0, 0)), - //test invalid leap year - () -> assertThrows( - SemanticCheckException.class, - () -> weekOfYearUnderscoresQuery("2019-02-29 01:02:03", 0, 0)) - ); + // Test for WeekOfYear + // test invalid month + () -> + assertThrows( + SemanticCheckException.class, () -> weekOfYearQuery("2019-13-05 01:02:03", 0, 0)), + // test invalid day + () -> + assertThrows( + SemanticCheckException.class, () -> weekOfYearQuery("2019-01-50 01:02:03", 0, 0)), + // test invalid leap year + () -> + assertThrows( + SemanticCheckException.class, () -> weekOfYearQuery("2019-02-29 01:02:03", 0, 0)), + + // Test for Week_Of_Year + // test invalid month + () -> + assertThrows( + SemanticCheckException.class, + () -> weekOfYearUnderscoresQuery("2019-13-05 01:02:03", 0, 0)), + // test invalid day + () -> + assertThrows( + SemanticCheckException.class, + () -> weekOfYearUnderscoresQuery("2019-01-50 01:02:03", 0, 0)), + // test invalid leap year + () -> + assertThrows( + SemanticCheckException.class, + () -> weekOfYearUnderscoresQuery("2019-02-29 01:02:03", 0, 0))); } @Test public void weekOfYearModeInUnsupportedFormat() { - FunctionExpression expression1 = DSL - .week_of_year( - functionProperties, - DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(8)); + FunctionExpression expression1 = + DSL.week_of_year( + functionProperties, DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(8)); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> eval(expression1)); - assertEquals("mode:8 is invalid, please use mode value between 0-7", - exception.getMessage()); + assertEquals("mode:8 is invalid, please use mode value between 0-7", exception.getMessage()); - FunctionExpression expression2 = DSL - .week_of_year( - functionProperties, - DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(-1)); + 
FunctionExpression expression2 = + DSL.week_of_year( + functionProperties, DSL.literal(new ExprDateValue("2019-01-05")), DSL.literal(-1)); exception = assertThrows(SemanticCheckException.class, () -> eval(expression2)); - assertEquals("mode:-1 is invalid, please use mode value between 0-7", - exception.getMessage()); + assertEquals("mode:-1 is invalid, please use mode value between 0-7", exception.getMessage()); } @Test @@ -1557,15 +1409,14 @@ public void year() { public void date_format() { dateFormatTesters.forEach(this::testDateFormat); String timestamp = "1998-01-31 13:14:15.012345"; - String timestampFormat = "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " - + "%m %p %r %S %s %T %% %P"; - String timestampFormatted = "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " - + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; - - FunctionExpression expr = DSL.date_format( - functionProperties, - DSL.literal(timestamp), - DSL.literal(timestampFormat)); + String timestampFormat = + "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " + "%m %p %r %S %s %T %% %P"; + String timestampFormatted = + "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " + + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; + + FunctionExpression expr = + DSL.date_format(functionProperties, DSL.literal(timestamp), DSL.literal(timestampFormat)); assertEquals(STRING, expr.type()); assertEquals(timestampFormatted, eval(expr).stringValue()); } @@ -1578,108 +1429,55 @@ void testDateFormat(DateFormatTester dft) { @Test public void testDateFormatWithTimeType() { - FunctionExpression expr = DSL.date_format( - functionProperties, - DSL.literal(new ExprTimeValue("12:23:34")), - DSL.literal(new ExprStringValue("%m %d"))); + FunctionExpression expr = + DSL.date_format( + functionProperties, + DSL.literal(new ExprTimeValue("12:23:34")), + DSL.literal(new ExprStringValue("%m %d"))); + assertEquals(expr.toString(), "date_format(TIME '12:23:34', \"%m %d\")"); assertEquals( - expr.toString(), - "date_format(TIME 
'12:23:34', \"%m %d\")" - ); - assertEquals( - LocalDateTime.now( - functionProperties.getQueryStartClock()).format( - DateTimeFormatter.ofPattern("\"MM dd\"")), - eval(expr).toString() - ); + LocalDateTime.now(functionProperties.getQueryStartClock()) + .format(DateTimeFormatter.ofPattern("\"MM dd\"")), + eval(expr).toString()); } @Test public void testTimeFormatWithDateType() { - FunctionExpression expr = DSL.time_format( - functionProperties, - DSL.literal(new ExprDateValue("2023-01-16")), - DSL.literal(new ExprStringValue("%h %s"))); + FunctionExpression expr = + DSL.time_format( + functionProperties, + DSL.literal(new ExprDateValue("2023-01-16")), + DSL.literal(new ExprStringValue("%h %s"))); - assertEquals( - expr.toString(), - "time_format(DATE '2023-01-16', \"%h %s\")" - ); - assertEquals( - "\"12 00\"", - eval(expr).toString() - ); + assertEquals(expr.toString(), "time_format(DATE '2023-01-16', \"%h %s\")"); + assertEquals("\"12 00\"", eval(expr).toString()); } private static Stream getTestDataForTimeFormat() { return Stream.of( - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%f"), - "012345"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.002345"), - DSL.literal("%f"), - "002345"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012300"), - DSL.literal("%f"), - "012300"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%H"), - "13"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%h"), - "01"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%I"), - "01"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%i"), - "14"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%k"), - "13"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%l"), - "1"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%p"), - "PM"), - Arguments.of( - 
DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%r"), - "01:14:15 PM"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%S"), - "15"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%s"), - "15"), - Arguments.of( - DSL.literal("1998-01-31 13:14:15.012345"), - DSL.literal("%T"), - "13:14:15"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%f"), "012345"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.002345"), DSL.literal("%f"), "002345"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012300"), DSL.literal("%f"), "012300"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%H"), "13"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%h"), "01"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%I"), "01"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%i"), "14"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%k"), "13"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%l"), "1"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%p"), "PM"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%r"), "01:14:15 PM"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%S"), "15"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%s"), "15"), + Arguments.of(DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%T"), "13:14:15"), Arguments.of( DSL.literal("1998-01-31 13:14:15.012345"), DSL.literal("%f %H %h %I %i %k %l %p %r %S %s %T"), - "012345 13 01 01 14 13 1 PM 01:14:15 PM 15 15 13:14:15") - ); + "012345 13 01 01 14 13 1 PM 01:14:15 PM 15 15 13:14:15")); } - private void timeFormatQuery(LiteralExpression arg, - LiteralExpression format, - String expectedResult) { + private void timeFormatQuery( + LiteralExpression arg, LiteralExpression format, String 
expectedResult) { FunctionExpression expr = DSL.time_format(functionProperties, arg, format); assertEquals(STRING, expr.type()); assertEquals(expectedResult, eval(expr).stringValue()); @@ -1687,30 +1485,18 @@ private void timeFormatQuery(LiteralExpression arg, @ParameterizedTest(name = "{0}{1}") @MethodSource("getTestDataForTimeFormat") - public void testTimeFormat(LiteralExpression arg, - LiteralExpression format, - String expectedResult) { + public void testTimeFormat( + LiteralExpression arg, LiteralExpression format, String expectedResult) { timeFormatQuery(arg, format, expectedResult); } private static Stream getInvalidTestDataForTimeFormat() { return Stream.of( - Arguments.of( - DSL.literal("asdfasdf"), - DSL.literal("%f")), - Arguments.of( - DSL.literal("12345"), - DSL.literal("%h")), - Arguments.of( - DSL.literal("10:11:61"), - DSL.literal("%h")), - Arguments.of( - DSL.literal("10:61:12"), - DSL.literal("%h")), - Arguments.of( - DSL.literal("61:11:12"), - DSL.literal("%h")) - ); + Arguments.of(DSL.literal("asdfasdf"), DSL.literal("%f")), + Arguments.of(DSL.literal("12345"), DSL.literal("%h")), + Arguments.of(DSL.literal("10:11:61"), DSL.literal("%h")), + Arguments.of(DSL.literal("10:61:12"), DSL.literal("%h")), + Arguments.of(DSL.literal("61:11:12"), DSL.literal("%h"))); } @ParameterizedTest(name = "{0}{1}") @@ -1734,41 +1520,39 @@ private static Stream getInvalidTimeFormatHandlers() { Arguments.of("%V"), Arguments.of("%v"), Arguments.of("%X"), - Arguments.of("%x") - ); + Arguments.of("%x")); } @ParameterizedTest(name = "{0}") @MethodSource("getInvalidTimeFormatHandlers") public void testTimeFormatWithInvalidHandlers(String handler) { - FunctionExpression expr = DSL.time_format( - functionProperties, - DSL.literal("12:23:34"), - DSL.literal(handler)); + FunctionExpression expr = + DSL.time_format(functionProperties, DSL.literal("12:23:34"), DSL.literal(handler)); assertEquals(ExprNullValue.of(), eval(expr)); } @Test public void 
testTimeFormatWithDateHandlers() { - FunctionExpression expr = DSL.time_format( - functionProperties, - DSL.literal(new ExprDateValue("2023-01-17")), - DSL.literal("%c %d %e %m %Y %y")); - assertEquals( - "0 00 0 00 0000 00", - eval(expr).stringValue()); + FunctionExpression expr = + DSL.time_format( + functionProperties, + DSL.literal(new ExprDateValue("2023-01-17")), + DSL.literal("%c %d %e %m %Y %y")); + assertEquals("0 00 0 00 0000 00", eval(expr).stringValue()); } @Test public void testTimeFormatAndDateFormatReturnSameResult() { - FunctionExpression timeFormatExpr = DSL.time_format( - functionProperties, - DSL.literal(new ExprDateValue("1998-01-31 13:14:15.012345")), - DSL.literal("%f %H %h %I %i %k %l %p %r %S %s %T")); - FunctionExpression dateFormatExpr = DSL.date_format( - functionProperties, - DSL.literal(new ExprDateValue("1998-01-31 13:14:15.012345")), - DSL.literal("%f %H %h %I %i %k %l %p %r %S %s %T")); + FunctionExpression timeFormatExpr = + DSL.time_format( + functionProperties, + DSL.literal(new ExprDateValue("1998-01-31 13:14:15.012345")), + DSL.literal("%f %H %h %I %i %k %l %p %r %S %s %T")); + FunctionExpression dateFormatExpr = + DSL.date_format( + functionProperties, + DSL.literal(new ExprDateValue("1998-01-31 13:14:15.012345")), + DSL.literal("%f %H %h %I %i %k %l %p %r %S %s %T")); assertEquals(eval(dateFormatExpr), eval(timeFormatExpr)); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java index 22c3571aca..d857122534 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -39,8 +38,8 @@ public void positiveTimeZoneNoField2() { @Test public void 
positiveField1WrittenField2() { - FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00+01:00"), - DSL.literal("America/Los_Angeles")); + FunctionExpression expr = + DSL.datetime(DSL.literal("2008-05-15 22:00:00+01:00"), DSL.literal("America/Los_Angeles")); assertEquals(DATETIME, expr.type()); assertEquals(new ExprDatetimeValue("2008-05-15 14:00:00"), expr.valueOf()); } @@ -53,42 +52,43 @@ public void localDateTimeConversion() { String dt = "2008-05-15 22:00:00"; String timeZone = "America/Los_Angeles"; LocalDateTime timeConverted = LocalDateTime.parse(dt, formatter); - ZonedDateTime timeZoneLocal = timeConverted.atZone(ZoneId.of(TimeZone.getDefault().getID())) - .withZoneSameInstant(ZoneId.of(timeZone)); - FunctionExpression expr = DSL.datetime(DSL.literal(dt), - DSL.literal(timeZone)); + ZonedDateTime timeZoneLocal = + timeConverted + .atZone(ZoneId.of(TimeZone.getDefault().getID())) + .withZoneSameInstant(ZoneId.of(timeZone)); + FunctionExpression expr = DSL.datetime(DSL.literal(dt), DSL.literal(timeZone)); assertEquals(DATETIME, expr.type()); assertEquals(new ExprDatetimeValue(timeZoneLocal.toLocalDateTime()), expr.valueOf()); } @Test public void negativeField1WrittenField2() { - FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), - DSL.literal("America/Los_Angeles")); + FunctionExpression expr = + DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), DSL.literal("America/Los_Angeles")); assertEquals(DATETIME, expr.type()); assertEquals(new ExprDatetimeValue("2008-05-16 02:00:00"), expr.valueOf()); } @Test public void negativeField1PositiveField2() { - FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-12:00"), - DSL.literal("+15:00")); + FunctionExpression expr = + DSL.datetime(DSL.literal("2008-05-15 22:00:00-12:00"), DSL.literal("+15:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void twentyFourHourDifference() { - FunctionExpression 
expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-14:00"), - DSL.literal("+10:00")); + FunctionExpression expr = + DSL.datetime(DSL.literal("2008-05-15 22:00:00-14:00"), DSL.literal("+10:00")); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void negativeToNull() { - FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), - DSL.literal(nullValue())); + FunctionExpression expr = + DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), DSL.literal(nullValue())); assertEquals(DATETIME, expr.type()); assertEquals(nullValue(), expr.valueOf()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java index 4c8d42e8f9..023a3574aa 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java @@ -27,16 +27,17 @@ public class DateTimeTestBase extends ExpressionTestBase { - protected final BuiltinFunctionRepository functionRepository - = BuiltinFunctionRepository.getInstance(); + protected final BuiltinFunctionRepository functionRepository = + BuiltinFunctionRepository.getInstance(); protected ExprValue eval(Expression expression) { return expression.valueOf(); } protected FunctionExpression adddate(Expression date, Expression interval) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.ADDDATE.getName(), List.of(date, interval)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.ADDDATE.getName(), List.of(date, interval)); } protected ExprValue adddate(Object first, Object interval) { @@ -45,8 +46,9 @@ protected ExprValue adddate(Object first, Object interval) { } protected FunctionExpression addtime(Expression date, Expression interval) { - return (FunctionExpression) 
functionRepository.compile(functionProperties, - BuiltinFunctionName.ADDTIME.getName(), List.of(date, interval)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.ADDTIME.getName(), List.of(date, interval)); } protected ExprValue addtime(Temporal first, Temporal second) { @@ -55,8 +57,9 @@ protected ExprValue addtime(Temporal first, Temporal second) { } protected FunctionExpression date_add(Expression date, Expression interval) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.DATE_ADD.getName(), List.of(date, interval)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.DATE_ADD.getName(), List.of(date, interval)); } protected ExprValue date_add(Object first, Object second) { @@ -65,8 +68,9 @@ protected ExprValue date_add(Object first, Object second) { } protected FunctionExpression date_sub(Expression date, Expression interval) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.DATE_SUB.getName(), List.of(date, interval)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.DATE_SUB.getName(), List.of(date, interval)); } protected ExprValue date_sub(Object first, Object second) { @@ -75,13 +79,15 @@ protected ExprValue date_sub(Object first, Object second) { } protected FunctionExpression datediff(Expression first, Expression second) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.DATEDIFF.getName(), List.of(first, second)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.DATEDIFF.getName(), List.of(first, second)); } protected Long datediff(Temporal first, Temporal second) { return datediff(DSL.literal(fromObjectValue(first)), DSL.literal(fromObjectValue(second))) - .valueOf().longValue(); + .valueOf() + 
.longValue(); } protected LocalDateTime fromUnixTime(Double value) { @@ -89,13 +95,17 @@ protected LocalDateTime fromUnixTime(Double value) { } protected FunctionExpression fromUnixTime(Expression value) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.FROM_UNIXTIME.getName(), List.of(value)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.FROM_UNIXTIME.getName(), List.of(value)); } protected FunctionExpression fromUnixTime(Expression value, Expression format) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.FROM_UNIXTIME.getName(), List.of(value, format)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, + BuiltinFunctionName.FROM_UNIXTIME.getName(), + List.of(value, format)); } protected LocalDateTime fromUnixTime(Long value) { @@ -103,28 +113,31 @@ protected LocalDateTime fromUnixTime(Long value) { } protected String fromUnixTime(Long value, String format) { - return fromUnixTime(DSL.literal(value), DSL.literal(format)) - .valueOf().stringValue(); + return fromUnixTime(DSL.literal(value), DSL.literal(format)).valueOf().stringValue(); } protected String fromUnixTime(Double value, String format) { - return fromUnixTime(DSL.literal(value), DSL.literal(format)) - .valueOf().stringValue(); + return fromUnixTime(DSL.literal(value), DSL.literal(format)).valueOf().stringValue(); } protected FunctionExpression maketime(Expression hour, Expression minute, Expression second) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.MAKETIME.getName(), List.of(hour, minute, second)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, + BuiltinFunctionName.MAKETIME.getName(), + List.of(hour, minute, second)); } protected LocalTime maketime(Double hour, Double minute, Double second) { return 
maketime(DSL.literal(hour), DSL.literal(minute), DSL.literal(second)) - .valueOf().timeValue(); + .valueOf() + .timeValue(); } protected FunctionExpression makedate(Expression year, Expression dayOfYear) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.MAKEDATE.getName(), List.of(year, dayOfYear)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.MAKEDATE.getName(), List.of(year, dayOfYear)); } protected LocalDate makedate(double year, double dayOfYear) { @@ -132,29 +145,29 @@ protected LocalDate makedate(double year, double dayOfYear) { } protected FunctionExpression period_add(Expression period, Expression months) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.PERIOD_ADD.getName(), List.of(period, months)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.PERIOD_ADD.getName(), List.of(period, months)); } protected Integer period_add(Integer period, Integer months) { - return period_add(DSL.literal(period), DSL.literal(months)) - .valueOf().integerValue(); + return period_add(DSL.literal(period), DSL.literal(months)).valueOf().integerValue(); } protected FunctionExpression period_diff(Expression first, Expression second) { - return (FunctionExpression) functionRepository.compile( - functionProperties, - BuiltinFunctionName.PERIOD_DIFF.getName(), List.of(first, second)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.PERIOD_DIFF.getName(), List.of(first, second)); } protected Integer period_diff(Integer first, Integer second) { - return period_diff(DSL.literal(first), DSL.literal(second)) - .valueOf().integerValue(); + return period_diff(DSL.literal(first), DSL.literal(second)).valueOf().integerValue(); } protected FunctionExpression subdate(Expression date, Expression interval) { - return 
(FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.SUBDATE.getName(), List.of(date, interval)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.SUBDATE.getName(), List.of(date, interval)); } protected ExprValue subdate(Object first, Object interval) { @@ -163,8 +176,9 @@ protected ExprValue subdate(Object first, Object interval) { } protected FunctionExpression subtime(Expression date, Expression interval) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.SUBTIME.getName(), List.of(date, interval)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.SUBTIME.getName(), List.of(date, interval)); } protected ExprValue subtime(Temporal first, Temporal second) { @@ -173,18 +187,21 @@ protected ExprValue subtime(Temporal first, Temporal second) { } protected FunctionExpression timediff(Expression first, Expression second) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.TIMEDIFF.getName(), List.of(first, second)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.TIMEDIFF.getName(), List.of(first, second)); } protected LocalTime timediff(LocalTime first, LocalTime second) { return timediff(DSL.literal(new ExprTimeValue(first)), DSL.literal(new ExprTimeValue(second))) - .valueOf().timeValue(); + .valueOf() + .timeValue(); } protected FunctionExpression unixTimeStampExpr() { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.UNIX_TIMESTAMP.getName(), List.of()); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.UNIX_TIMESTAMP.getName(), List.of()); } protected Long unixTimeStamp() { @@ -192,8 +209,9 @@ protected Long unixTimeStamp() { } protected FunctionExpression 
unixTimeStampOf(Expression value) { - return (FunctionExpression) functionRepository.compile(functionProperties, - BuiltinFunctionName.UNIX_TIMESTAMP.getName(), List.of(value)); + return (FunctionExpression) + functionRepository.compile( + functionProperties, BuiltinFunctionName.UNIX_TIMESTAMP.getName(), List.of(value)); } protected Double unixTimeStampOf(Double value) { @@ -201,17 +219,14 @@ protected Double unixTimeStampOf(Double value) { } protected Double unixTimeStampOf(LocalDate value) { - return unixTimeStampOf(DSL.literal(new ExprDateValue(value))) - .valueOf().doubleValue(); + return unixTimeStampOf(DSL.literal(new ExprDateValue(value))).valueOf().doubleValue(); } protected Double unixTimeStampOf(LocalDateTime value) { - return unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value))) - .valueOf().doubleValue(); + return unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value))).valueOf().doubleValue(); } protected Double unixTimeStampOf(Instant value) { - return unixTimeStampOf(DSL.literal(new ExprTimestampValue(value))) - .valueOf().doubleValue(); + return unixTimeStampOf(DSL.literal(new ExprTimestampValue(value))).valueOf().doubleValue(); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java index 338933333a..820158b722 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_YEAR; @@ -38,8 +37,7 @@ private static Stream getDatetimeResultsForExtractFunction() { Arguments.of("DAY_MICROSECOND", 11101112123000L), Arguments.of("DAY_SECOND", 11101112), Arguments.of("DAY_MINUTE", 111011), - Arguments.of("DAY_HOUR", 1110) - ); + Arguments.of("DAY_HOUR", 1110)); } private 
static Stream getTimeResultsForExtractFunction() { @@ -53,8 +51,7 @@ private static Stream getTimeResultsForExtractFunction() { Arguments.of("MINUTE_SECOND", 1112), Arguments.of("HOUR_MICROSECOND", 101112123000L), Arguments.of("HOUR_SECOND", 101112), - Arguments.of("HOUR_MINUTE", 1011) - ); + Arguments.of("HOUR_MINUTE", 1011)); } private static Stream getDateResultsForExtractFunction() { @@ -64,19 +61,18 @@ private static Stream getDateResultsForExtractFunction() { Arguments.of("MONTH", 2), Arguments.of("QUARTER", 1), Arguments.of("YEAR", 2023), - Arguments.of("YEAR_MONTH", 202302) - ); + Arguments.of("YEAR_MONTH", 202302)); } @ParameterizedTest(name = "{0}") @MethodSource({ - "getDatetimeResultsForExtractFunction", - "getTimeResultsForExtractFunction", - "getDateResultsForExtractFunction"}) + "getDatetimeResultsForExtractFunction", + "getTimeResultsForExtractFunction", + "getDateResultsForExtractFunction" + }) public void testExtractWithDatetime(String part, long expected) { - FunctionExpression datetimeExpression = DSL.extract( - DSL.literal(part), - DSL.literal(new ExprDatetimeValue(datetimeInput))); + FunctionExpression datetimeExpression = + DSL.extract(DSL.literal(part), DSL.literal(new ExprDatetimeValue(datetimeInput))); assertEquals(LONG, datetimeExpression.type()); assertEquals(expected, eval(datetimeExpression).longValue()); @@ -87,23 +83,17 @@ public void testExtractWithDatetime(String part, long expected) { private void datePartWithTimeArgQuery(String part, String time, long expected) { ExprTimeValue timeValue = new ExprTimeValue(time); - FunctionExpression datetimeExpression = DSL.extract( - functionProperties, - DSL.literal(part), - DSL.literal(timeValue)); + FunctionExpression datetimeExpression = + DSL.extract(functionProperties, DSL.literal(part), DSL.literal(timeValue)); assertEquals(LONG, datetimeExpression.type()); - assertEquals(expected, - eval(datetimeExpression).longValue()); + assertEquals(expected, eval(datetimeExpression).longValue()); } 
- @Test public void testExtractDatePartWithTimeType() { datePartWithTimeArgQuery( - "DAY", - timeInput, - LocalDate.now(functionProperties.getQueryStartClock()).getDayOfMonth()); + "DAY", timeInput, LocalDate.now(functionProperties.getQueryStartClock()).getDayOfMonth()); datePartWithTimeArgQuery( "WEEK", @@ -111,43 +101,35 @@ public void testExtractDatePartWithTimeType() { LocalDate.now(functionProperties.getQueryStartClock()).get(ALIGNED_WEEK_OF_YEAR)); datePartWithTimeArgQuery( - "MONTH", - timeInput, - LocalDate.now(functionProperties.getQueryStartClock()).getMonthValue()); + "MONTH", timeInput, LocalDate.now(functionProperties.getQueryStartClock()).getMonthValue()); datePartWithTimeArgQuery( - "YEAR", - timeInput, - LocalDate.now(functionProperties.getQueryStartClock()).getYear()); + "YEAR", timeInput, LocalDate.now(functionProperties.getQueryStartClock()).getYear()); } @ParameterizedTest(name = "{0}") @MethodSource("getDateResultsForExtractFunction") public void testExtractWithDate(String part, long expected) { - FunctionExpression datetimeExpression = DSL.extract( - DSL.literal(part), - DSL.literal(new ExprDateValue(dateInput))); + FunctionExpression datetimeExpression = + DSL.extract(DSL.literal(part), DSL.literal(new ExprDateValue(dateInput))); assertEquals(LONG, datetimeExpression.type()); assertEquals(expected, eval(datetimeExpression).longValue()); assertEquals( - String.format("extract(\"%s\", DATE '2023-02-11')", part), - datetimeExpression.toString()); + String.format("extract(\"%s\", DATE '2023-02-11')", part), datetimeExpression.toString()); } @ParameterizedTest(name = "{0}") @MethodSource("getTimeResultsForExtractFunction") public void testExtractWithTime(String part, long expected) { - FunctionExpression datetimeExpression = DSL.extract( - functionProperties, - DSL.literal(part), - DSL.literal(new ExprTimeValue(timeInput))); + FunctionExpression datetimeExpression = + DSL.extract( + functionProperties, DSL.literal(part), DSL.literal(new 
ExprTimeValue(timeInput))); assertEquals(LONG, datetimeExpression.type()); assertEquals(expected, eval(datetimeExpression).longValue()); assertEquals( - String.format("extract(\"%s\", TIME '10:11:12.123')", part), - datetimeExpression.toString()); + String.format("extract(\"%s\", TIME '10:11:12.123')", part), datetimeExpression.toString()); } private ExprValue eval(Expression expression) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java index 58387ef04f..8fcc6904b2 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java @@ -31,20 +31,21 @@ private static Stream getLongSamples() { Arguments.of(1L), Arguments.of(1447430881L), Arguments.of(2147483647L), - Arguments.of(1662577241L) - ); + Arguments.of(1662577241L)); } /** * Test processing different Long values. + * * @param value a value */ @ParameterizedTest @MethodSource("getLongSamples") public void checkOfLong(Long value) { - assertEquals(LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), - fromUnixTime(value)); - assertEquals(LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), + assertEquals( + LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), fromUnixTime(value)); + assertEquals( + LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), eval(fromUnixTime(DSL.literal(new ExprLongValue(value)))).datetimeValue()); } @@ -54,12 +55,12 @@ private static Stream getDoubleSamples() { Arguments.of(100500.100500d), Arguments.of(1447430881.564d), Arguments.of(2147483647.451232d), - Arguments.of(1662577241.d) - ); + Arguments.of(1662577241.d)); } /** * Test processing different Double values. 
+ * * @param value a value */ @ParameterizedTest @@ -70,11 +71,11 @@ public void checkOfDouble(Double value) { var valueAsString = new DecimalFormat("0.#").format(value); assertEquals( - LocalDateTime.ofEpochSecond(intPart, (int)Math.round(fracPart * 1E9), ZoneOffset.UTC), + LocalDateTime.ofEpochSecond(intPart, (int) Math.round(fracPart * 1E9), ZoneOffset.UTC), fromUnixTime(value), valueAsString); assertEquals( - LocalDateTime.ofEpochSecond(intPart, (int)Math.round(fracPart * 1E9), ZoneOffset.UTC), + LocalDateTime.ofEpochSecond(intPart, (int) Math.round(fracPart * 1E9), ZoneOffset.UTC), eval(fromUnixTime(DSL.literal(new ExprDoubleValue(value)))).datetimeValue(), valueAsString); } @@ -88,11 +89,12 @@ private static Stream getLongSamplesWithFormat() { Arguments.of(1447430881L, "%s", "01"), // 2015-11-13 16:08:01, %s - second Arguments.of(2147483647L, "%T", "03:14:07"), // 2038-01-19 03:14:07, %T - time Arguments.of(1662577241L, "%d", "07") // 1662577241, %d - day of the month - ); + ); } /** * Test processing different Long values with format. 
+ * * @param value a value * @param format a format * @param expected expected result @@ -101,8 +103,11 @@ private static Stream getLongSamplesWithFormat() { @MethodSource("getLongSamplesWithFormat") public void checkOfLongWithFormat(Long value, String format, String expected) { assertEquals(expected, fromUnixTime(value, format)); - assertEquals(expected, eval(fromUnixTime(DSL.literal(new ExprLongValue(value)), - DSL.literal(new ExprStringValue(format)))).stringValue()); + assertEquals( + expected, + eval(fromUnixTime( + DSL.literal(new ExprLongValue(value)), DSL.literal(new ExprStringValue(format)))) + .stringValue()); } private static Stream getDoubleSamplesWithFormat() { @@ -112,11 +117,12 @@ private static Stream getDoubleSamplesWithFormat() { Arguments.of(1447430881.56d, "%M", "November"), // 2015-11-13 16:08:01.56, %M - Month name Arguments.of(2147483647.42d, "%j", "019"), // 2038-01-19 03:14:07.42, %j - day of the year Arguments.of(1662577241.d, "%l", "7") // 2022-09-07 19:00:41, %l - 12 hour clock, no 0 pad - ); + ); } /** * Test processing different Double values with format. 
+ * * @param value a value * @param format a format * @param expected expected result @@ -125,16 +131,18 @@ private static Stream getDoubleSamplesWithFormat() { @MethodSource("getDoubleSamplesWithFormat") public void checkOfDoubleWithFormat(Double value, String format, String expected) { assertEquals(expected, fromUnixTime(value, format)); - assertEquals(expected, eval(fromUnixTime(DSL.literal(new ExprDoubleValue(value)), - DSL.literal(new ExprStringValue(format)))).stringValue()); + assertEquals( + expected, + eval(fromUnixTime( + DSL.literal(new ExprDoubleValue(value)), DSL.literal(new ExprStringValue(format)))) + .stringValue()); } @Test public void checkInvalidFormat() { - assertEquals(new ExprStringValue("q"), - fromUnixTime(DSL.literal(0L), DSL.literal("%q")).valueOf()); - assertEquals(new ExprStringValue(""), - fromUnixTime(DSL.literal(0L), DSL.literal("")).valueOf()); + assertEquals( + new ExprStringValue("q"), fromUnixTime(DSL.literal(0L), DSL.literal("%q")).valueOf()); + assertEquals(new ExprStringValue(""), fromUnixTime(DSL.literal(0L), DSL.literal("")).valueOf()); } @Test @@ -153,33 +161,33 @@ public void checkInsideTheRangeWithoutFormat() { @Test public void checkValueOutsideOfTheRangeWithFormat() { - assertEquals(ExprNullValue.of(), - fromUnixTime(DSL.literal(32536771200L), DSL.literal("%d")).valueOf()); - assertEquals(ExprNullValue.of(), - fromUnixTime(DSL.literal(32536771200d), DSL.literal("%d")).valueOf()); + assertEquals( + ExprNullValue.of(), fromUnixTime(DSL.literal(32536771200L), DSL.literal("%d")).valueOf()); + assertEquals( + ExprNullValue.of(), fromUnixTime(DSL.literal(32536771200d), DSL.literal("%d")).valueOf()); } @Test public void checkInsideTheRangeWithFormat() { - assertNotEquals(ExprNullValue.of(), - fromUnixTime(DSL.literal(32536771199L), DSL.literal("%d")).valueOf()); - assertNotEquals(ExprNullValue.of(), - fromUnixTime(DSL.literal(32536771199d), DSL.literal("%d")).valueOf()); + assertNotEquals( + ExprNullValue.of(), 
fromUnixTime(DSL.literal(32536771199L), DSL.literal("%d")).valueOf()); + assertNotEquals( + ExprNullValue.of(), fromUnixTime(DSL.literal(32536771199d), DSL.literal("%d")).valueOf()); } @Test public void checkNullOrNegativeValues() { assertEquals(ExprNullValue.of(), fromUnixTime(DSL.literal(ExprNullValue.of())).valueOf()); - assertEquals(ExprNullValue.of(), - fromUnixTime(DSL.literal(-1L), DSL.literal("%d")).valueOf()); - assertEquals(ExprNullValue.of(), - fromUnixTime(DSL.literal(-1.5d), DSL.literal("%d")).valueOf()); - assertEquals(ExprNullValue.of(), + assertEquals(ExprNullValue.of(), fromUnixTime(DSL.literal(-1L), DSL.literal("%d")).valueOf()); + assertEquals(ExprNullValue.of(), fromUnixTime(DSL.literal(-1.5d), DSL.literal("%d")).valueOf()); + assertEquals( + ExprNullValue.of(), fromUnixTime(DSL.literal(42L), DSL.literal(ExprNullValue.of())).valueOf()); - assertEquals(ExprNullValue.of(), + assertEquals( + ExprNullValue.of(), fromUnixTime(DSL.literal(ExprNullValue.of()), DSL.literal("%d")).valueOf()); - assertEquals(ExprNullValue.of(), - fromUnixTime(DSL.literal(ExprNullValue.of()), DSL.literal(ExprNullValue.of())) - .valueOf()); + assertEquals( + ExprNullValue.of(), + fromUnixTime(DSL.literal(ExprNullValue.of()), DSL.literal(ExprNullValue.of())).valueOf()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/IntervalClauseTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/IntervalClauseTest.java index e63acc94c0..d89e5f978a 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/IntervalClauseTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/IntervalClauseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -31,14 +30,11 @@ @ExtendWith(MockitoExtension.class) public class IntervalClauseTest extends ExpressionTestBase { - @Mock - Environment env; + @Mock 
Environment env; - @Mock - Expression nullRef; + @Mock Expression nullRef; - @Mock - Expression missingRef; + @Mock Expression missingRef; @Test public void microsecond() { @@ -106,7 +102,9 @@ public void year() { @Test public void unsupported_unit() { FunctionExpression expr = DSL.interval(DSL.literal(1), DSL.literal("year_month")); - assertThrows(ExpressionEvaluationException.class, () -> expr.valueOf(env), + assertThrows( + ExpressionEvaluationException.class, + () -> expr.valueOf(env), "interval unit year_month is not supported"); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/MakeDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/MakeDateTest.java index fbd7e61c8d..b607d907db 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/MakeDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/MakeDateTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -22,21 +21,32 @@ public class MakeDateTest extends DateTimeTestBase { @Test public void checkEdgeCases() { - assertEquals(LocalDate.ofYearDay(2002, 1), makedate(2001., 366.), + assertEquals( + LocalDate.ofYearDay(2002, 1), + makedate(2001., 366.), "No switch to the next year on getting 366th day of a non-leap year"); - assertEquals(LocalDate.ofYearDay(2005, 1), makedate(2004., 367.), + assertEquals( + LocalDate.ofYearDay(2005, 1), + makedate(2004., 367.), "No switch to the next year on getting 367th day of a leap year"); - assertEquals(LocalDate.ofYearDay(2000, 42), makedate(0., 42.), + assertEquals( + LocalDate.ofYearDay(2000, 42), + makedate(0., 42.), "0 year is not interpreted as 2000 as in MySQL"); - assertEquals(nullValue(), eval(makedate(DSL.literal(-1.), DSL.literal(42.))), + assertEquals( + nullValue(), + eval(makedate(DSL.literal(-1.), DSL.literal(42.))), "Negative year doesn't produce NULL"); - 
assertEquals(nullValue(), eval(makedate(DSL.literal(42.), DSL.literal(-1.))), + assertEquals( + nullValue(), + eval(makedate(DSL.literal(42.), DSL.literal(-1.))), "Negative dayOfYear doesn't produce NULL"); - assertEquals(nullValue(), eval(makedate(DSL.literal(42.), DSL.literal(0.))), + assertEquals( + nullValue(), + eval(makedate(DSL.literal(42.), DSL.literal(0.))), "Zero dayOfYear doesn't produce NULL"); - assertEquals(LocalDate.of(1999, 3, 1), makedate(1999., 60.), - "Got Feb 29th of a non-lear year"); + assertEquals(LocalDate.of(1999, 3, 1), makedate(1999., 60.), "Got Feb 29th of a non-lear year"); assertEquals(LocalDate.of(1999, 12, 31), makedate(1999., 365.)); assertEquals(LocalDate.of(2004, 12, 31), makedate(2004., 366.)); } @@ -73,12 +83,12 @@ private static Stream getTestData() { Arguments.of(3617.452566, 619.795467), Arguments.of(2210.322073, 106.914268), Arguments.of(675.757974, 147.702828), - Arguments.of(1101.801820, 40.055318) - ); + Arguments.of(1101.801820, 40.055318)); } /** * Test function with given pseudo-random values. + * * @param year year * @param dayOfYear day of year */ @@ -88,20 +98,20 @@ public void checkRandomValues(double year, double dayOfYear) { LocalDate actual = makedate(year, dayOfYear); LocalDate expected = getReferenceValue(year, dayOfYear); - assertEquals(expected, actual, - String.format("year = %f, dayOfYear = %f", year, dayOfYear)); + assertEquals(expected, actual, String.format("year = %f, dayOfYear = %f", year, dayOfYear)); } /** - * Using another algorithm to get reference value. - * We should go to the next year until remaining @dayOfYear is bigger than 365/366. + * Using another algorithm to get reference value. We should go to the next year until + * remaining @dayOfYear is bigger than 365/366. + * * @param year Year. * @param dayOfYear Day of the year. * @return The calculated date. 
*/ private LocalDate getReferenceValue(double year, double dayOfYear) { - var yearL = (int)Math.round(year); - var dayL = (int)Math.round(dayOfYear); + var yearL = (int) Math.round(year); + var dayL = (int) Math.round(dayOfYear); while (true) { int daysInYear = Year.isLeap(yearL) ? 366 : 365; if (dayL > daysInYear) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/MakeTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/MakeTimeTest.java index 2f8e45b4c3..ff6896647d 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/MakeTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/MakeTimeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,18 +23,27 @@ public class MakeTimeTest extends DateTimeTestBase { @Test public void checkEdgeCases() { - assertEquals(nullValue(), eval(maketime(DSL.literal(-1.), DSL.literal(42.), DSL.literal(42.))), + assertEquals( + nullValue(), + eval(maketime(DSL.literal(-1.), DSL.literal(42.), DSL.literal(42.))), "Negative hour doesn't produce NULL"); - assertEquals(nullValue(), eval(maketime(DSL.literal(42.), DSL.literal(-1.), DSL.literal(42.))), + assertEquals( + nullValue(), + eval(maketime(DSL.literal(42.), DSL.literal(-1.), DSL.literal(42.))), "Negative minute doesn't produce NULL"); - assertEquals(nullValue(), eval(maketime(DSL.literal(12.), DSL.literal(42.), DSL.literal(-1.))), + assertEquals( + nullValue(), + eval(maketime(DSL.literal(12.), DSL.literal(42.), DSL.literal(-1.))), "Negative second doesn't produce NULL"); - assertThrows(DateTimeParseException.class, + assertThrows( + DateTimeParseException.class, () -> eval(maketime(DSL.literal(24.), DSL.literal(42.), DSL.literal(42.)))); - assertThrows(DateTimeParseException.class, + assertThrows( + DateTimeParseException.class, () -> eval(maketime(DSL.literal(12.), 
DSL.literal(60.), DSL.literal(42.)))); - assertThrows(DateTimeParseException.class, + assertThrows( + DateTimeParseException.class, () -> eval(maketime(DSL.literal(12.), DSL.literal(42.), DSL.literal(60.)))); assertEquals(LocalTime.of(23, 59, 59), maketime(23., 59., 59.)); @@ -81,12 +89,12 @@ private static Stream getTestData() { Arguments.of(7.494112, 9.761983, 17.444988), Arguments.of(17.867756, 10.313120, 36.391815), Arguments.of(19.712155, 3.197562, 6.607233), - Arguments.of(2.385090, 41.761568, 33.342590) - ); + Arguments.of(2.385090, 41.761568, 33.342590)); } /** * Test function with given pseudo-random values. + * * @param hour hour * @param minute minute * @param second second @@ -95,11 +103,15 @@ private static Stream getTestData() { @MethodSource("getTestData") public void checkRandomValues(double hour, double minute, double second) { // results could have 1 nanosec diff because of rounding FP - var expected = LocalTime.of((int)Math.round(hour), (int)Math.round(minute), - // pick fraction second part as nanos - (int)Math.floor(second)).withNano((int)((second % 1) * 1E9)); + var expected = + LocalTime.of( + (int) Math.round(hour), + (int) Math.round(minute), + // pick fraction second part as nanos + (int) Math.floor(second)) + .withNano((int) ((second % 1) * 1E9)); var delta = Duration.between(expected, maketime(hour, minute, second)).getNano(); - assertEquals(0, delta, 1, - String.format("hour = %f, minute = %f, second = %f", hour, minute, second)); + assertEquals( + 0, delta, 1, String.format("hour = %f, minute = %f, second = %f", hour, minute, second)); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java index b67f4efc21..0e5c00084f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java @@ -3,7 
+3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.hamcrest.MatcherAssert.assertThat; @@ -41,11 +40,11 @@ import org.opensearch.sql.expression.FunctionExpression; import org.opensearch.sql.expression.function.FunctionProperties; - class NowLikeFunctionTest extends ExpressionTestBase { @Test void now() { - test_now_like_functions(DSL::now, + test_now_like_functions( + DSL::now, DATETIME, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); @@ -53,19 +52,28 @@ void now() { @Test void current_timestamp() { - test_now_like_functions(DSL::current_timestamp, DATETIME, false, + test_now_like_functions( + DSL::current_timestamp, + DATETIME, + false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @Test void localtimestamp() { - test_now_like_functions(DSL::localtimestamp, DATETIME, false, + test_now_like_functions( + DSL::localtimestamp, + DATETIME, + false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @Test void localtime() { - test_now_like_functions(DSL::localtime, DATETIME, false, + test_now_like_functions( + DSL::localtime, + DATETIME, + false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @@ -76,21 +84,21 @@ void sysdate() { @Test void curtime() { - test_now_like_functions(DSL::curtime, TIME, false, - () -> LocalTime.now(functionProperties.getQueryStartClock())); + test_now_like_functions( + DSL::curtime, TIME, false, () -> LocalTime.now(functionProperties.getQueryStartClock())); } @Test void currdate() { - test_now_like_functions(DSL::curdate, - DATE, false, - () -> LocalDate.now(functionProperties.getQueryStartClock())); + test_now_like_functions( + DSL::curdate, DATE, false, () -> LocalDate.now(functionProperties.getQueryStartClock())); } @Test void current_time() { - test_now_like_functions(DSL::current_time, + test_now_like_functions( + DSL::current_time, TIME, false, () -> 
LocalTime.now(functionProperties.getQueryStartClock())); @@ -98,26 +106,29 @@ void current_time() { @Test void current_date() { - test_now_like_functions(DSL::current_date, DATE, false, + test_now_like_functions( + DSL::current_date, + DATE, + false, () -> LocalDate.now(functionProperties.getQueryStartClock())); } @Test void utc_date() { - test_now_like_functions(DSL::utc_date, DATE, false, - () -> utcDateTimeNow(functionProperties).toLocalDate()); + test_now_like_functions( + DSL::utc_date, DATE, false, () -> utcDateTimeNow(functionProperties).toLocalDate()); } @Test void utc_time() { - test_now_like_functions(DSL::utc_time, TIME, false, - () -> utcDateTimeNow(functionProperties).toLocalTime()); + test_now_like_functions( + DSL::utc_time, TIME, false, () -> utcDateTimeNow(functionProperties).toLocalTime()); } @Test void utc_timestamp() { - test_now_like_functions(DSL::utc_timestamp, DATETIME, false, - () -> utcDateTimeNow(functionProperties)); + test_now_like_functions( + DSL::utc_timestamp, DATETIME, false, () -> utcDateTimeNow(functionProperties)); } private static LocalDateTime utcDateTimeNow(FunctionProperties functionProperties) { @@ -130,16 +141,16 @@ private static LocalDateTime utcDateTimeNow(FunctionProperties functionPropertie /** * Check how NOW-like functions are processed. 
* - * @param function Function - * @param resType Return type - * @param hasFsp Whether function has fsp argument + * @param function Function + * @param resType Return type + * @param hasFsp Whether function has fsp argument * @param referenceGetter A callback to get reference value */ void test_now_like_functions( BiFunction function, - ExprCoreType resType, - Boolean hasFsp, - Supplier referenceGetter) { + ExprCoreType resType, + Boolean hasFsp, + Supplier referenceGetter) { // Check return types: // `func()` FunctionExpression expr = function.apply(functionProperties, new Expression[] {}); @@ -153,10 +164,15 @@ void test_now_like_functions( assertEquals(resType, expr.type()); for (var wrongFspValue : List.of(-1, 10)) { - var exception = assertThrows(IllegalArgumentException.class, - () -> function.apply(functionProperties, - new Expression[] {DSL.literal(wrongFspValue)}).valueOf()); - assertEquals(String.format("Invalid `fsp` value: %d, allowed 0 to 6", wrongFspValue), + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + function + .apply(functionProperties, new Expression[] {DSL.literal(wrongFspValue)}) + .valueOf()); + assertEquals( + String.format("Invalid `fsp` value: %d, allowed 0 to 6", wrongFspValue), exception.getMessage()); } } @@ -170,11 +186,9 @@ void test_now_like_functions( assertThat(sample, isCloseTo(reference, maxDiff, unit)); if (hasFsp) { // `func(fsp)` - Temporal value = extractValue(function.apply(functionProperties, - new Expression[] {DSL.literal(0)})); - assertThat(referenceGetter.get(), - isCloseTo(value, maxDiff, unit)); - + Temporal value = + extractValue(function.apply(functionProperties, new Expression[] {DSL.literal(0)})); + assertThat(referenceGetter.get(), isCloseTo(value, maxDiff, unit)); } } @@ -182,7 +196,8 @@ static Matcher isCloseTo(Temporal reference, long maxDiff, TemporalUni return new BaseMatcher<>() { @Override public void describeTo(Description description) { - description.appendText("value 
between ") + description + .appendText("value between ") .appendValue(reference.minus(maxDiff, units)) .appendText(" and ") .appendValue(reference.plus(maxDiff, units)); @@ -197,24 +212,21 @@ public boolean matches(Object value) { } return false; } - - }; } @TestFactory Stream constantValueTestFactory() { - BiFunction, DynamicTest> buildTest - = (name, action) -> - DynamicTest.dynamicTest( - String.format("multiple_invocations_same_value_test[%s]", name), - () -> { - var v1 = extractValue(action.apply(functionProperties)); - Thread.sleep(1000); - var v2 = extractValue(action.apply(functionProperties)); - assertEquals(v1, v2); - } - ); + BiFunction, DynamicTest> buildTest = + (name, action) -> + DynamicTest.dynamicTest( + String.format("multiple_invocations_same_value_test[%s]", name), + () -> { + var v1 = extractValue(action.apply(functionProperties)); + Thread.sleep(1000); + var v2 = extractValue(action.apply(functionProperties)); + assertEquals(v1, v2); + }); return Stream.of( buildTest.apply("now", DSL::now), buildTest.apply("current_timestamp", DSL::current_timestamp), @@ -222,8 +234,7 @@ Stream constantValueTestFactory() { buildTest.apply("curdate", DSL::curdate), buildTest.apply("curtime", DSL::curtime), buildTest.apply("localtimestamp", DSL::localtimestamp), - buildTest.apply("localtime", DSL::localtime) - ); + buildTest.apply("localtime", DSL::localtime)); } @Test @@ -232,7 +243,6 @@ void sysdate_multiple_invocations_differ() throws InterruptedException { Thread.sleep(1000); var v2 = extractValue(DSL.sysdate(functionProperties)); assertThat(v1, IsNot.not(isCloseTo(v2, 1, ChronoUnit.NANOS))); - } private Temporal extractValue(FunctionExpression func) { @@ -243,7 +253,7 @@ private Temporal extractValue(FunctionExpression func) { return func.valueOf().datetimeValue(); case TIME: return func.valueOf().timeValue(); - // unreachable code + // unreachable code default: throw new IllegalArgumentException(String.format("%s", func.type())); } diff --git 
a/core/src/test/java/org/opensearch/sql/expression/datetime/PeriodFunctionsTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/PeriodFunctionsTest.java index bf228dd509..c6a6392c9b 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/PeriodFunctionsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/PeriodFunctionsTest.java @@ -18,6 +18,7 @@ public class PeriodFunctionsTest extends DateTimeTestBase { /** * Generate sample data for `PERIOD_ADD` function. + * * @return A data set. */ public static Stream getTestDataForPeriodAdd() { @@ -27,8 +28,7 @@ public static Stream getTestDataForPeriodAdd() { Arguments.of(3, -1, 200002), // Mar 2000 - 1 Arguments.of(12, 0, 200012), // Dec 2000 + 0 Arguments.of(6104, 100, 206908), // Apr 2061 + 100m (8y4m) - Arguments.of(201510, 14, 201612) - ); + Arguments.of(201510, 14, 201612)); } @ParameterizedTest @@ -39,6 +39,7 @@ public void period_add_with_different_data(int period, int months, int expected) /** * Generate sample data for `PERIOD_DIFF` function. + * * @return A data set. */ public static Stream getTestDataForPeriodDiff() { @@ -51,7 +52,7 @@ public static Stream getTestDataForPeriodDiff() { Arguments.of(200505, 7505, 360), // May 2005 - May 1975 Arguments.of(6104, 8509, 907), // Apr 2061 - Sep 1985 Arguments.of(207707, 7707, 1200) // Jul 2077 - Jul 1977 - ); + ); } @ParameterizedTest @@ -68,29 +69,30 @@ public void two_way_conversion(int period1, int period2, int expected) { /** * Generate invalid sample data for test. + * * @return A data set. 
*/ public static Stream getInvalidTestData() { return Stream.of( - Arguments.of(0), - Arguments.of(123), - Arguments.of(100), - Arguments.of(1234), - Arguments.of(1000), - Arguments.of(2020), - Arguments.of(12345), - Arguments.of(123456), - Arguments.of(1234567), - Arguments.of(200213), - Arguments.of(200300), - Arguments.of(-1), - Arguments.of(-1234), - Arguments.of(-123401) - ); + Arguments.of(0), + Arguments.of(123), + Arguments.of(100), + Arguments.of(1234), + Arguments.of(1000), + Arguments.of(2020), + Arguments.of(12345), + Arguments.of(123456), + Arguments.of(1234567), + Arguments.of(200213), + Arguments.of(200300), + Arguments.of(-1), + Arguments.of(-1234), + Arguments.of(-123401)); } /** * Check that `PERIOD_ADD` and `PERIOD_DIFF` return NULL on invalid input. + * * @param period An invalid data. */ @ParameterizedTest diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java index a35627247a..42d4aab1f6 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -33,89 +32,46 @@ class StrToDateTest extends ExpressionTestBase { private static Stream getTestDataForStrToDate() { return Stream.of( - //Date arguments + // Date arguments Arguments.of( - "01,5,2013", - "%d,%m,%Y", - new ExprDatetimeValue("2013-05-01 00:00:00"), - DATETIME), + "01,5,2013", "%d,%m,%Y", new ExprDatetimeValue("2013-05-01 00:00:00"), DATETIME), Arguments.of( - "May 1, 2013", - "%M %d, %Y", - new ExprDatetimeValue("2013-05-01 00:00:00"), - DATETIME), + "May 1, 2013", "%M %d, %Y", new ExprDatetimeValue("2013-05-01 00:00:00"), DATETIME), Arguments.of( "May 1, 2013 - 9,23,11", "%M %d, %Y - %h,%i,%s", 
new ExprDatetimeValue("2013-05-01 09:23:11"), DATETIME), Arguments.of( - "2000,1,1", - "%Y,%m,%d", - new ExprDatetimeValue("2000-01-01 00:00:00"), - DATETIME), + "2000,1,1", "%Y,%m,%d", new ExprDatetimeValue("2000-01-01 00:00:00"), DATETIME), Arguments.of( - "2000,1,1,10", - "%Y,%m,%d,%h", - new ExprDatetimeValue("2000-01-01 10:00:00"), - DATETIME), + "2000,1,1,10", "%Y,%m,%d,%h", new ExprDatetimeValue("2000-01-01 10:00:00"), DATETIME), Arguments.of( "2000,1,1,10,11", "%Y,%m,%d,%h,%i", new ExprDatetimeValue("2000-01-01 10:11:00"), DATETIME), - //Invalid Arguments (should return null) - Arguments.of( - "a09:30:17", - "a%h:%i:%s", - ExprNullValue.of(), - UNDEFINED), - Arguments.of( - "abc", - "abc", - ExprNullValue.of(), - UNDEFINED), - Arguments.of( - "2000,1", - "%Y,%m", - ExprNullValue.of(), - UNDEFINED), - Arguments.of( - "2000,1,10", - "%Y,%m,%h", - ExprNullValue.of(), - UNDEFINED), - Arguments.of( - "2000,1,10,11", - "%Y,%m,%h,%i", - ExprNullValue.of(), - UNDEFINED), - Arguments.of( - "9", - "%m", - ExprNullValue.of(), - UNDEFINED), - Arguments.of( - "9", - "%s", - ExprNullValue.of(), - UNDEFINED) - ); + // Invalid Arguments (should return null) + Arguments.of("a09:30:17", "a%h:%i:%s", ExprNullValue.of(), UNDEFINED), + Arguments.of("abc", "abc", ExprNullValue.of(), UNDEFINED), + Arguments.of("2000,1", "%Y,%m", ExprNullValue.of(), UNDEFINED), + Arguments.of("2000,1,10", "%Y,%m,%h", ExprNullValue.of(), UNDEFINED), + Arguments.of("2000,1,10,11", "%Y,%m,%h,%i", ExprNullValue.of(), UNDEFINED), + Arguments.of("9", "%m", ExprNullValue.of(), UNDEFINED), + Arguments.of("9", "%s", ExprNullValue.of(), UNDEFINED)); } @ParameterizedTest(name = "{0} | {1}") @MethodSource("getTestDataForStrToDate") public void test_str_to_date( - String datetime, - String format, - ExprValue expectedResult, - ExprCoreType expectedType) { + String datetime, String format, ExprValue expectedResult, ExprCoreType expectedType) { - FunctionExpression expression = DSL.str_to_date( - 
functionProperties, - DSL.literal(new ExprStringValue(datetime)), - DSL.literal(new ExprStringValue(format))); + FunctionExpression expression = + DSL.str_to_date( + functionProperties, + DSL.literal(new ExprStringValue(datetime)), + DSL.literal(new ExprStringValue(format))); ExprValue result = eval(expression); @@ -130,26 +86,25 @@ private static LocalDateTime getExpectedTimeResult(int hour, int minute, int sec LocalDate.now().getDayOfMonth(), hour, minute, - seconds - ); + seconds); } private static Stream getTestDataForStrToDateWithTime() { return Stream.of( Arguments.of("9,23,11", "%h,%i,%s"), Arguments.of("2000,9,23,11", "%Y,%h,%i,%s"), - Arguments.of("2000,3,9,23,11", "%Y,%m,%h,%i,%s") - ); + Arguments.of("2000,3,9,23,11", "%Y,%m,%h,%i,%s")); } @ParameterizedTest(name = "{1}") @MethodSource("getTestDataForStrToDateWithTime") public void test_str_to_date_with_time_type(String parsed, String format) { - FunctionExpression expression = DSL.str_to_date( - functionProperties, - DSL.literal(new ExprStringValue(parsed)), - DSL.literal(new ExprStringValue(format))); + FunctionExpression expression = + DSL.str_to_date( + functionProperties, + DSL.literal(new ExprStringValue(parsed)), + DSL.literal(new ExprStringValue(format))); ExprValue result = eval(expression); @@ -160,19 +115,21 @@ public void test_str_to_date_with_time_type(String parsed, String format) { @Test public void test_str_to_date_with_date_format() { - LocalDateTime arg = LocalDateTime.of(2023, 2, 27, 10, 11,12); + LocalDateTime arg = LocalDateTime.of(2023, 2, 27, 10, 11, 12); String format = "%Y,%m,%d %h,%i,%s"; - FunctionExpression dateFormatExpr = DSL.date_format( - functionProperties, - DSL.literal(new ExprDatetimeValue(arg)), - DSL.literal(new ExprStringValue(format))); + FunctionExpression dateFormatExpr = + DSL.date_format( + functionProperties, + DSL.literal(new ExprDatetimeValue(arg)), + DSL.literal(new ExprStringValue(format))); String dateFormatResult = eval(dateFormatExpr).stringValue(); - 
FunctionExpression strToDateExpr = DSL.str_to_date( - functionProperties, - DSL.literal(new ExprStringValue(dateFormatResult)), - DSL.literal(new ExprStringValue(format))); + FunctionExpression strToDateExpr = + DSL.str_to_date( + functionProperties, + DSL.literal(new ExprStringValue(dateFormatResult)), + DSL.literal(new ExprStringValue(format))); LocalDateTime strToDateResult = eval(strToDateExpr).datetimeValue(); assertEquals(arg, strToDateResult); @@ -184,24 +141,24 @@ public void test_str_to_date_with_time_format() { final int MINUTES = 11; final int SECONDS = 12; - LocalTime arg = LocalTime.of(HOURS, MINUTES,SECONDS); + LocalTime arg = LocalTime.of(HOURS, MINUTES, SECONDS); String format = "%h,%i,%s"; - FunctionExpression dateFormatExpr = DSL.time_format( - functionProperties, - DSL.literal(new ExprTimeValue(arg)), - DSL.literal(new ExprStringValue(format))); + FunctionExpression dateFormatExpr = + DSL.time_format( + functionProperties, + DSL.literal(new ExprTimeValue(arg)), + DSL.literal(new ExprStringValue(format))); String timeFormatResult = eval(dateFormatExpr).stringValue(); - FunctionExpression strToDateExpr = DSL.str_to_date( - functionProperties, - DSL.literal(new ExprStringValue(timeFormatResult)), - DSL.literal(new ExprStringValue(format))); + FunctionExpression strToDateExpr = + DSL.str_to_date( + functionProperties, + DSL.literal(new ExprStringValue(timeFormatResult)), + DSL.literal(new ExprStringValue(format))); LocalDateTime strToDateResult = eval(strToDateExpr).datetimeValue(); - assertEquals( - getExpectedTimeResult(HOURS, MINUTES, SECONDS), - strToDateResult); + assertEquals(getExpectedTimeResult(HOURS, MINUTES, SECONDS), strToDateResult); } private ExprValue eval(Expression expression) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeDiffTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeDiffTest.java index 8bfb09bd49..88218bc63c 100644 --- 
a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeDiffTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeDiffTest.java @@ -22,12 +22,12 @@ private static Stream getTestData() { Arguments.of(LocalTime.of(12, 42), LocalTime.of(7, 40), LocalTime.of(5, 2)), Arguments.of(LocalTime.of(7, 40), LocalTime.of(12, 42), LocalTime.of(18, 58)), Arguments.of(LocalTime.of(7, 40), LocalTime.of(7, 40), LocalTime.of(0, 0)), - Arguments.of(LocalTime.MAX, LocalTime.MIN, LocalTime.MAX) - ); + Arguments.of(LocalTime.MAX, LocalTime.MIN, LocalTime.MAX)); } /** * Test `TIME_DIFF` function with different data. + * * @param arg1 First argument. * @param arg2 Second argument. * @param expectedResult Expected result. @@ -36,7 +36,9 @@ private static Stream getTestData() { @MethodSource("getTestData") public void try_different_data(LocalTime arg1, LocalTime arg2, LocalTime expectedResult) { assertEquals(expectedResult, timediff(arg1, arg2)); - assertEquals(expectedResult, eval(timediff(DSL.literal(new ExprTimeValue(arg1)), - DSL.literal(new ExprTimeValue(arg2)))).timeValue()); + assertEquals( + expectedResult, + eval(timediff(DSL.literal(new ExprTimeValue(arg1)), DSL.literal(new ExprTimeValue(arg2)))) + .timeValue()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java index 63514ab352..243eb6bb7b 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertAll; @@ -36,99 +35,90 @@ class TimeStampAddTest extends ExpressionTestBase { private static Stream getTestDataForTimestampAdd() { return Stream.of( - Arguments.of("MINUTE", 1, new 
ExprStringValue("2003-01-02 00:00:00"), - "2003-01-02 00:01:00"), - Arguments.of("WEEK", 1, new ExprStringValue("2003-01-02 00:00:00"), - "2003-01-09 00:00:00"), - - //Date - Arguments.of("MINUTE", 1, new ExprDateValue("2003-01-02"), - "2003-01-02 00:01:00"), - Arguments.of("WEEK", 1, new ExprDateValue("2003-01-02"), - "2003-01-09 00:00:00"), - - //Datetime - Arguments.of("MINUTE", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), - "2003-01-02 00:01:00"), - Arguments.of("WEEK", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), - "2003-01-09 00:00:00"), - - //Timestamp - Arguments.of("MINUTE", 1, new ExprTimestampValue("2003-01-02 00:00:00"), - "2003-01-02 00:01:00"), - Arguments.of("WEEK", 1, new ExprTimestampValue("2003-01-02 00:00:00"), - "2003-01-09 00:00:00"), - - //Cases surrounding leap year - Arguments.of("SECOND", 1, new ExprTimestampValue("2020-02-28 23:59:59"), - "2020-02-29 00:00:00"), - Arguments.of("MINUTE", 1, new ExprTimestampValue("2020-02-28 23:59:59"), - "2020-02-29 00:00:59"), - Arguments.of("HOUR", 1, new ExprTimestampValue("2020-02-28 23:59:59"), - "2020-02-29 00:59:59"), - Arguments.of("DAY", 1, new ExprTimestampValue("2020-02-28 23:59:59"), - "2020-02-29 23:59:59"), - Arguments.of("WEEK", 1, new ExprTimestampValue("2020-02-28 23:59:59"), - "2020-03-06 23:59:59"), - - //Cases surrounding end-of-year - Arguments.of("SECOND", 1, new ExprTimestampValue("2020-12-31 23:59:59"), - "2021-01-01 00:00:00"), - Arguments.of("MINUTE", 1, new ExprTimestampValue("2020-12-31 23:59:59"), - "2021-01-01 00:00:59"), - Arguments.of("HOUR", 1, new ExprTimestampValue("2020-12-31 23:59:59"), - "2021-01-01 00:59:59"), - Arguments.of("DAY", 1, new ExprTimestampValue("2020-12-31 23:59:59"), - "2021-01-01 23:59:59"), - Arguments.of("WEEK", 1, new ExprTimestampValue("2020-12-31 23:59:59"), - "2021-01-07 23:59:59"), - - //Test adding a month (including special cases) - Arguments.of("MONTH", 1, new ExprStringValue("2003-01-02 00:00:00"), - "2003-02-02 00:00:00"), - 
Arguments.of("MONTH", 1, new ExprDateValue("2024-03-30"), - "2024-04-30 00:00:00"), - Arguments.of("MONTH", 1, new ExprDateValue("2024-03-31"), - "2024-04-30 00:00:00"), - - //Test remaining interval types - Arguments.of("MICROSECOND", 123, new ExprStringValue("2003-01-02 00:00:00"), + Arguments.of( + "MINUTE", 1, new ExprStringValue("2003-01-02 00:00:00"), "2003-01-02 00:01:00"), + Arguments.of("WEEK", 1, new ExprStringValue("2003-01-02 00:00:00"), "2003-01-09 00:00:00"), + + // Date + Arguments.of("MINUTE", 1, new ExprDateValue("2003-01-02"), "2003-01-02 00:01:00"), + Arguments.of("WEEK", 1, new ExprDateValue("2003-01-02"), "2003-01-09 00:00:00"), + + // Datetime + Arguments.of( + "MINUTE", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), "2003-01-02 00:01:00"), + Arguments.of( + "WEEK", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), "2003-01-09 00:00:00"), + + // Timestamp + Arguments.of( + "MINUTE", 1, new ExprTimestampValue("2003-01-02 00:00:00"), "2003-01-02 00:01:00"), + Arguments.of( + "WEEK", 1, new ExprTimestampValue("2003-01-02 00:00:00"), "2003-01-09 00:00:00"), + + // Cases surrounding leap year + Arguments.of( + "SECOND", 1, new ExprTimestampValue("2020-02-28 23:59:59"), "2020-02-29 00:00:00"), + Arguments.of( + "MINUTE", 1, new ExprTimestampValue("2020-02-28 23:59:59"), "2020-02-29 00:00:59"), + Arguments.of( + "HOUR", 1, new ExprTimestampValue("2020-02-28 23:59:59"), "2020-02-29 00:59:59"), + Arguments.of( + "DAY", 1, new ExprTimestampValue("2020-02-28 23:59:59"), "2020-02-29 23:59:59"), + Arguments.of( + "WEEK", 1, new ExprTimestampValue("2020-02-28 23:59:59"), "2020-03-06 23:59:59"), + + // Cases surrounding end-of-year + Arguments.of( + "SECOND", 1, new ExprTimestampValue("2020-12-31 23:59:59"), "2021-01-01 00:00:00"), + Arguments.of( + "MINUTE", 1, new ExprTimestampValue("2020-12-31 23:59:59"), "2021-01-01 00:00:59"), + Arguments.of( + "HOUR", 1, new ExprTimestampValue("2020-12-31 23:59:59"), "2021-01-01 00:59:59"), + Arguments.of( + "DAY", 1, 
new ExprTimestampValue("2020-12-31 23:59:59"), "2021-01-01 23:59:59"), + Arguments.of( + "WEEK", 1, new ExprTimestampValue("2020-12-31 23:59:59"), "2021-01-07 23:59:59"), + + // Test adding a month (including special cases) + Arguments.of("MONTH", 1, new ExprStringValue("2003-01-02 00:00:00"), "2003-02-02 00:00:00"), + Arguments.of("MONTH", 1, new ExprDateValue("2024-03-30"), "2024-04-30 00:00:00"), + Arguments.of("MONTH", 1, new ExprDateValue("2024-03-31"), "2024-04-30 00:00:00"), + + // Test remaining interval types + Arguments.of( + "MICROSECOND", + 123, + new ExprStringValue("2003-01-02 00:00:00"), "2003-01-02 00:00:00.000123"), - Arguments.of("QUARTER", 1, new ExprStringValue("2003-01-02 00:00:00"), - "2003-04-02 00:00:00"), - Arguments.of("YEAR", 1, new ExprStringValue("2003-01-02 00:00:00"), - "2004-01-02 00:00:00"), - - //Test negative value for amount (Test for all intervals) - Arguments.of("MICROSECOND", -1, new ExprStringValue("2000-01-01 00:00:00"), + Arguments.of( + "QUARTER", 1, new ExprStringValue("2003-01-02 00:00:00"), "2003-04-02 00:00:00"), + Arguments.of("YEAR", 1, new ExprStringValue("2003-01-02 00:00:00"), "2004-01-02 00:00:00"), + + // Test negative value for amount (Test for all intervals) + Arguments.of( + "MICROSECOND", + -1, + new ExprStringValue("2000-01-01 00:00:00"), "1999-12-31 23:59:59.999999"), - Arguments.of("SECOND", -1, new ExprStringValue("2000-01-01 00:00:00"), - "1999-12-31 23:59:59"), - Arguments.of("MINUTE", -1, new ExprStringValue("2000-01-01 00:00:00"), - "1999-12-31 23:59:00"), - Arguments.of("HOUR", -1, new ExprStringValue("2000-01-01 00:00:00"), - "1999-12-31 23:00:00"), - Arguments.of("DAY", -1, new ExprStringValue("2000-01-01 00:00:00"), - "1999-12-31 00:00:00"), - Arguments.of("WEEK", -1, new ExprStringValue("2000-01-01 00:00:00"), - "1999-12-25 00:00:00"), - Arguments.of("MONTH", -1, new ExprStringValue("2000-01-01 00:00:00"), - "1999-12-01 00:00:00"), - Arguments.of("QUARTER", -1, new ExprStringValue("2000-01-01 
00:00:00"), - "1999-10-01 00:00:00"), - Arguments.of("YEAR", -1, new ExprStringValue("2000-01-01 00:00:00"), - "1999-01-01 00:00:00") - ); + Arguments.of( + "SECOND", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-12-31 23:59:59"), + Arguments.of( + "MINUTE", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-12-31 23:59:00"), + Arguments.of("HOUR", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-12-31 23:00:00"), + Arguments.of("DAY", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-12-31 00:00:00"), + Arguments.of("WEEK", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-12-25 00:00:00"), + Arguments.of( + "MONTH", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-12-01 00:00:00"), + Arguments.of( + "QUARTER", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-10-01 00:00:00"), + Arguments.of( + "YEAR", -1, new ExprStringValue("2000-01-01 00:00:00"), "1999-01-01 00:00:00")); } - private static FunctionExpression timestampaddQuery(String unit, - int amount, - ExprValue datetimeExpr) { + private static FunctionExpression timestampaddQuery( + String unit, int amount, ExprValue datetimeExpr) { return DSL.timestampadd( - DSL.literal(unit), - DSL.literal(new ExprIntegerValue(amount)), - DSL.literal(datetimeExpr) - ); + DSL.literal(unit), DSL.literal(new ExprIntegerValue(amount)), DSL.literal(datetimeExpr)); } @ParameterizedTest @@ -159,22 +149,19 @@ private static Stream getTestDataForTestAddingDatePartToTime() { Arguments.of("YEAR", 1, "10:11:12", LocalDate.now().plusYears(1)), Arguments.of("YEAR", 5, "10:11:12", LocalDate.now().plusYears(5)), Arguments.of("YEAR", 10, "10:11:12", LocalDate.now().plusYears(10)), - Arguments.of("YEAR", -10, "10:11:12", LocalDate.now().plusYears(-10)) - ); + Arguments.of("YEAR", -10, "10:11:12", LocalDate.now().plusYears(-10))); } @ParameterizedTest @MethodSource("getTestDataForTestAddingDatePartToTime") - public void testAddingDatePartToTime(String interval, - int addedInterval, - String timeArg, - 
LocalDate expectedDate) { - FunctionExpression expr = DSL.timestampadd( - functionProperties, - DSL.literal(interval), - DSL.literal(new ExprIntegerValue(addedInterval)), - DSL.literal(new ExprTimeValue(timeArg)) - ); + public void testAddingDatePartToTime( + String interval, int addedInterval, String timeArg, LocalDate expectedDate) { + FunctionExpression expr = + DSL.timestampadd( + functionProperties, + DSL.literal(interval), + DSL.literal(new ExprIntegerValue(addedInterval)), + DSL.literal(new ExprTimeValue(timeArg))); LocalDateTime expected1 = LocalDateTime.of(expectedDate, LocalTime.parse(timeArg)); @@ -187,16 +174,15 @@ public void testAddingTimePartToTime() { int addedInterval = 1; String timeArg = "10:11:12"; - FunctionExpression expr = DSL.timestampadd( - functionProperties, - DSL.literal(interval), - DSL.literal(new ExprIntegerValue(addedInterval)), - DSL.literal(new ExprTimeValue(timeArg)) - ); + FunctionExpression expr = + DSL.timestampadd( + functionProperties, + DSL.literal(interval), + DSL.literal(new ExprIntegerValue(addedInterval)), + DSL.literal(new ExprTimeValue(timeArg))); - LocalDateTime expected = LocalDateTime.of( - LocalDate.now(), - LocalTime.parse(timeArg).plusMinutes(addedInterval)); + LocalDateTime expected = + LocalDateTime.of(LocalDate.now(), LocalTime.parse(timeArg).plusMinutes(addedInterval)); assertEquals(new ExprDatetimeValue(expected), eval(expr)); } @@ -205,38 +191,27 @@ public void testAddingTimePartToTime() { public void testDifferentInputTypesHaveSameResult() { String part = "SECOND"; int amount = 1; - FunctionExpression dateExpr = timestampaddQuery( - part, - amount, - new ExprDateValue("2000-01-01")); - - FunctionExpression stringExpr = timestampaddQuery( - part, - amount, - new ExprStringValue("2000-01-01 00:00:00")); - - FunctionExpression datetimeExpr = timestampaddQuery( - part, - amount, - new ExprDatetimeValue("2000-01-01 00:00:00")); - - FunctionExpression timestampExpr = timestampaddQuery( - part, - amount, - new 
ExprTimestampValue("2000-01-01 00:00:00")); + FunctionExpression dateExpr = timestampaddQuery(part, amount, new ExprDateValue("2000-01-01")); + + FunctionExpression stringExpr = + timestampaddQuery(part, amount, new ExprStringValue("2000-01-01 00:00:00")); + + FunctionExpression datetimeExpr = + timestampaddQuery(part, amount, new ExprDatetimeValue("2000-01-01 00:00:00")); + + FunctionExpression timestampExpr = + timestampaddQuery(part, amount, new ExprTimestampValue("2000-01-01 00:00:00")); assertAll( () -> assertEquals(eval(dateExpr), eval(stringExpr)), () -> assertEquals(eval(dateExpr), eval(datetimeExpr)), - () -> assertEquals(eval(dateExpr), eval(timestampExpr)) - ); + () -> assertEquals(eval(dateExpr), eval(timestampExpr))); } private static Stream getInvalidTestDataForTimestampAdd() { return Stream.of( Arguments.of("WEEK", 1, new ExprStringValue("2000-13-01")), - Arguments.of("WEEK", 1, new ExprStringValue("2000-01-40")) - ); + Arguments.of("WEEK", 1, new ExprStringValue("2000-01-40"))); } @ParameterizedTest diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java index b00792e048..061420ceee 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertAll; @@ -37,11 +36,9 @@ class TimeStampDiffTest extends ExpressionTestBase { - //Helper function to create an argument based on a passed in interval type - private static ExprValue generateArg(String intervalType, - String argType, - LocalDateTime base, - int added) { + // Helper function to create an argument based on a passed in interval type + private static ExprValue generateArg( + String intervalType, String 
argType, LocalDateTime base, int added) { LocalDateTime arg; switch (intervalType) { case "MICROSECOND": @@ -72,9 +69,8 @@ private static ExprValue generateArg(String intervalType, arg = base.plusYears(added); break; default: - throw new SemanticCheckException(String.format( - "%s is not a valid interval type.", - intervalType)); + throw new SemanticCheckException( + String.format("%s is not a valid interval type.", intervalType)); } switch (argType) { @@ -87,92 +83,69 @@ private static ExprValue generateArg(String intervalType, case "DATETIME": return new ExprDatetimeValue(arg); case "STRING": - return new ExprStringValue(String.format( - "%04d-%02d-%02d %02d:%02d:%02d.%06d", - arg.getYear(), - arg.getMonthValue(), - arg.getDayOfMonth(), - arg.getHour(), - arg.getMinute(), - arg.getSecond(), - arg.getNano() / 1000)); + return new ExprStringValue( + String.format( + "%04d-%02d-%02d %02d:%02d:%02d.%06d", + arg.getYear(), + arg.getMonthValue(), + arg.getDayOfMonth(), + arg.getHour(), + arg.getMinute(), + arg.getSecond(), + arg.getNano() / 1000)); default: - throw new SemanticCheckException(String.format( - "%s is not a valid ExprCoreValueType.", - argType)); + throw new SemanticCheckException( + String.format("%s is not a valid ExprCoreValueType.", argType)); } } - //Generate test data to test all permutations for args (intervalType, arg1, arg2) + // Generate test data to test all permutations for args (intervalType, arg1, arg2) private static Stream getGeneralTestDataForTimestampDiff() { - //Needs to be initialized with a value to prevent a null pointer exception. 
- Stream testData = Stream.of(Arguments.of( - "DAY", - new ExprDateValue("2000-01-01 00:00:00"), - new ExprDateValue("2000-01-01"), - 0)); - - final String[] timeIntervalTypes = { - "MICROSECOND", - "SECOND", - "MINUTE", - "HOUR" - }; - - final String[] dateIntervalTypes = { - "DAY", - "WEEK", - "MONTH", - "QUARTER", - "YEAR" - }; + // Needs to be initialized with a value to prevent a null pointer exception. + Stream testData = + Stream.of( + Arguments.of( + "DAY", + new ExprDateValue("2000-01-01 00:00:00"), + new ExprDateValue("2000-01-01"), + 0)); + + final String[] timeIntervalTypes = {"MICROSECOND", "SECOND", "MINUTE", "HOUR"}; + final String[] dateIntervalTypes = {"DAY", "WEEK", "MONTH", "QUARTER", "YEAR"}; final String[] intervalTypes = ArrayUtils.addAll(timeIntervalTypes, dateIntervalTypes); - //TIME type not included here as it is a special case handled by a different test - final String[] expressionTypes = { - "DATE", - "DATETIME", - "TIMESTAMP", - "STRING" - }; + // TIME type not included here as it is a special case handled by a different test + final String[] expressionTypes = {"DATE", "DATETIME", "TIMESTAMP", "STRING"}; final LocalDateTime baseDateTime = LocalDateTime.of(2000, 1, 1, 0, 0, 0); final int intervalDifference = 5; - //Iterate through each permutation of argument + // Iterate through each permutation of argument for (String intervalType : intervalTypes) { for (String firstArgExpressionType : expressionTypes) { for (String secondArgExpressionType : expressionTypes) { ExprValue firstArg = generateArg(intervalType, firstArgExpressionType, baseDateTime, 0); - ExprValue secondArg = generateArg( - intervalType, - secondArgExpressionType, - baseDateTime, - intervalDifference); - - //If second arg is a DATE and you are using a unit of TIME to measure then expected is 0. - //The second arg is equal to baseDatetime in this case. - int expected = ( - secondArgExpressionType == "DATE" - && Arrays.asList(timeIntervalTypes).contains(intervalType)) - ? 
0 : intervalDifference; - - testData = Stream.concat(testData, Stream.of( - Arguments.of( - intervalType, - firstArg, - secondArg, - expected), - Arguments.of( - intervalType, - secondArg, - firstArg, - -expected) - )); + ExprValue secondArg = + generateArg(intervalType, secondArgExpressionType, baseDateTime, intervalDifference); + + // If second arg is a DATE and you are using a unit of TIME to measure then expected is 0. + // The second arg is equal to baseDatetime in this case. + int expected = + (secondArgExpressionType == "DATE" + && Arrays.asList(timeIntervalTypes).contains(intervalType)) + ? 0 + : intervalDifference; + + testData = + Stream.concat( + testData, + Stream.of( + Arguments.of(intervalType, firstArg, secondArg, expected), + Arguments.of(intervalType, secondArg, firstArg, -expected))); } } } @@ -183,7 +156,7 @@ private static Stream getGeneralTestDataForTimestampDiff() { private static Stream getCornerCaseTestDataForTimestampDiff() { return Stream.of( - //Test around Leap Year + // Test around Leap Year Arguments.of( "DAY", new ExprDatetimeValue("2019-02-28 00:00:00"), @@ -195,7 +168,7 @@ private static Stream getCornerCaseTestDataForTimestampDiff() { new ExprDatetimeValue("2020-03-01 00:00:00"), 2), - //Test around year change + // Test around year change Arguments.of( "SECOND", new ExprDatetimeValue("2019-12-31 23:59:59"), @@ -210,33 +183,27 @@ private static Stream getCornerCaseTestDataForTimestampDiff() { "DAY", new ExprDatetimeValue("2019-12-31 00:00:00"), new ExprDatetimeValue("2020-01-01 00:00:00"), - 1) - ); + 1)); } - private static FunctionExpression timestampdiffQuery(FunctionProperties functionProperties, - String unit, - ExprValue datetimeExpr1, - ExprValue datetimeExpr2) { + private static FunctionExpression timestampdiffQuery( + FunctionProperties functionProperties, + String unit, + ExprValue datetimeExpr1, + ExprValue datetimeExpr2) { return DSL.timestampdiff( functionProperties, DSL.literal(unit), DSL.literal(datetimeExpr1), - 
DSL.literal(datetimeExpr2) - ); + DSL.literal(datetimeExpr2)); } @ParameterizedTest @MethodSource({"getGeneralTestDataForTimestampDiff", "getCornerCaseTestDataForTimestampDiff"}) - public void testTimestampdiff(String unit, - ExprValue datetimeExpr1, - ExprValue datetimeExpr2, - long expected) { - FunctionExpression expr = timestampdiffQuery( - functionProperties, - unit, - datetimeExpr1, - datetimeExpr2); + public void testTimestampdiff( + String unit, ExprValue datetimeExpr1, ExprValue datetimeExpr2, long expected) { + FunctionExpression expr = + timestampdiffQuery(functionProperties, unit, datetimeExpr1, datetimeExpr2); assertEquals(expected, eval(expr).longValue()); } @@ -250,11 +217,10 @@ private static Stream getUnits() { Arguments.of("WEEK"), Arguments.of("MONTH"), Arguments.of("QUARTER"), - Arguments.of("YEAR") - ); + Arguments.of("YEAR")); } - //Test that Time arg uses today's date with all interval/part arguments + // Test that Time arg uses today's date with all interval/part arguments @ParameterizedTest @MethodSource("getUnits") public void testTimestampDiffWithTimeType(String unit) { @@ -266,16 +232,11 @@ public void testTimestampDiffWithTimeType(String unit) { ExprValue datetimeExpr = generateArg(unit, "TIMESTAMP", base, 0); ExprValue stringExpr = generateArg(unit, "TIMESTAMP", base, 0); - ExprValue[] expressions = {timeExpr, timestampExpr, dateExpr, datetimeExpr,stringExpr}; + ExprValue[] expressions = {timeExpr, timestampExpr, dateExpr, datetimeExpr, stringExpr}; for (ExprValue arg1 : expressions) { for (ExprValue arg2 : expressions) { - FunctionExpression funcExpr = timestampdiffQuery( - functionProperties, - unit, - arg1, - arg2 - ); + FunctionExpression funcExpr = timestampdiffQuery(functionProperties, unit, arg1, arg2); assertEquals(0L, eval(funcExpr).longValue()); } @@ -293,66 +254,65 @@ private static Stream getTimestampDiffInvalidArgs() { Arguments.of("SECOND", "2023-01-01 10:11:12", "2000-01-40 10:11:12"), Arguments.of("SECOND", "2023-01-01 
10:11:12", "2000-01-01 25:11:12"), Arguments.of("SECOND", "2023-01-01 10:11:12", "2000-01-01 10:70:12"), - Arguments.of("SECOND", "2023-01-01 10:11:12", "2000-01-01 10:11:70") - ); + Arguments.of("SECOND", "2023-01-01 10:11:12", "2000-01-01 10:11:70")); } @ParameterizedTest @MethodSource("getTimestampDiffInvalidArgs") public void testTimestampDiffWithInvalidTimeArgs(String unit, String arg1, String arg2) { - FunctionExpression expr = timestampdiffQuery( - functionProperties, - unit, - new ExprStringValue(arg1), - new ExprStringValue(arg2) - ); + FunctionExpression expr = + timestampdiffQuery( + functionProperties, unit, new ExprStringValue(arg1), new ExprStringValue(arg2)); assertThrows(SemanticCheckException.class, () -> eval(expr)); } @Test public void testTimestampDiffWithInvalidPartReturnsNull() { - FunctionExpression expr = timestampdiffQuery( - functionProperties, + FunctionExpression expr = + timestampdiffQuery( + functionProperties, "INVALID", - new ExprStringValue("2023-01-01 10:11:12"), - new ExprStringValue("2000-01-01 10:11:12") - ); + new ExprStringValue("2023-01-01 10:11:12"), + new ExprStringValue("2000-01-01 10:11:12")); assertEquals(ExprNullValue.of(), eval(expr)); } - //Test that different input types have the same result + // Test that different input types have the same result @Test public void testDifferentInputTypesHaveSameResult() { String part = "SECOND"; - FunctionExpression dateExpr = timestampdiffQuery( - functionProperties, - part, - new ExprDateValue("2000-01-01"), - new ExprDateValue("2000-01-02")); + FunctionExpression dateExpr = + timestampdiffQuery( + functionProperties, + part, + new ExprDateValue("2000-01-01"), + new ExprDateValue("2000-01-02")); - FunctionExpression stringExpr = timestampdiffQuery( - functionProperties, - part, - new ExprStringValue("2000-01-01 00:00:00"), - new ExprStringValue("2000-01-02 00:00:00")); + FunctionExpression stringExpr = + timestampdiffQuery( + functionProperties, + part, + new 
ExprStringValue("2000-01-01 00:00:00"), + new ExprStringValue("2000-01-02 00:00:00")); - FunctionExpression datetimeExpr = timestampdiffQuery( - functionProperties, - part, - new ExprDatetimeValue("2000-01-01 00:00:00"), - new ExprDatetimeValue("2000-01-02 00:00:00")); + FunctionExpression datetimeExpr = + timestampdiffQuery( + functionProperties, + part, + new ExprDatetimeValue("2000-01-01 00:00:00"), + new ExprDatetimeValue("2000-01-02 00:00:00")); - FunctionExpression timestampExpr = timestampdiffQuery( - functionProperties, - part, - new ExprTimestampValue("2000-01-01 00:00:00"), - new ExprTimestampValue("2000-01-02 00:00:00")); + FunctionExpression timestampExpr = + timestampdiffQuery( + functionProperties, + part, + new ExprTimestampValue("2000-01-01 00:00:00"), + new ExprTimestampValue("2000-01-02 00:00:00")); assertAll( () -> assertEquals(eval(dateExpr), eval(stringExpr)), () -> assertEquals(eval(dateExpr), eval(datetimeExpr)), - () -> assertEquals(eval(dateExpr), eval(timestampExpr)) - ); + () -> assertEquals(eval(dateExpr), eval(timestampExpr))); } private ExprValue eval(Expression expression) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java index 677ad9310e..7d25c0041b 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java @@ -39,35 +39,39 @@ public void timestamp_one_arg_string() { expr = DSL.timestamp(functionProperties, DSL.literal("1961-04-12 09:07:00.123456")); assertEquals(TIMESTAMP, expr.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7, 0, 123456000), - expr.valueOf().datetimeValue()); + assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7, 0, 123456000), expr.valueOf().datetimeValue()); } /** * Check that `TIMESTAMP` function throws an exception on incorrect string input. + * * @param value A value. 
* @param testName A test name. */ @ParameterizedTest(name = "{1}") @CsvSource({ - "1984-02-30 12:20:42, Feb 30th", - "1984-02-10 24:00:00, 24:00:00", - "84-02-10 12:20:42, 2 digit year" + "1984-02-30 12:20:42, Feb 30th", + "1984-02-10 24:00:00, 24:00:00", + "84-02-10 12:20:42, 2 digit year" }) public void timestamp_one_arg_string_invalid_format(String value, String testName) { // exception thrown from ExprTimestampValue(String) CTOR - var exception = assertThrows(SemanticCheckException.class, - () -> DSL.timestamp(functionProperties, DSL.literal(value)).valueOf()); - assertEquals(String.format("timestamp:%s in unsupported format, please " - + "use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", value), exception.getMessage()); + var exception = + assertThrows( + SemanticCheckException.class, + () -> DSL.timestamp(functionProperties, DSL.literal(value)).valueOf()); + assertEquals( + String.format( + "timestamp:%s in unsupported format, please " + "use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + value), + exception.getMessage()); } @Test public void timestamp_one_arg_time() { var expr = DSL.timestamp(functionProperties, DSL.time(DSL.literal("22:33:44"))); assertEquals(TIMESTAMP, expr.type()); - var refValue = LocalDate.now().atTime(LocalTime.of(22, 33, 44)) - .atZone(UTC_ZONE_ID).toInstant(); + var refValue = LocalDate.now().atTime(LocalTime.of(22, 33, 44)).atZone(UTC_ZONE_ID).toInstant(); assertEquals(new ExprTimestampValue(refValue), expr.valueOf()); } @@ -75,8 +79,7 @@ public void timestamp_one_arg_time() { public void timestamp_one_arg_date() { var expr = DSL.timestamp(functionProperties, DSL.date(DSL.literal("2077-12-15"))); assertEquals(TIMESTAMP, expr.type()); - var refValue = LocalDate.of(2077, 12, 15).atStartOfDay() - .atZone(UTC_ZONE_ID).toInstant(); + var refValue = LocalDate.of(2077, 12, 15).atStartOfDay().atZone(UTC_ZONE_ID).toInstant(); assertEquals(new ExprTimestampValue(refValue), expr.valueOf()); } @@ -90,8 +93,8 @@ public void timestamp_one_arg_datetime() { @Test 
public void timestamp_one_arg_timestamp() { var refValue = new ExprTimestampValue(Instant.ofEpochSecond(10050042)); - var expr = DSL.timestamp(functionProperties, - DSL.timestamp(functionProperties, DSL.literal(refValue))); + var expr = + DSL.timestamp(functionProperties, DSL.timestamp(functionProperties, DSL.literal(refValue))); assertEquals(TIMESTAMP, expr.type()); assertEquals(refValue, expr.valueOf()); } @@ -109,71 +112,115 @@ private static Stream getTestData() { // First argument of `TIMESTAMP` function, second argument and expected result value return Stream.of( // STRING and STRING/DATE/TIME/DATETIME/TIMESTAMP - Arguments.of("1961-04-12 09:07:00", "2077-12-15 01:48:00", + Arguments.of( + "1961-04-12 09:07:00", + "2077-12-15 01:48:00", dateTime2ExprTs(LocalDateTime.of(1961, 4, 12, 10, 55, 0))), - Arguments.of("1984-02-10 12:20:42", LocalDate.of(2077, 12, 21), + Arguments.of( + "1984-02-10 12:20:42", + LocalDate.of(2077, 12, 21), dateTime2ExprTs(LocalDateTime.of(1984, 2, 10, 12, 20, 42))), - Arguments.of("1961-04-12 09:07:00", LocalTime.of(1, 48), + Arguments.of( + "1961-04-12 09:07:00", + LocalTime.of(1, 48), dateTime2ExprTs(LocalDateTime.of(1961, 4, 12, 10, 55, 0))), - Arguments.of("2020-12-31 17:30:00", LocalDateTime.of(2077, 12, 21, 12, 20, 42), + Arguments.of( + "2020-12-31 17:30:00", + LocalDateTime.of(2077, 12, 21, 12, 20, 42), dateTime2ExprTs(LocalDateTime.of(2021, 1, 1, 5, 50, 42))), - Arguments.of("2020-12-31 17:30:00", Instant.ofEpochSecond(42), + Arguments.of( + "2020-12-31 17:30:00", + Instant.ofEpochSecond(42), dateTime2ExprTs(LocalDateTime.of(2020, 12, 31, 17, 30, 42))), // DATE and STRING/DATE/TIME/DATETIME/TIMESTAMP - Arguments.of(LocalDate.of(2077, 12, 21), "2077-12-15 01:48:00", + Arguments.of( + LocalDate.of(2077, 12, 21), + "2077-12-15 01:48:00", dateTime2ExprTs(LocalDateTime.of(2077, 12, 21, 1, 48, 0))), - Arguments.of(LocalDate.of(2077, 12, 21), LocalDate.of(1984, 2, 3), + Arguments.of( + LocalDate.of(2077, 12, 21), + LocalDate.of(1984, 
2, 3), dateTime2ExprTs(LocalDateTime.of(2077, 12, 21, 0, 0, 0))), - Arguments.of(LocalDate.of(2077, 12, 21), LocalTime.of(22, 33, 44), + Arguments.of( + LocalDate.of(2077, 12, 21), + LocalTime.of(22, 33, 44), dateTime2ExprTs(LocalDateTime.of(2077, 12, 21, 22, 33, 44))), - Arguments.of(LocalDate.of(2077, 12, 21), LocalDateTime.of(1999, 9, 9, 22, 33, 44), + Arguments.of( + LocalDate.of(2077, 12, 21), + LocalDateTime.of(1999, 9, 9, 22, 33, 44), dateTime2ExprTs(LocalDateTime.of(2077, 12, 21, 22, 33, 44))), - Arguments.of(LocalDate.of(2077, 12, 21), Instant.ofEpochSecond(42), + Arguments.of( + LocalDate.of(2077, 12, 21), + Instant.ofEpochSecond(42), dateTime2ExprTs(LocalDateTime.of(2077, 12, 21, 0, 0, 42))), // TIME and STRING/DATE/TIME/DATETIME/TIMESTAMP - Arguments.of(LocalTime.of(9, 7, 0), "2077-12-15 01:48:00", + Arguments.of( + LocalTime.of(9, 7, 0), + "2077-12-15 01:48:00", dateTime2ExprTs(today.atTime(LocalTime.of(10, 55, 0)))), - Arguments.of(LocalTime.of(12, 20, 42), LocalDate.of(2077, 12, 21), + Arguments.of( + LocalTime.of(12, 20, 42), + LocalDate.of(2077, 12, 21), dateTime2ExprTs(today.atTime(LocalTime.of(12, 20, 42)))), - Arguments.of(LocalTime.of(9, 7, 0), LocalTime.of(1, 48), + Arguments.of( + LocalTime.of(9, 7, 0), + LocalTime.of(1, 48), dateTime2ExprTs(today.atTime(LocalTime.of(10, 55, 0)))), - Arguments.of(LocalTime.of(17, 30, 0), LocalDateTime.of(2077, 12, 21, 12, 20, 42), + Arguments.of( + LocalTime.of(17, 30, 0), + LocalDateTime.of(2077, 12, 21, 12, 20, 42), dateTime2ExprTs(today.plusDays(1).atTime(LocalTime.of(5, 50, 42)))), - Arguments.of(LocalTime.of(17, 30, 0), Instant.ofEpochSecond(42), + Arguments.of( + LocalTime.of(17, 30, 0), + Instant.ofEpochSecond(42), dateTime2ExprTs(today.atTime(LocalTime.of(17, 30, 42)))), // DATETIME and STRING/DATE/TIME/DATETIME/TIMESTAMP - Arguments.of(LocalDateTime.of(1961, 4, 12, 9, 7, 0), "2077-12-15 01:48:00", + Arguments.of( + LocalDateTime.of(1961, 4, 12, 9, 7, 0), + "2077-12-15 01:48:00", 
dateTime2ExprTs(LocalDateTime.of(1961, 4, 12, 10, 55, 0))), - Arguments.of(LocalDateTime.of(1984, 2, 10, 12, 20, 42), LocalDate.of(2077, 12, 21), + Arguments.of( + LocalDateTime.of(1984, 2, 10, 12, 20, 42), + LocalDate.of(2077, 12, 21), dateTime2ExprTs(LocalDateTime.of(1984, 2, 10, 12, 20, 42))), - Arguments.of(LocalDateTime.of(1961, 4, 12, 9, 7, 0), LocalTime.of(1, 48), + Arguments.of( + LocalDateTime.of(1961, 4, 12, 9, 7, 0), + LocalTime.of(1, 48), dateTime2ExprTs(LocalDateTime.of(1961, 4, 12, 10, 55, 0))), - Arguments.of(LocalDateTime.of(2020, 12, 31, 17, 30, 0), + Arguments.of( + LocalDateTime.of(2020, 12, 31, 17, 30, 0), LocalDateTime.of(2077, 12, 21, 12, 20, 42), dateTime2ExprTs(LocalDateTime.of(2021, 1, 1, 5, 50, 42))), - Arguments.of(LocalDateTime.of(2020, 12, 31, 17, 30, 0), Instant.ofEpochSecond(42), + Arguments.of( + LocalDateTime.of(2020, 12, 31, 17, 30, 0), + Instant.ofEpochSecond(42), dateTime2ExprTs(LocalDateTime.of(2020, 12, 31, 17, 30, 42))), // TIMESTAMP and STRING/DATE/TIME/DATETIME/TIMESTAMP - Arguments.of(dateTime2Instant(LocalDateTime.of(1961, 4, 12, 9, 7, 0)), + Arguments.of( + dateTime2Instant(LocalDateTime.of(1961, 4, 12, 9, 7, 0)), "2077-12-15 01:48:00", dateTime2ExprTs(LocalDateTime.of(1961, 4, 12, 10, 55, 0))), - Arguments.of(dateTime2Instant(LocalDateTime.of(1984, 2, 10, 12, 20, 42)), + Arguments.of( + dateTime2Instant(LocalDateTime.of(1984, 2, 10, 12, 20, 42)), LocalDate.of(2077, 12, 21), dateTime2ExprTs(LocalDateTime.of(1984, 2, 10, 12, 20, 42))), - Arguments.of(dateTime2Instant(LocalDateTime.of(1961, 4, 12, 9, 7, 0)), + Arguments.of( + dateTime2Instant(LocalDateTime.of(1961, 4, 12, 9, 7, 0)), LocalTime.of(1, 48), dateTime2ExprTs(LocalDateTime.of(1961, 4, 12, 10, 55, 0))), - Arguments.of(dateTime2Instant(LocalDateTime.of(2020, 12, 31, 17, 30, 0)), + Arguments.of( + dateTime2Instant(LocalDateTime.of(2020, 12, 31, 17, 30, 0)), LocalDateTime.of(2077, 12, 21, 12, 20, 42), dateTime2ExprTs(LocalDateTime.of(2021, 1, 1, 5, 50, 42))), - 
Arguments.of(dateTime2Instant(LocalDateTime.of(2020, 12, 31, 17, 30, 0)), + Arguments.of( + dateTime2Instant(LocalDateTime.of(2020, 12, 31, 17, 30, 0)), Instant.ofEpochSecond(42), - dateTime2ExprTs(LocalDateTime.of(2020, 12, 31, 17, 30, 42))) - ); + dateTime2ExprTs(LocalDateTime.of(2020, 12, 31, 17, 30, 42)))); } /** * Test `TIMESTAMP` function which takes 2 arguments with input of different types. + * * @param arg1 First argument to be passed to `TIMESTAMP` function. * @param arg2 Second argument to be passed to `TIMESTAMP` function. * @param expected The expected result. @@ -181,9 +228,11 @@ private static Stream getTestData() { @ParameterizedTest @MethodSource("getTestData") public void timestamp_with_two_args(Object arg1, Object arg2, ExprTimestampValue expected) { - var expr = DSL.timestamp(functionProperties, - DSL.literal(ExprValueUtils.fromObjectValue(arg1)), - DSL.literal(ExprValueUtils.fromObjectValue(arg2))); + var expr = + DSL.timestamp( + functionProperties, + DSL.literal(ExprValueUtils.fromObjectValue(arg1)), + DSL.literal(ExprValueUtils.fromObjectValue(arg2))); assertEquals(TIMESTAMP, expr.type()); assertEquals(expected, expr.valueOf()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java index 1e89659de7..7aa824e61d 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -35,7 +34,6 @@ import org.opensearch.sql.expression.ExpressionTestBase; import org.opensearch.sql.expression.FunctionExpression; - class ToSecondsTest extends ExpressionTestBase { private static final long SECONDS_FROM_0001_01_01_TO_EPOCH_START = 62167219200L; @@ -54,11 +52,9 @@ 
private static Stream getTestDataForToSeconds() { Arguments.of(new ExprStringValue("2009-11-29 00:00:00"), new ExprLongValue(63426672000L)), Arguments.of(new ExprStringValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L)), Arguments.of(new ExprDateValue("2009-11-29"), new ExprLongValue(63426672000L)), - Arguments.of(new ExprDatetimeValue("2009-11-29 13:43:32"), - new ExprLongValue(63426721412L)), - Arguments.of(new ExprTimestampValue("2009-11-29 13:43:32"), - new ExprLongValue(63426721412L)) - ); + Arguments.of(new ExprDatetimeValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L)), + Arguments.of( + new ExprTimestampValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L))); } @ParameterizedTest @@ -71,12 +67,13 @@ public void testToSeconds(ExprValue arg, ExprValue expected) { @Test public void testToSecondsWithTimeType() { - FunctionExpression expr = DSL.to_seconds(functionProperties, - DSL.literal(new ExprTimeValue("10:11:12"))); + FunctionExpression expr = + DSL.to_seconds(functionProperties, DSL.literal(new ExprTimeValue("10:11:12"))); - long expected = SECONDS_FROM_0001_01_01_TO_EPOCH_START - + LocalDate.now(functionProperties.getQueryStartClock()) - .toEpochSecond(LocalTime.parse("10:11:12"), ZoneOffset.UTC); + long expected = + SECONDS_FROM_0001_01_01_TO_EPOCH_START + + LocalDate.now(functionProperties.getQueryStartClock()) + .toEpochSecond(LocalTime.parse("10:11:12"), ZoneOffset.UTC); assertEquals(expected, eval(expr).longValue()); } @@ -88,8 +85,7 @@ private static Stream getInvalidTestDataForToSeconds() { Arguments.of(new ExprStringValue("2000-10-45")), Arguments.of(new ExprStringValue("2000-10-10 70:00:00")), Arguments.of(new ExprStringValue("2000-10-10 00:70:00")), - Arguments.of(new ExprStringValue("2000-10-10 00:00:70")) - ); + Arguments.of(new ExprStringValue("2000-10-10 00:00:70"))); } @ParameterizedTest @@ -106,9 +102,10 @@ public void testToSecondsWithDateAdd() { long addedSeconds = SECONDS_PER_DAY; long expected = 
eval(dateExpr).longValue() + addedSeconds; - FunctionExpression dateAddExpr = DSL.date_add( - DSL.literal(new ExprDateValue(date)), - DSL.literal(new ExprIntervalValue(Duration.ofSeconds(addedSeconds)))); + FunctionExpression dateAddExpr = + DSL.date_add( + DSL.literal(new ExprDateValue(date)), + DSL.literal(new ExprIntervalValue(Duration.ofSeconds(addedSeconds)))); long result = eval(DSL.to_seconds(DSL.literal(eval(dateAddExpr)))).longValue(); diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java index f6e24f4e27..c979b68302 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java @@ -43,19 +43,20 @@ private static Stream getDateSamples() { Arguments.of(LocalDate.of(1999, 12, 31)), Arguments.of(LocalDate.of(2004, 2, 29)), Arguments.of(LocalDate.of(2100, 2, 28)), - Arguments.of(LocalDate.of(2012, 2, 21)) - ); + Arguments.of(LocalDate.of(2012, 2, 21))); } /** * Check processing valid values of type LocalDate. + * * @param value a value */ @ParameterizedTest @MethodSource("getDateSamples") public void checkOfDate(LocalDate value) { assertEquals(value.getLong(ChronoField.EPOCH_DAY) * 24 * 3600, unixTimeStampOf(value)); - assertEquals(value.getLong(ChronoField.EPOCH_DAY) * 24 * 3600, + assertEquals( + value.getLong(ChronoField.EPOCH_DAY) * 24 * 3600, eval(unixTimeStampOf(DSL.literal(new ExprDateValue(value)))).longValue()); } @@ -66,36 +67,39 @@ private static Stream getDateTimeSamples() { Arguments.of(LocalDateTime.of(1999, 12, 31, 23, 59)), Arguments.of(LocalDateTime.of(2004, 2, 29, 7, 40)), Arguments.of(LocalDateTime.of(2100, 2, 28, 13, 14)), - Arguments.of(LocalDateTime.of(2012, 2, 21, 0, 0)) - ); + Arguments.of(LocalDateTime.of(2012, 2, 21, 0, 0))); } /** * Check processing valid values of type LocalDateTime. 
+ * * @param value a value */ @ParameterizedTest @MethodSource("getDateTimeSamples") public void checkOfDateTime(LocalDateTime value) { assertEquals(value.toEpochSecond(ZoneOffset.UTC), unixTimeStampOf(value)); - assertEquals(value.toEpochSecond(ZoneOffset.UTC), + assertEquals( + value.toEpochSecond(ZoneOffset.UTC), eval(unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value)))).longValue()); } private static Stream getInstantSamples() { return getDateTimeSamples() - .map(v -> Arguments.of(((LocalDateTime)v.get()[0]).toInstant(ZoneOffset.UTC))); + .map(v -> Arguments.of(((LocalDateTime) v.get()[0]).toInstant(ZoneOffset.UTC))); } /** * Check processing valid values of type Instant. + * * @param value a value */ @ParameterizedTest @MethodSource("getInstantSamples") public void checkOfInstant(Instant value) { assertEquals(value.getEpochSecond(), unixTimeStampOf(value)); - assertEquals(value.getEpochSecond(), + assertEquals( + value.getEpochSecond(), eval(unixTimeStampOf(DSL.literal(new ExprTimestampValue(value)))).longValue()); } @@ -104,17 +108,17 @@ public void checkOfInstant(Instant value) { private static Stream getDoubleSamples() { return Stream.of( Arguments.of(840101d, LocalDateTime.of(1984, 1, 1, 0, 0, 0)), - Arguments.of(840101112233d, LocalDateTime.of(1984, 1, 1, 11,22,33)), + Arguments.of(840101112233d, LocalDateTime.of(1984, 1, 1, 11, 22, 33)), Arguments.of(840101112233.123456, LocalDateTime.of(1984, 1, 1, 11, 22, 33, 123456000)), Arguments.of(19840101d, LocalDateTime.of(1984, 1, 1, 0, 0, 0)), Arguments.of(19840101000000d, LocalDateTime.of(1984, 1, 1, 0, 0, 0)), - Arguments.of(19840101112233d, LocalDateTime.of(1984, 1, 1, 11,22,33)), - Arguments.of(19840101112233.123456, LocalDateTime.of(1984, 1, 1, 11, 22, 33, 123456000)) - ); + Arguments.of(19840101112233d, LocalDateTime.of(1984, 1, 1, 11, 22, 33)), + Arguments.of(19840101112233.123456, LocalDateTime.of(1984, 1, 1, 11, 22, 33, 123456000))); } /** * Check processing valid Double values. 
+ * * @param valueAsDouble a value * @param valueAsLDT the value as LocalDateTime */ @@ -122,11 +126,13 @@ private static Stream getDoubleSamples() { @MethodSource("getDoubleSamples") public void checkOfDoubleFormats(Double valueAsDouble, LocalDateTime valueAsLDT) { var valueAsStr = new DecimalFormat("0.#").format(valueAsDouble); - assertEquals(valueAsLDT.toEpochSecond(ZoneOffset.UTC), - unixTimeStampOf(valueAsDouble), 1d, valueAsStr); - assertEquals(valueAsLDT.toEpochSecond(ZoneOffset.UTC), + assertEquals( + valueAsLDT.toEpochSecond(ZoneOffset.UTC), unixTimeStampOf(valueAsDouble), 1d, valueAsStr); + assertEquals( + valueAsLDT.toEpochSecond(ZoneOffset.UTC), eval(unixTimeStampOf(DSL.literal(new ExprDoubleValue(valueAsDouble)))).longValue(), - 1d, valueAsStr); + 1d, + valueAsStr); } @Test @@ -165,7 +171,7 @@ public void checkMaxValue() { private static Stream getInvalidDoubleSamples() { return Stream.of( - //invalid dates + // invalid dates Arguments.of(19990231.d), Arguments.of(19991320.d), Arguments.of(19991232.d), @@ -181,7 +187,7 @@ private static Stream getInvalidDoubleSamples() { Arguments.of(9912.d), Arguments.of(199912.d), Arguments.of(1999102.d), - //same as above, but with valid time + // same as above, but with valid time Arguments.of(19990231112233.d), Arguments.of(19991320112233.d), Arguments.of(19991232112233.d), @@ -197,7 +203,7 @@ private static Stream getInvalidDoubleSamples() { Arguments.of(9912112233.d), Arguments.of(199912112233.d), Arguments.of(1999102112233.d), - //invalid time + // invalid time Arguments.of(19840101242233.d), Arguments.of(19840101116033.d), Arguments.of(19840101112260.d), @@ -206,7 +212,7 @@ private static Stream getInvalidDoubleSamples() { Arguments.of(19840101123.d), Arguments.of(1984010113.d), Arguments.of(198401011.d), - //same, but with short date + // same, but with short date Arguments.of(840101242233.d), Arguments.of(840101116033.d), Arguments.of(840101112260.d), @@ -214,23 +220,24 @@ private static Stream 
getInvalidDoubleSamples() { Arguments.of(8401011123.d), Arguments.of(840101123.d), Arguments.of(8401011.d), - //misc + // misc Arguments.of(0d), Arguments.of(-1d), Arguments.of(42d), - //too many digits - Arguments.of(199902201122330d) - ); + // too many digits + Arguments.of(199902201122330d)); } /** * Check processing invalid Double values. + * * @param value a value */ @ParameterizedTest @MethodSource("getInvalidDoubleSamples") public void checkInvalidDoubleCausesNull(Double value) { - assertEquals(ExprNullValue.of(), + assertEquals( + ExprNullValue.of(), unixTimeStampOf(DSL.literal(new ExprDoubleValue(value))).valueOf(), new DecimalFormat("0.#").format(value)); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java index 70fe299e97..c74b062fba 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -33,8 +32,7 @@ public void checkConvertNow_with_eval() { } private LocalDateTime getExpectedNow() { - return LocalDateTime.now( - functionProperties.getQueryStartClock().withZone(UTC_ZONE_ID)) + return LocalDateTime.now(functionProperties.getQueryStartClock().withZone(UTC_ZONE_ID)) .withNano(0); } @@ -44,23 +42,25 @@ private static Stream getDoubleSamples() { Arguments.of(100500.100500d), Arguments.of(1447430881.564d), Arguments.of(2147483647.451232d), - Arguments.of(1662577241.d) - ); + Arguments.of(1662577241.d)); } /** * Test converting valid Double values EpochTime -> DateTime -> EpochTime. 
+ * * @param value a value */ @ParameterizedTest @MethodSource("getDoubleSamples") public void convertEpoch2DateTime2Epoch(Double value) { assertEquals(value, unixTimeStampOf(fromUnixTime(value))); - assertEquals(value, + assertEquals( + value, eval(unixTimeStampOf(fromUnixTime(DSL.literal(new ExprDoubleValue(value))))).doubleValue()); assertEquals(Math.round(value) + 0d, unixTimeStampOf(fromUnixTime(Math.round(value)))); - assertEquals(Math.round(value) + 0d, + assertEquals( + Math.round(value) + 0d, eval(unixTimeStampOf(fromUnixTime(DSL.literal(new ExprLongValue(Math.round(value)))))) .doubleValue()); } @@ -72,19 +72,20 @@ private static Stream getDateTimeSamples() { Arguments.of(LocalDateTime.of(1999, 12, 31, 23, 59, 59)), Arguments.of(LocalDateTime.of(2004, 2, 29, 7, 40)), Arguments.of(LocalDateTime.of(2100, 2, 28, 13, 14, 15)), - Arguments.of(LocalDateTime.of(2012, 2, 21, 0, 0, 17)) - ); + Arguments.of(LocalDateTime.of(2012, 2, 21, 0, 0, 17))); } /** * Test converting valid values DateTime -> EpochTime -> DateTime. 
+ * * @param value a value */ @ParameterizedTest @MethodSource("getDateTimeSamples") public void convertDateTime2Epoch2DateTime(LocalDateTime value) { assertEquals(value, fromUnixTime(unixTimeStampOf(value))); - assertEquals(value, + assertEquals( + value, eval(fromUnixTime(unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value))))) .datetimeValue()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/WeekdayTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/WeekdayTest.java index 4b97639996..d2a6394503 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/WeekdayTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/WeekdayTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertAll; @@ -28,109 +27,89 @@ import org.opensearch.sql.expression.FunctionExpression; import org.opensearch.sql.expression.LiteralExpression; - class WeekdayTest extends ExpressionTestBase { - private void weekdayQuery( - FunctionExpression dateExpression, - int dayOfWeek, - String testExpr) { + private void weekdayQuery(FunctionExpression dateExpression, int dayOfWeek, String testExpr) { assertAll( () -> assertEquals(INTEGER, dateExpression.type()), () -> assertEquals(integerValue(dayOfWeek), eval(dateExpression)), - () -> assertEquals(testExpr, dateExpression.toString()) - ); + () -> assertEquals(testExpr, dateExpression.toString())); } private static Stream getTestDataForWeekday() { return Stream.of( - Arguments.of( - DSL.literal(new ExprDateValue("2020-08-07")), - 4, - "weekday(DATE '2020-08-07')"), - Arguments.of( - DSL.literal(new ExprDateValue("2020-08-09")), - 6, - "weekday(DATE '2020-08-09')"), - Arguments.of( - DSL.literal("2020-08-09"), - 6, - "weekday(\"2020-08-09\")"), - Arguments.of( - DSL.literal("2020-08-09 01:02:03"), - 6, - "weekday(\"2020-08-09 01:02:03\")") - ); + 
Arguments.of(DSL.literal(new ExprDateValue("2020-08-07")), 4, "weekday(DATE '2020-08-07')"), + Arguments.of(DSL.literal(new ExprDateValue("2020-08-09")), 6, "weekday(DATE '2020-08-09')"), + Arguments.of(DSL.literal("2020-08-09"), 6, "weekday(\"2020-08-09\")"), + Arguments.of(DSL.literal("2020-08-09 01:02:03"), 6, "weekday(\"2020-08-09 01:02:03\")")); } @MethodSource("getTestDataForWeekday") @ParameterizedTest public void weekday(LiteralExpression arg, int expectedInt, String expectedString) { - FunctionExpression expression = DSL.weekday( - functionProperties, - arg); + FunctionExpression expression = DSL.weekday(functionProperties, arg); weekdayQuery(expression, expectedInt, expectedString); } @Test public void testWeekdayWithTimeType() { - FunctionExpression expression = DSL.weekday( - functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); + FunctionExpression expression = + DSL.weekday(functionProperties, DSL.literal(new ExprTimeValue("12:23:34"))); assertAll( () -> assertEquals(INTEGER, eval(expression).type()), - () -> assertEquals(( - LocalDate.now( - functionProperties.getQueryStartClock()).getDayOfWeek().getValue() - 1), - eval(expression).integerValue()), - () -> assertEquals("weekday(TIME '12:23:34')", expression.toString()) - ); + () -> + assertEquals( + (LocalDate.now(functionProperties.getQueryStartClock()).getDayOfWeek().getValue() + - 1), + eval(expression).integerValue()), + () -> assertEquals("weekday(TIME '12:23:34')", expression.toString())); } private void testInvalidWeekday(String date) { - FunctionExpression expression = DSL.weekday( - functionProperties, DSL.literal(new ExprDateValue(date))); + FunctionExpression expression = + DSL.weekday(functionProperties, DSL.literal(new ExprDateValue(date))); eval(expression); } @Test public void weekdayLeapYear() { assertAll( - //Feb. 29 of a leap year - () -> weekdayQuery(DSL.weekday( - functionProperties, - DSL.literal("2020-02-29")), 5, "weekday(\"2020-02-29\")"), - //day after Feb. 
29 of a leap year - () -> weekdayQuery(DSL.weekday( - functionProperties, - DSL.literal("2020-03-01")), 6, "weekday(\"2020-03-01\")"), - //Feb. 28 of a non-leap year - () -> weekdayQuery(DSL.weekday( - functionProperties, - DSL.literal("2021-02-28")), 6, "weekday(\"2021-02-28\")"), - //Feb. 29 of a non-leap year - () -> assertThrows( - SemanticCheckException.class, () -> testInvalidWeekday("2021-02-29")) - ); + // Feb. 29 of a leap year + () -> + weekdayQuery( + DSL.weekday(functionProperties, DSL.literal("2020-02-29")), + 5, + "weekday(\"2020-02-29\")"), + // day after Feb. 29 of a leap year + () -> + weekdayQuery( + DSL.weekday(functionProperties, DSL.literal("2020-03-01")), + 6, + "weekday(\"2020-03-01\")"), + // Feb. 28 of a non-leap year + () -> + weekdayQuery( + DSL.weekday(functionProperties, DSL.literal("2021-02-28")), + 6, + "weekday(\"2021-02-28\")"), + // Feb. 29 of a non-leap year + () -> assertThrows(SemanticCheckException.class, () -> testInvalidWeekday("2021-02-29"))); } @Test public void weekdayInvalidArgument() { assertAll( - //40th day of the month - () -> assertThrows(SemanticCheckException.class, - () -> testInvalidWeekday("2021-02-40")), - - //13th month of the year - () -> assertThrows(SemanticCheckException.class, - () -> testInvalidWeekday("2021-13-29")), - - //incorrect format - () -> assertThrows(SemanticCheckException.class, - () -> testInvalidWeekday("asdfasdf")) - ); + // 40th day of the month + () -> assertThrows(SemanticCheckException.class, () -> testInvalidWeekday("2021-02-40")), + + // 13th month of the year + () -> assertThrows(SemanticCheckException.class, () -> testInvalidWeekday("2021-13-29")), + + // incorrect format + () -> assertThrows(SemanticCheckException.class, () -> testInvalidWeekday("asdfasdf"))); } private ExprValue eval(Expression expression) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java index 
7517c5e8bf..4f7208d141 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_YEAR; @@ -32,20 +31,18 @@ class YearweekTest extends ExpressionTestBase { private void yearweekQuery(String date, int mode, int expectedResult) { - FunctionExpression expression = DSL - .yearweek( - functionProperties, - DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); + FunctionExpression expression = + DSL.yearweek(functionProperties, DSL.literal(new ExprDateValue(date)), DSL.literal(mode)); assertAll( () -> assertEquals(INTEGER, expression.type()), - () -> assertEquals( - String.format("yearweek(DATE '%s', %d)", date, mode), expression.toString()), - () -> assertEquals(integerValue(expectedResult), eval(expression)) - ); + () -> + assertEquals( + String.format("yearweek(DATE '%s', %d)", date, mode), expression.toString()), + () -> assertEquals(integerValue(expectedResult), eval(expression))); } private static Stream getTestDataForYearweek() { - //Test the behavior of different modes passed into the 'yearweek' function + // Test the behavior of different modes passed into the 'yearweek' function return Stream.of( Arguments.of("2019-01-05", 0, 201852), Arguments.of("2019-01-05", 1, 201901), @@ -78,8 +75,7 @@ private static Stream getTestDataForYearweek() { Arguments.of("1999-01-01", 1, 199852), Arguments.of("1999-01-01", 4, 199852), Arguments.of("1999-01-01", 5, 199852), - Arguments.of("1999-01-01", 6, 199852) - ); + Arguments.of("1999-01-01", 6, 199852)); } @ParameterizedTest(name = "{0} | {1}") @@ -90,17 +86,13 @@ public void testYearweak(String date, int mode, int expected) { @Test public void testYearweekWithoutMode() { - LocalDate date = LocalDate.of(2019,1,05); + LocalDate date = 
LocalDate.of(2019, 1, 05); - FunctionExpression expression = DSL - .yearweek( - functionProperties, - DSL.literal(new ExprDateValue(date)), DSL.literal(0)); + FunctionExpression expression = + DSL.yearweek(functionProperties, DSL.literal(new ExprDateValue(date)), DSL.literal(0)); - FunctionExpression expressionWithoutMode = DSL - .yearweek( - functionProperties, - DSL.literal(new ExprDateValue(date))); + FunctionExpression expressionWithoutMode = + DSL.yearweek(functionProperties, DSL.literal(new ExprDateValue(date))); assertEquals(eval(expression), eval(expressionWithoutMode)); } @@ -111,58 +103,53 @@ public void testYearweekWithTimeType() { int year = LocalDate.now(functionProperties.getQueryStartClock()).getYear(); int expected = Integer.parseInt(String.format("%d%02d", year, week)); - FunctionExpression expression = DSL - .yearweek( - functionProperties, - DSL.literal(new ExprTimeValue("10:11:12")), DSL.literal(0)); + FunctionExpression expression = + DSL.yearweek( + functionProperties, DSL.literal(new ExprTimeValue("10:11:12")), DSL.literal(0)); - FunctionExpression expressionWithoutMode = DSL - .yearweek( - functionProperties, - DSL.literal(new ExprTimeValue("10:11:12"))); + FunctionExpression expressionWithoutMode = + DSL.yearweek(functionProperties, DSL.literal(new ExprTimeValue("10:11:12"))); assertAll( () -> assertEquals(expected, eval(expression).integerValue()), - () -> assertEquals(expected, eval(expressionWithoutMode).integerValue()) - ); + () -> assertEquals(expected, eval(expressionWithoutMode).integerValue())); } @Test public void testInvalidYearWeek() { assertAll( - //test invalid month - () -> assertThrows( - SemanticCheckException.class, - () -> yearweekQuery("2019-13-05 01:02:03", 0, 0)), - //test invalid day - () -> assertThrows( - SemanticCheckException.class, - () -> yearweekQuery("2019-01-50 01:02:03", 0, 0)), - //test invalid leap year - () -> assertThrows( - SemanticCheckException.class, - () -> yearweekQuery("2019-02-29 01:02:03", 0, 0)) 
- ); + // test invalid month + () -> + assertThrows( + SemanticCheckException.class, () -> yearweekQuery("2019-13-05 01:02:03", 0, 0)), + // test invalid day + () -> + assertThrows( + SemanticCheckException.class, () -> yearweekQuery("2019-01-50 01:02:03", 0, 0)), + // test invalid leap year + () -> + assertThrows( + SemanticCheckException.class, () -> yearweekQuery("2019-02-29 01:02:03", 0, 0))); } @Test public void yearweekModeInUnsupportedFormat() { - FunctionExpression expression1 = DSL - .yearweek( + FunctionExpression expression1 = + DSL.yearweek( functionProperties, - DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), DSL.literal(8)); + DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), + DSL.literal(8)); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> eval(expression1)); - assertEquals("mode:8 is invalid, please use mode value between 0-7", - exception.getMessage()); + assertEquals("mode:8 is invalid, please use mode value between 0-7", exception.getMessage()); - FunctionExpression expression2 = DSL - .yearweek( + FunctionExpression expression2 = + DSL.yearweek( functionProperties, - DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), DSL.literal(-1)); + DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), + DSL.literal(-1)); exception = assertThrows(SemanticCheckException.class, () -> eval(expression2)); - assertEquals("mode:-1 is invalid, please use mode value between 0-7", - exception.getMessage()); + assertEquals("mode:-1 is invalid, please use mode value between 0-7", exception.getMessage()); } private ExprValue eval(Expression expression) { diff --git a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionNameTest.java b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionNameTest.java index 075d809544..774c471b7d 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionNameTest.java +++ 
b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionNameTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -20,8 +19,7 @@ class BuiltinFunctionNameTest { private static Stream ofArguments() { Stream.Builder builder = Stream.builder(); - return Arrays.asList(BuiltinFunctionName.values()) - .stream() + return Arrays.asList(BuiltinFunctionName.values()).stream() .map(functionName -> Arguments.of(functionName.getName().getFunctionName(), functionName)); } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java index 98a4c32e2e..3ee12f59d4 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -47,20 +46,13 @@ @ExtendWith(MockitoExtension.class) class BuiltinFunctionRepositoryTest { - @Mock - private DefaultFunctionResolver mockfunctionResolver; - @Mock - private Map mockMap; - @Mock - FunctionProperties functionProperties; - @Mock - private FunctionName mockFunctionName; - @Mock - private FunctionBuilder functionExpressionBuilder; - @Mock - private FunctionSignature functionSignature; - @Mock - private Expression mockExpression; + @Mock private DefaultFunctionResolver mockfunctionResolver; + @Mock private Map mockMap; + @Mock FunctionProperties functionProperties; + @Mock private FunctionName mockFunctionName; + @Mock private FunctionBuilder functionExpressionBuilder; + @Mock private FunctionSignature functionSignature; + @Mock private Expression mockExpression; 
private BuiltinFunctionRepository repo; @@ -83,16 +75,15 @@ void compile() { when(mockExpression.type()).thenReturn(UNDEFINED); when(functionSignature.getParamTypeList()).thenReturn(Arrays.asList(UNDEFINED)); when(mockfunctionResolver.getFunctionName()).thenReturn(mockFunctionName); - when(mockfunctionResolver.resolve(any())).thenReturn( - Pair.of(functionSignature, functionExpressionBuilder)); + when(mockfunctionResolver.resolve(any())) + .thenReturn(Pair.of(functionSignature, functionExpressionBuilder)); when(mockMap.containsKey(mockFunctionName)).thenReturn(true); when(mockMap.get(mockFunctionName)).thenReturn(mockfunctionResolver); BuiltinFunctionRepository repo = new BuiltinFunctionRepository(mockMap); repo.register(mockfunctionResolver); repo.compile(functionProperties, mockFunctionName, Arrays.asList(mockExpression)); - verify(functionExpressionBuilder, times(1)) - .apply(eq(functionProperties), any()); + verify(functionExpressionBuilder, times(1)).apply(eq(functionProperties), any()); } @Test @@ -101,8 +92,8 @@ void compile_datasource_defined_function() { when(mockExpression.type()).thenReturn(UNDEFINED); when(functionSignature.getParamTypeList()).thenReturn(Arrays.asList(UNDEFINED)); when(dataSourceFunctionResolver.getFunctionName()).thenReturn(mockFunctionName); - when(dataSourceFunctionResolver.resolve(any())).thenReturn( - Pair.of(functionSignature, functionExpressionBuilder)); + when(dataSourceFunctionResolver.resolve(any())) + .thenReturn(Pair.of(functionSignature, functionExpressionBuilder)); BuiltinFunctionRepository repo = new BuiltinFunctionRepository(Map.of()); repo.compile( @@ -110,8 +101,7 @@ void compile_datasource_defined_function() { Collections.singletonList(dataSourceFunctionResolver), mockFunctionName, Arrays.asList(mockExpression)); - verify(functionExpressionBuilder, times(1)) - .apply(eq(functionProperties), any()); + verify(functionExpressionBuilder, times(1)).apply(eq(functionProperties), any()); } @Test @@ -119,22 +109,23 @@ void 
compile_datasource_defined_function() { void resolve() { when(functionSignature.getFunctionName()).thenReturn(mockFunctionName); when(mockfunctionResolver.getFunctionName()).thenReturn(mockFunctionName); - when(mockfunctionResolver.resolve(functionSignature)).thenReturn( - Pair.of(functionSignature, functionExpressionBuilder)); + when(mockfunctionResolver.resolve(functionSignature)) + .thenReturn(Pair.of(functionSignature, functionExpressionBuilder)); when(mockMap.containsKey(mockFunctionName)).thenReturn(true); when(mockMap.get(mockFunctionName)).thenReturn(mockfunctionResolver); BuiltinFunctionRepository repo = new BuiltinFunctionRepository(mockMap); repo.register(mockfunctionResolver); - assertEquals(functionExpressionBuilder, repo.resolve( - Collections.emptyList(), functionSignature)); + assertEquals( + functionExpressionBuilder, repo.resolve(Collections.emptyList(), functionSignature)); } @Test void resolve_should_not_cast_arguments_in_cast_function() { when(mockExpression.toString()).thenReturn("string"); FunctionImplementation function = - repo.resolve(Collections.emptyList(), + repo.resolve( + Collections.emptyList(), registerFunctionResolver(CAST_TO_BOOLEAN.getName(), DATETIME, BOOLEAN)) .apply(functionProperties, ImmutableList.of(mockExpression)); assertEquals("cast_to_boolean(string)", function.toString()); @@ -145,8 +136,8 @@ void resolve_should_not_cast_arguments_if_same_type() { when(mockFunctionName.getFunctionName()).thenReturn("mock"); when(mockExpression.toString()).thenReturn("string"); FunctionImplementation function = - repo.resolve(Collections.emptyList(), - registerFunctionResolver(mockFunctionName, STRING, STRING)) + repo.resolve( + Collections.emptyList(), registerFunctionResolver(mockFunctionName, STRING, STRING)) .apply(functionProperties, ImmutableList.of(mockExpression)); assertEquals("mock(string)", function.toString()); } @@ -156,8 +147,8 @@ void resolve_should_not_cast_arguments_if_both_numbers() { 
when(mockFunctionName.getFunctionName()).thenReturn("mock"); when(mockExpression.toString()).thenReturn("byte"); FunctionImplementation function = - repo.resolve(Collections.emptyList(), - registerFunctionResolver(mockFunctionName, BYTE, INTEGER)) + repo.resolve( + Collections.emptyList(), registerFunctionResolver(mockFunctionName, BYTE, INTEGER)) .apply(functionProperties, ImmutableList.of(mockExpression)); assertEquals("mock(byte)", function.toString()); } @@ -168,8 +159,7 @@ void resolve_should_cast_arguments() { when(mockExpression.toString()).thenReturn("string"); when(mockExpression.type()).thenReturn(STRING); - FunctionSignature signature = - registerFunctionResolver(mockFunctionName, STRING, BOOLEAN); + FunctionSignature signature = registerFunctionResolver(mockFunctionName, STRING, BOOLEAN); registerFunctionResolver(CAST_TO_BOOLEAN.getName(), STRING, STRING); FunctionImplementation function = @@ -181,10 +171,13 @@ void resolve_should_cast_arguments() { @Test void resolve_should_throw_exception_for_unsupported_conversion() { ExpressionEvaluationException error = - assertThrows(ExpressionEvaluationException.class, () -> - repo.resolve(Collections.emptyList(), - registerFunctionResolver(mockFunctionName, BYTE, STRUCT)) - .apply(functionProperties, ImmutableList.of(mockExpression))); + assertThrows( + ExpressionEvaluationException.class, + () -> + repo.resolve( + Collections.emptyList(), + registerFunctionResolver(mockFunctionName, BYTE, STRUCT)) + .apply(functionProperties, ImmutableList.of(mockExpression))); assertEquals(error.getMessage(), "Type conversion to type STRUCT is not supported"); } @@ -195,33 +188,37 @@ void resolve_unregistered() { BuiltinFunctionRepository repo = new BuiltinFunctionRepository(mockMap); repo.register(mockfunctionResolver); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> repo.resolve(Collections.emptyList(), - new FunctionSignature(FunctionName.of("unknown"), List.of()))); + 
ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + repo.resolve( + Collections.emptyList(), + new FunctionSignature(FunctionName.of("unknown"), List.of()))); assertEquals("unsupported function name: unknown", exception.getMessage()); } - private FunctionSignature registerFunctionResolver(FunctionName funcName, - ExprType sourceType, - ExprType targetType) { - FunctionSignature unresolvedSignature = new FunctionSignature( - funcName, ImmutableList.of(sourceType)); - FunctionSignature resolvedSignature = new FunctionSignature( - funcName, ImmutableList.of(targetType)); + private FunctionSignature registerFunctionResolver( + FunctionName funcName, ExprType sourceType, ExprType targetType) { + FunctionSignature unresolvedSignature = + new FunctionSignature(funcName, ImmutableList.of(sourceType)); + FunctionSignature resolvedSignature = + new FunctionSignature(funcName, ImmutableList.of(targetType)); DefaultFunctionResolver funcResolver = mock(DefaultFunctionResolver.class); FunctionBuilder funcBuilder = mock(FunctionBuilder.class); when(mockMap.containsKey(eq(funcName))).thenReturn(true); when(mockMap.get(eq(funcName))).thenReturn(funcResolver); - when(funcResolver.resolve(eq(unresolvedSignature))).thenReturn( - Pair.of(resolvedSignature, funcBuilder)); + when(funcResolver.resolve(eq(unresolvedSignature))) + .thenReturn(Pair.of(resolvedSignature, funcBuilder)); repo.register(funcResolver); // Relax unnecessary stubbing check because error case test doesn't call this - lenient().doAnswer(invocation -> - new FakeFunctionExpression(funcName, invocation.getArgument(1)) - ).when(funcBuilder).apply(eq(functionProperties), any()); + lenient() + .doAnswer(invocation -> new FakeFunctionExpression(funcName, invocation.getArgument(1))) + .when(funcBuilder) + .apply(eq(functionProperties), any()); return unresolvedSignature; } @@ -244,8 +241,9 @@ public ExprType type() { @Override public String toString() { return 
getFunctionName().getFunctionName() - + "(" + StringUtils.join(getArguments(), ", ") + ")"; + + "(" + + StringUtils.join(getArguments(), ", ") + + ")"; } } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/DefaultFunctionResolverTest.java b/core/src/test/java/org/opensearch/sql/expression/function/DefaultFunctionResolverTest.java index 202c1bd0aa..ad9e8a6661 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/DefaultFunctionResolverTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/DefaultFunctionResolverTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -27,32 +26,24 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) class DefaultFunctionResolverTest { - @Mock - private FunctionSignature exactlyMatchFS; - @Mock - private FunctionSignature bestMatchFS; - @Mock - private FunctionSignature leastMatchFS; - @Mock - private FunctionSignature notMatchFS; - @Mock - private FunctionSignature functionSignature; - @Mock - private FunctionBuilder exactlyMatchBuilder; - @Mock - private FunctionBuilder bestMatchBuilder; - @Mock - private FunctionBuilder leastMatchBuilder; - @Mock - private FunctionBuilder notMatchBuilder; + @Mock private FunctionSignature exactlyMatchFS; + @Mock private FunctionSignature bestMatchFS; + @Mock private FunctionSignature leastMatchFS; + @Mock private FunctionSignature notMatchFS; + @Mock private FunctionSignature functionSignature; + @Mock private FunctionBuilder exactlyMatchBuilder; + @Mock private FunctionBuilder bestMatchBuilder; + @Mock private FunctionBuilder leastMatchBuilder; + @Mock private FunctionBuilder notMatchBuilder; private FunctionName functionName = FunctionName.of("add"); @Test void resolve_function_signature_exactly_match() { 
when(functionSignature.match(exactlyMatchFS)).thenReturn(WideningTypeRule.TYPE_EQUAL); - DefaultFunctionResolver resolver = new DefaultFunctionResolver(functionName, - ImmutableMap.of(exactlyMatchFS, exactlyMatchBuilder)); + DefaultFunctionResolver resolver = + new DefaultFunctionResolver( + functionName, ImmutableMap.of(exactlyMatchFS, exactlyMatchBuilder)); assertEquals(exactlyMatchBuilder, resolver.resolve(functionSignature).getValue()); } @@ -61,8 +52,10 @@ void resolve_function_signature_exactly_match() { void resolve_function_signature_best_match() { when(functionSignature.match(bestMatchFS)).thenReturn(1); when(functionSignature.match(leastMatchFS)).thenReturn(2); - DefaultFunctionResolver resolver = new DefaultFunctionResolver(functionName, - ImmutableMap.of(bestMatchFS, bestMatchBuilder, leastMatchFS, leastMatchBuilder)); + DefaultFunctionResolver resolver = + new DefaultFunctionResolver( + functionName, + ImmutableMap.of(bestMatchFS, bestMatchBuilder, leastMatchFS, leastMatchBuilder)); assertEquals(bestMatchBuilder, resolver.resolve(functionSignature).getValue()); } @@ -72,12 +65,14 @@ void resolve_function_not_match() { when(functionSignature.match(notMatchFS)).thenReturn(WideningTypeRule.IMPOSSIBLE_WIDENING); when(notMatchFS.formatTypes()).thenReturn("[INTEGER,INTEGER]"); when(functionSignature.formatTypes()).thenReturn("[BOOLEAN,BOOLEAN]"); - DefaultFunctionResolver resolver = new DefaultFunctionResolver(functionName, - ImmutableMap.of(notMatchFS, notMatchBuilder)); - - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> resolver.resolve(functionSignature)); - assertEquals("add function expected {[INTEGER,INTEGER]}, but get [BOOLEAN,BOOLEAN]", + DefaultFunctionResolver resolver = + new DefaultFunctionResolver(functionName, ImmutableMap.of(notMatchFS, notMatchBuilder)); + + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> 
resolver.resolve(functionSignature)); + assertEquals( + "add function expected {[INTEGER,INTEGER]}, but get [BOOLEAN,BOOLEAN]", exception.getMessage()); } @@ -88,8 +83,8 @@ void resolve_varargs_function_signature_match() { when(functionSignature.getParamTypeList()).thenReturn(ImmutableList.of(STRING)); when(bestMatchFS.getParamTypeList()).thenReturn(ImmutableList.of(ARRAY)); - DefaultFunctionResolver resolver = new DefaultFunctionResolver(functionName, - ImmutableMap.of(bestMatchFS, bestMatchBuilder)); + DefaultFunctionResolver resolver = + new DefaultFunctionResolver(functionName, ImmutableMap.of(bestMatchFS, bestMatchBuilder)); assertEquals(bestMatchBuilder, resolver.resolve(functionSignature).getValue()); } @@ -102,13 +97,13 @@ void resolve_varargs_no_args_function_signature_not_match() { // Concat function with no arguments when(functionSignature.getParamTypeList()).thenReturn(Collections.emptyList()); - DefaultFunctionResolver resolver = new DefaultFunctionResolver(functionName, - ImmutableMap.of(bestMatchFS, bestMatchBuilder)); + DefaultFunctionResolver resolver = + new DefaultFunctionResolver(functionName, ImmutableMap.of(bestMatchFS, bestMatchBuilder)); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> resolver.resolve(functionSignature)); - assertEquals("concat function expected 1-9 arguments, but got 0", - exception.getMessage()); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> resolver.resolve(functionSignature)); + assertEquals("concat function expected 1-9 arguments, but got 0", exception.getMessage()); } @Test @@ -117,16 +112,17 @@ void resolve_varargs_too_many_args_function_signature_not_match() { when(functionSignature.match(bestMatchFS)).thenReturn(WideningTypeRule.TYPE_EQUAL); when(bestMatchFS.getParamTypeList()).thenReturn(ImmutableList.of(ARRAY)); // Concat function with more than 9 arguments - 
when(functionSignature.getParamTypeList()).thenReturn(ImmutableList - .of(STRING, STRING, STRING, STRING, STRING, - STRING, STRING, STRING, STRING, STRING)); - - DefaultFunctionResolver resolver = new DefaultFunctionResolver(functionName, - ImmutableMap.of(bestMatchFS, bestMatchBuilder)); - - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> resolver.resolve(functionSignature)); - assertEquals("concat function expected 1-9 arguments, but got 10", - exception.getMessage()); + when(functionSignature.getParamTypeList()) + .thenReturn( + ImmutableList.of( + STRING, STRING, STRING, STRING, STRING, STRING, STRING, STRING, STRING, STRING)); + + DefaultFunctionResolver resolver = + new DefaultFunctionResolver(functionName, ImmutableMap.of(bestMatchFS, bestMatchBuilder)); + + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> resolver.resolve(functionSignature)); + assertEquals("concat function expected 1-9 arguments, but got 10", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLDefineTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLDefineTest.java index 8bf4d7ba24..670605c5a0 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLDefineTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLDefineTest.java @@ -52,16 +52,15 @@ void define_name_test() { static class SampleFunctionBuilder implements FunctionBuilder { @Override - public FunctionImplementation apply(FunctionProperties functionProperties, - List arguments) { + public FunctionImplementation apply( + FunctionProperties functionProperties, List arguments) { return new SampleFunctionImplementation(arguments); } } @RequiredArgsConstructor static class SampleFunctionImplementation implements FunctionImplementation { - @Getter - private final List arguments; + @Getter 
private final List arguments; @Override public FunctionName getFunctionName() { diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java index 93d0c32f57..a8b9e8da96 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLTestBase.java @@ -17,33 +17,33 @@ @ExtendWith(MockitoExtension.class) public class FunctionDSLTestBase { - @Mock - FunctionProperties functionProperties; + @Mock FunctionProperties functionProperties; public static final ExprNullValue NULL = ExprNullValue.of(); public static final ExprMissingValue MISSING = ExprMissingValue.of(); protected static final ExprType ANY_TYPE = () -> "ANY"; - protected static final ExprValue ANY = new ExprValue() { - @Override - public Object value() { - throw new RuntimeException(); - } + protected static final ExprValue ANY = + new ExprValue() { + @Override + public Object value() { + throw new RuntimeException(); + } - @Override - public ExprType type() { - return ANY_TYPE; - } + @Override + public ExprType type() { + return ANY_TYPE; + } - @Override - public String toString() { - return "ANY"; - } + @Override + public String toString() { + return "ANY"; + } - @Override - public int compareTo(ExprValue o) { - throw new RuntimeException(); - } - }; + @Override + public int compareTo(ExprValue o) { + throw new RuntimeException(); + } + }; static final FunctionName SAMPLE_NAME = FunctionName.of("sample"); static final FunctionSignature SAMPLE_SIGNATURE_A = new FunctionSignature(SAMPLE_NAME, List.of(ExprCoreType.UNDEFINED)); @@ -54,18 +54,16 @@ public int compareTo(ExprValue o) { static final SerializableTriFunction twoArgWithProperties = (functionProperties, v1, v2) -> ANY; - static final SerializableQuadFunction - + static final SerializableQuadFunction< + FunctionProperties, 
ExprValue, ExprValue, ExprValue, ExprValue> threeArgsWithProperties = (functionProperties, v1, v2, v3) -> ANY; - static final SerializableBiFunction - twoArgs = (v1, v2) -> ANY; - static final SerializableTriFunction - threeArgs = (v1, v2, v3) -> ANY; + static final SerializableBiFunction twoArgs = (v1, v2) -> ANY; + static final SerializableTriFunction threeArgs = + (v1, v2, v3) -> ANY; static final SerializableQuadFunction fourArgs = (v1, v2, v3, v4) -> ANY; - @Mock - FunctionProperties mockProperties; + @Mock FunctionProperties mockProperties; } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTestBase.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTestBase.java index 8f494c01c3..a76e738e2d 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTestBase.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTestBase.java @@ -42,16 +42,16 @@ void implementation_valid_functionBuilder() { @Test void implementation_functionBuilder_return_functionExpression() { - FunctionImplementation executable = getImplementation().getValue() - .apply(functionProperties, getSampleArguments()); + FunctionImplementation executable = + getImplementation().getValue().apply(functionProperties, getSampleArguments()); assertTrue(executable instanceof FunctionExpression); } @Test void implementation_functionExpression_valueOf() { FunctionExpression executable = - (FunctionExpression) getImplementation().getValue() - .apply(functionProperties, getSampleArguments()); + (FunctionExpression) + getImplementation().getValue().apply(functionProperties, getSampleArguments()); assertEquals(ANY, executable.valueOf(null)); } @@ -59,23 +59,20 @@ void implementation_functionExpression_valueOf() { @Test void implementation_functionExpression_type() { FunctionExpression executable = - (FunctionExpression) getImplementation().getValue() - .apply(functionProperties, 
getSampleArguments()); + (FunctionExpression) + getImplementation().getValue().apply(functionProperties, getSampleArguments()); assertEquals(ANY_TYPE, executable.type()); } @Test void implementation_functionExpression_toString() { FunctionExpression executable = - (FunctionExpression) getImplementation().getValue() - .apply(functionProperties, getSampleArguments()); + (FunctionExpression) + getImplementation().getValue().apply(functionProperties, getSampleArguments()); assertEquals(getExpected_toString(), executable.toString()); } - /** - * A lambda that takes a function name and returns an implementation - * of the function. - */ + /** A lambda that takes a function name and returns an implementation of the function. */ abstract SerializableFunction> getImplementationGenerator(); diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTwoArgTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTwoArgTest.java index 87d097c9eb..4e2626b9c3 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTwoArgTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplTwoArgTest.java @@ -19,7 +19,6 @@ class FunctionDSLimplTwoArgTest extends FunctionDSLimplTestBase { return impl(twoArgs, ANY_TYPE, ANY_TYPE, ANY_TYPE); } - @Override List getSampleArguments() { return List.of(DSL.literal(ANY), DSL.literal(ANY)); diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesNoArgsTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesNoArgsTest.java index c3c41b6c0c..f4b8d0090e 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesNoArgsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesNoArgsTest.java @@ -9,7 +9,7 @@ import org.apache.commons.lang3.tuple.Pair; import 
org.opensearch.sql.expression.Expression; -class FunctionDSLimplWithPropertiesNoArgsTest extends FunctionDSLimplTestBase { +class FunctionDSLimplWithPropertiesNoArgsTest extends FunctionDSLimplTestBase { @Override SerializableFunction> diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesOneArgTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesOneArgTest.java index 4a05326c0a..57960b5c1c 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesOneArgTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesOneArgTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.expression.function; - import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.data.model.ExprValue; @@ -17,8 +16,8 @@ class FunctionDSLimplWithPropertiesOneArgTest extends FunctionDSLimplTestBase { @Override SerializableFunction> getImplementationGenerator() { - SerializableBiFunction functionBody - = (fp, arg) -> ANY; + SerializableBiFunction functionBody = + (fp, arg) -> ANY; return FunctionDSL.implWithProperties(functionBody, ANY_TYPE, ANY_TYPE); } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgTest.java index 18444a476e..3c42b412c8 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgTest.java @@ -16,8 +16,8 @@ class FunctionDSLimplWithPropertiesTwoArgTest extends FunctionDSLimplTestBase { @Override SerializableFunction> getImplementationGenerator() { - SerializableTriFunction functionBody - = (fp, arg1, arg2) -> ANY; + SerializableTriFunction functionBody = + (fp, 
arg1, arg2) -> ANY; return FunctionDSL.implWithProperties(functionBody, ANY_TYPE, ANY_TYPE, ANY_TYPE); } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgsTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgsTest.java index f690485801..748d734373 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLimplWithPropertiesTwoArgsTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.expression.function; - import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.data.model.ExprValue; @@ -17,8 +16,8 @@ class FunctionDSLimplWithPropertiesTwoArgsTest extends FunctionDSLimplTestBase { @Override SerializableFunction> getImplementationGenerator() { - SerializableTriFunction functionBody - = (fp, arg1, arg2) -> ANY; + SerializableTriFunction functionBody = + (fp, arg1, arg2) -> ANY; return FunctionDSL.implWithProperties(functionBody, ANY_TYPE, ANY_TYPE, ANY_TYPE); } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLnullMissingHandlingTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLnullMissingHandlingTest.java index 0cea222843..a1cc305003 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLnullMissingHandlingTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionDSLnullMissingHandlingTest.java @@ -28,106 +28,119 @@ void nullMissingHandling_oneArg_apply() { assertEquals(ANY, nullMissingHandling(oneArg).apply(ANY)); } - @Test void nullMissingHandling_oneArg_FunctionProperties_nullValue() { - assertEquals(NULL, + assertEquals( + NULL, nullMissingHandlingWithProperties(oneArgWithProperties).apply(functionProperties, NULL)); } @Test void 
nullMissingHandling_oneArg_FunctionProperties_missingValue() { - assertEquals(MISSING, + assertEquals( + MISSING, nullMissingHandlingWithProperties(oneArgWithProperties).apply(functionProperties, MISSING)); } @Test void nullMissingHandling_oneArg_FunctionProperties_apply() { - assertEquals(ANY, + assertEquals( + ANY, nullMissingHandlingWithProperties(oneArgWithProperties).apply(functionProperties, ANY)); } @Test void nullMissingHandling_twoArgs_FunctionProperties_nullValue_firstArg() { - assertEquals(NULL, + assertEquals( + NULL, nullMissingHandlingWithProperties(twoArgWithProperties) .apply(functionProperties, NULL, ANY)); } @Test void nullMissingHandling_twoArgs_FunctionProperties_nullValue_secondArg() { - assertEquals(NULL, + assertEquals( + NULL, nullMissingHandlingWithProperties(twoArgWithProperties) .apply(functionProperties, ANY, NULL)); } @Test void nullMissingHandling_twoArgs_FunctionProperties_missingValue_firstArg() { - assertEquals(MISSING, + assertEquals( + MISSING, nullMissingHandlingWithProperties(twoArgWithProperties) .apply(functionProperties, MISSING, ANY)); } @Test void nullMissingHandling_twoArgs_FunctionProperties_missingValue_secondArg() { - assertEquals(MISSING, + assertEquals( + MISSING, nullMissingHandlingWithProperties(twoArgWithProperties) .apply(functionProperties, ANY, MISSING)); } @Test void nullMissingHandling_twoArgs_FunctionProperties_apply() { - assertEquals(ANY, + assertEquals( + ANY, nullMissingHandlingWithProperties(twoArgWithProperties) .apply(functionProperties, ANY, ANY)); } @Test void nullMissingHandling_threeArgs_FunctionProperties_nullValue_firstArg() { - assertEquals(NULL, + assertEquals( + NULL, nullMissingHandlingWithProperties(threeArgsWithProperties) .apply(functionProperties, NULL, ANY, ANY)); } @Test void nullMissingHandling_threeArgs_FunctionProperties_nullValue_secondArg() { - assertEquals(NULL, + assertEquals( + NULL, nullMissingHandlingWithProperties(threeArgsWithProperties) .apply(functionProperties, ANY, NULL, 
ANY)); } @Test void nullMissingHandling_threeArgs_FunctionProperties_nullValue_thirdArg() { - assertEquals(NULL, + assertEquals( + NULL, nullMissingHandlingWithProperties(threeArgsWithProperties) .apply(functionProperties, ANY, ANY, NULL)); } - @Test void nullMissingHandling_threeArgs_FunctionProperties_missingValue_firstArg() { - assertEquals(MISSING, + assertEquals( + MISSING, nullMissingHandlingWithProperties(threeArgsWithProperties) .apply(functionProperties, MISSING, ANY, ANY)); } @Test void nullMissingHandling_threeArgs_FunctionProperties_missingValue_secondArg() { - assertEquals(MISSING, + assertEquals( + MISSING, nullMissingHandlingWithProperties(threeArgsWithProperties) .apply(functionProperties, ANY, MISSING, ANY)); } @Test void nullMissingHandling_threeArgs_FunctionProperties_missingValue_thirdArg() { - assertEquals(MISSING, + assertEquals( + MISSING, nullMissingHandlingWithProperties(threeArgsWithProperties) .apply(functionProperties, ANY, ANY, MISSING)); } @Test void nullMissingHandling_threeArgs_FunctionProperties_apply() { - assertEquals(ANY, + assertEquals( + ANY, nullMissingHandlingWithProperties(threeArgsWithProperties) .apply(functionProperties, ANY, ANY, ANY)); } @@ -142,7 +155,6 @@ void nullMissingHandling_twoArgs_secondArg_nullValue() { assertEquals(NULL, nullMissingHandling(twoArgs).apply(ANY, NULL)); } - @Test void nullMissingHandling_twoArgs_firstArg_missingValue() { assertEquals(MISSING, nullMissingHandling(twoArgs).apply(MISSING, ANY)); @@ -158,7 +170,6 @@ void nullMissingHandling_twoArgs_apply() { assertEquals(ANY, nullMissingHandling(twoArgs).apply(ANY, ANY)); } - @Test void nullMissingHandling_threeArgs_firstArg_nullValue() { assertEquals(NULL, nullMissingHandling(threeArgs).apply(NULL, ANY, ANY)); @@ -174,7 +185,6 @@ void nullMissingHandling_threeArgs_thirdArg_nullValue() { assertEquals(NULL, nullMissingHandling(threeArgs).apply(ANY, ANY, NULL)); } - @Test void nullMissingHandling_threeArgs_firstArg_missingValue() { 
assertEquals(MISSING, nullMissingHandling(threeArgs).apply(MISSING, ANY, ANY)); diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java index ed4a8a514d..b531d049fd 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionPropertiesTest.java @@ -75,16 +75,17 @@ void functionProperties_can_be_deserialized() throws IOException, ClassNotFoundE @TestFactory Stream functionProperties_none_throws_on_access() { - Consumer tb = tc -> { - RuntimeException e = assertThrows(FunctionProperties.UnexpectedCallException.class, tc); - assertEquals("FunctionProperties.None is a null object and not meant to be accessed.", - e.getMessage()); - }; + Consumer tb = + tc -> { + RuntimeException e = assertThrows(FunctionProperties.UnexpectedCallException.class, tc); + assertEquals( + "FunctionProperties.None is a null object and not meant to be accessed.", + e.getMessage()); + }; return Stream.of( - DynamicTest.dynamicTest("getQueryStartClock", - () -> tb.accept(FunctionProperties.None::getQueryStartClock)), - DynamicTest.dynamicTest("getSystemClock", - () -> tb.accept(FunctionProperties.None::getSystemClock)) - ); + DynamicTest.dynamicTest( + "getQueryStartClock", () -> tb.accept(FunctionProperties.None::getQueryStartClock)), + DynamicTest.dynamicTest( + "getSystemClock", () -> tb.accept(FunctionProperties.None::getSystemClock))); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/FunctionSignatureTest.java b/core/src/test/java/org/opensearch/sql/expression/function/FunctionSignatureTest.java index cc658bff98..2fb5dc468e 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/FunctionSignatureTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/FunctionSignatureTest.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -25,10 +24,8 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) class FunctionSignatureTest { - @Mock - private FunctionSignature funcSignature; - @Mock - private List funcParamTypeList; + @Mock private FunctionSignature funcSignature; + @Mock private List funcParamTypeList; private FunctionName unresolvedFuncName = FunctionName.of("add"); private List unresolvedParamTypeList = diff --git a/core/src/test/java/org/opensearch/sql/expression/function/OpenSearchFunctionsTest.java b/core/src/test/java/org/opensearch/sql/expression/function/OpenSearchFunctionsTest.java index d90d8295c4..168b73acc4 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/OpenSearchFunctionsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/OpenSearchFunctionsTest.java @@ -24,44 +24,48 @@ import org.opensearch.sql.expression.NamedArgumentExpression; import org.opensearch.sql.expression.env.Environment; - public class OpenSearchFunctionsTest extends ExpressionTestBase { - private final NamedArgumentExpression field = new NamedArgumentExpression( - "field", DSL.literal("message")); - private final NamedArgumentExpression fields = new NamedArgumentExpression( - "fields", DSL.literal(new ExprTupleValue(new LinkedHashMap<>(Map.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F)))))); - private final NamedArgumentExpression query = new NamedArgumentExpression( - "query", DSL.literal("search query")); - private final NamedArgumentExpression analyzer = new NamedArgumentExpression( - "analyzer", DSL.literal("keyword")); - private final NamedArgumentExpression autoGenerateSynonymsPhrase = new NamedArgumentExpression( - "auto_generate_synonyms_phrase", DSL.literal("true")); - private final NamedArgumentExpression 
fuzziness = new NamedArgumentExpression( - "fuzziness", DSL.literal("AUTO")); - private final NamedArgumentExpression maxExpansions = new NamedArgumentExpression( - "max_expansions", DSL.literal("10")); - private final NamedArgumentExpression prefixLength = new NamedArgumentExpression( - "prefix_length", DSL.literal("1")); - private final NamedArgumentExpression fuzzyTranspositions = new NamedArgumentExpression( - "fuzzy_transpositions", DSL.literal("false")); - private final NamedArgumentExpression fuzzyRewrite = new NamedArgumentExpression( - "fuzzy_rewrite", DSL.literal("rewrite method")); - private final NamedArgumentExpression lenient = new NamedArgumentExpression( - "lenient", DSL.literal("true")); - private final NamedArgumentExpression operator = new NamedArgumentExpression( - "operator", DSL.literal("OR")); - private final NamedArgumentExpression minimumShouldMatch = new NamedArgumentExpression( - "minimum_should_match", DSL.literal("1")); - private final NamedArgumentExpression zeroTermsQueryAll = new NamedArgumentExpression( - "zero_terms_query", DSL.literal("ALL")); - private final NamedArgumentExpression zeroTermsQueryNone = new NamedArgumentExpression( - "zero_terms_query", DSL.literal("None")); - private final NamedArgumentExpression boost = new NamedArgumentExpression( - "boost", DSL.literal("2.0")); - private final NamedArgumentExpression slop = new NamedArgumentExpression( - "slop", DSL.literal("3")); + private final NamedArgumentExpression field = + new NamedArgumentExpression("field", DSL.literal("message")); + private final NamedArgumentExpression fields = + new NamedArgumentExpression( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + Map.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F)))))); + private final NamedArgumentExpression query = + new NamedArgumentExpression("query", DSL.literal("search query")); + private final NamedArgumentExpression analyzer = + new 
NamedArgumentExpression("analyzer", DSL.literal("keyword")); + private final NamedArgumentExpression autoGenerateSynonymsPhrase = + new NamedArgumentExpression("auto_generate_synonyms_phrase", DSL.literal("true")); + private final NamedArgumentExpression fuzziness = + new NamedArgumentExpression("fuzziness", DSL.literal("AUTO")); + private final NamedArgumentExpression maxExpansions = + new NamedArgumentExpression("max_expansions", DSL.literal("10")); + private final NamedArgumentExpression prefixLength = + new NamedArgumentExpression("prefix_length", DSL.literal("1")); + private final NamedArgumentExpression fuzzyTranspositions = + new NamedArgumentExpression("fuzzy_transpositions", DSL.literal("false")); + private final NamedArgumentExpression fuzzyRewrite = + new NamedArgumentExpression("fuzzy_rewrite", DSL.literal("rewrite method")); + private final NamedArgumentExpression lenient = + new NamedArgumentExpression("lenient", DSL.literal("true")); + private final NamedArgumentExpression operator = + new NamedArgumentExpression("operator", DSL.literal("OR")); + private final NamedArgumentExpression minimumShouldMatch = + new NamedArgumentExpression("minimum_should_match", DSL.literal("1")); + private final NamedArgumentExpression zeroTermsQueryAll = + new NamedArgumentExpression("zero_terms_query", DSL.literal("ALL")); + private final NamedArgumentExpression zeroTermsQueryNone = + new NamedArgumentExpression("zero_terms_query", DSL.literal("None")); + private final NamedArgumentExpression boost = + new NamedArgumentExpression("boost", DSL.literal("2.0")); + private final NamedArgumentExpression slop = + new NamedArgumentExpression("slop", DSL.literal("3")); @Test void match() { @@ -80,50 +84,135 @@ void match() { expr = DSL.match(field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength); + expr = 
+ DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions, fuzzyRewrite); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions, + fuzzyRewrite); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions, fuzzyRewrite, lenient); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions, + fuzzyRewrite, + lenient); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions, fuzzyRewrite, lenient, operator); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions, + fuzzyRewrite, + lenient, + operator); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions, fuzzyRewrite, lenient, operator); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions, + fuzzyRewrite, + lenient, + operator); assertEquals(BOOLEAN, 
expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions, fuzzyRewrite, lenient, operator, minimumShouldMatch); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions, + fuzzyRewrite, + lenient, + operator, + minimumShouldMatch); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions, fuzzyRewrite, lenient, operator, minimumShouldMatch, - zeroTermsQueryAll); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions, + fuzzyRewrite, + lenient, + operator, + minimumShouldMatch, + zeroTermsQueryAll); assertEquals(BOOLEAN, expr.type()); - expr = DSL.match( - field, query, analyzer, autoGenerateSynonymsPhrase, fuzziness, maxExpansions, prefixLength, - fuzzyTranspositions, fuzzyRewrite, lenient, operator, minimumShouldMatch, - zeroTermsQueryNone, boost); + expr = + DSL.match( + field, + query, + analyzer, + autoGenerateSynonymsPhrase, + fuzziness, + maxExpansions, + prefixLength, + fuzzyTranspositions, + fuzzyRewrite, + lenient, + operator, + minimumShouldMatch, + zeroTermsQueryNone, + boost); assertEquals(BOOLEAN, expr.type()); } @@ -134,20 +223,16 @@ void match_phrase() { } } - List match_phrase_dsl_expressions() { return List.of( - DSL.match_phrase(field, query), - DSL.match_phrase(field, query, analyzer), - DSL.match_phrase(field, query, analyzer, zeroTermsQueryAll), - DSL.match_phrase(field, query, analyzer, zeroTermsQueryNone, slop) - ); + DSL.match_phrase(field, query), + DSL.match_phrase(field, query, analyzer), + DSL.match_phrase(field, query, analyzer, zeroTermsQueryAll), + DSL.match_phrase(field, query, analyzer, zeroTermsQueryNone, slop)); } List 
match_phrase_prefix_dsl_expressions() { - return List.of( - DSL.match_phrase_prefix(field, query) - ); + return List.of(DSL.match_phrase_prefix(field, query)); } @Test @@ -160,7 +245,8 @@ public void match_phrase_prefix() { @Test void match_in_memory() { FunctionExpression expr = DSL.match(field, query); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> expr.valueOf(valueEnv()), "OpenSearch defined function [match] is only supported in WHERE and HAVING clause."); } @@ -174,50 +260,52 @@ void match_to_string() { @Test void multi_match() { FunctionExpression expr = DSL.multi_match(fields, query); - assertEquals(String.format("multi_match(fields=%s, query=%s)", - fields.getValue(), query.getValue()), + assertEquals( + String.format("multi_match(fields=%s, query=%s)", fields.getValue(), query.getValue()), expr.toString()); } @Test void simple_query_string() { FunctionExpression expr = DSL.simple_query_string(fields, query); - assertEquals(String.format("simple_query_string(fields=%s, query=%s)", - fields.getValue(), query.getValue()), + assertEquals( + String.format( + "simple_query_string(fields=%s, query=%s)", fields.getValue(), query.getValue()), expr.toString()); } @Test void query() { FunctionExpression expr = DSL.query(query); - assertEquals(String.format("query(query=%s)", query.getValue()), - expr.toString()); + assertEquals(String.format("query(query=%s)", query.getValue()), expr.toString()); } @Test void query_string() { FunctionExpression expr = DSL.query_string(fields, query); - assertEquals(String.format("query_string(fields=%s, query=%s)", - fields.getValue(), query.getValue()), + assertEquals( + String.format("query_string(fields=%s, query=%s)", fields.getValue(), query.getValue()), expr.toString()); } @Test void wildcard_query() { FunctionExpression expr = DSL.wildcard_query(field, query); - assertEquals(String.format("wildcard_query(field=%s, query=%s)", - field.getValue(), 
query.getValue()), + assertEquals( + String.format("wildcard_query(field=%s, query=%s)", field.getValue(), query.getValue()), expr.toString()); } @Test void nested_query() { FunctionExpression expr = DSL.nested(DSL.ref("message.info", STRING)); - assertEquals(String.format("FunctionExpression(functionName=%s, arguments=[message.info])", - BuiltinFunctionName.NESTED.getName()), + assertEquals( + String.format( + "FunctionExpression(functionName=%s, arguments=[message.info])", + BuiltinFunctionName.NESTED.getName()), expr.toString()); - Environment nestedTuple = ExprValueUtils.tupleValue( - Map.of("message", Map.of("info", "result"))).bindingTuples(); + Environment nestedTuple = + ExprValueUtils.tupleValue(Map.of("message", Map.of("info", "result"))).bindingTuples(); assertEquals(expr.valueOf(nestedTuple), ExprValueUtils.stringValue("result")); assertEquals(expr.type(), STRING); } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java b/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java index 85fc3a0276..c678ac6eb4 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/RelevanceFunctionResolverTest.java @@ -36,18 +36,15 @@ void resolve_correct_name_test() { void resolve_invalid_name_test() { var wrongFuncName = FunctionName.of("wrong_func"); var sig = new FunctionSignature(wrongFuncName, List.of(STRING)); - Exception exception = assertThrows(SemanticCheckException.class, - () -> resolver.resolve(sig)); - assertEquals("Expected 'sample_function' but got 'wrong_func'", - exception.getMessage()); + Exception exception = assertThrows(SemanticCheckException.class, () -> resolver.resolve(sig)); + assertEquals("Expected 'sample_function' but got 'wrong_func'", exception.getMessage()); } @Test void resolve_invalid_third_param_type_test() { var sig = new 
FunctionSignature(sampleFuncName, List.of(STRING, STRING, INTEGER, STRING)); - Exception exception = assertThrows(SemanticCheckException.class, - () -> resolver.resolve(sig)); - assertEquals("Expected type STRING instead of INTEGER for parameter #3", - exception.getMessage()); + Exception exception = assertThrows(SemanticCheckException.class, () -> resolver.resolve(sig)); + assertEquals( + "Expected type STRING instead of INTEGER for parameter #3", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java b/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java index 27b36a0fec..3b6e5f7586 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -39,8 +38,7 @@ class WideningTypeRuleTest { private static Table numberWidenRule = - new ImmutableTable.Builder() + new ImmutableTable.Builder() .put(BYTE, SHORT, 1) .put(BYTE, INTEGER, 2) .put(BYTE, LONG, 3) @@ -77,35 +75,37 @@ class WideningTypeRuleTest { private static Stream distanceArguments() { List exprTypes = ExprCoreType.coreTypes(); return Lists.cartesianProduct(exprTypes, exprTypes).stream() - .map(list -> { - ExprCoreType type1 = list.get(0); - ExprCoreType type2 = list.get(1); - if (type1 == type2) { - return Arguments.of(type1, type2, TYPE_EQUAL); - } else if (numberWidenRule.contains(type1, type2)) { - return Arguments.of(type1, type2, numberWidenRule.get(type1, type2)); - } else { - return Arguments.of(type1, type2, IMPOSSIBLE_WIDENING); - } - }); + .map( + list -> { + ExprCoreType type1 = list.get(0); + ExprCoreType type2 = list.get(1); + if (type1 == type2) { + return Arguments.of(type1, type2, TYPE_EQUAL); + } else 
if (numberWidenRule.contains(type1, type2)) { + return Arguments.of(type1, type2, numberWidenRule.get(type1, type2)); + } else { + return Arguments.of(type1, type2, IMPOSSIBLE_WIDENING); + } + }); } private static Stream validMaxTypes() { List exprTypes = ExprCoreType.coreTypes(); return Lists.cartesianProduct(exprTypes, exprTypes).stream() - .map(list -> { - ExprCoreType type1 = list.get(0); - ExprCoreType type2 = list.get(1); - if (type1 == type2) { - return Arguments.of(type1, type2, type1); - } else if (numberWidenRule.contains(type1, type2)) { - return Arguments.of(type1, type2, type2); - } else if (numberWidenRule.contains(type2, type1)) { - return Arguments.of(type1, type2, type1); - } else { - return Arguments.of(type1, type2, null); - } - }); + .map( + list -> { + ExprCoreType type1 = list.get(0); + ExprCoreType type2 = list.get(1); + if (type1 == type2) { + return Arguments.of(type1, type2, type1); + } else if (numberWidenRule.contains(type1, type2)) { + return Arguments.of(type1, type2, type2); + } else if (numberWidenRule.contains(type2, type1)) { + return Arguments.of(type1, type2, type1); + } else { + return Arguments.of(type1, type2, null); + } + }); } @ParameterizedTest @@ -118,8 +118,8 @@ public void distance(ExprCoreType v1, ExprCoreType v2, Integer expected) { @MethodSource("validMaxTypes") public void max(ExprCoreType v1, ExprCoreType v2, ExprCoreType expected) { if (null == expected) { - ExpressionEvaluationException exception = assertThrows( - ExpressionEvaluationException.class, () -> WideningTypeRule.max(v1, v2)); + ExpressionEvaluationException exception = + assertThrows(ExpressionEvaluationException.class, () -> WideningTypeRule.max(v1, v2)); assertEquals(String.format("no max type of %s and %s ", v1, v2), exception.getMessage()); } else { assertEquals(expected, WideningTypeRule.max(v1, v2)); @@ -128,10 +128,9 @@ public void max(ExprCoreType v1, ExprCoreType v2, ExprCoreType expected) { @Test public void 
maxOfUndefinedAndOthersShouldBeTheOtherType() { - ExprCoreType.coreTypes().forEach(type -> - assertEquals(type, WideningTypeRule.max(type, UNDEFINED))); - ExprCoreType.coreTypes().forEach(type -> - assertEquals(type, WideningTypeRule.max(UNDEFINED, type))); + ExprCoreType.coreTypes() + .forEach(type -> assertEquals(type, WideningTypeRule.max(type, UNDEFINED))); + ExprCoreType.coreTypes() + .forEach(type -> assertEquals(type, WideningTypeRule.max(UNDEFINED, type))); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java index 028ace6231..24c0d0decf 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.arthmetic; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -40,21 +39,34 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class ArithmeticFunctionTest extends ExpressionTestBase { private static Stream arithmeticFunctionArguments() { - List numberOp1 = Arrays.asList(new ExprByteValue(3), new ExprShortValue(3), - new ExprIntegerValue(3), new ExprLongValue(3L), new ExprFloatValue(3f), - new ExprDoubleValue(3D)); + List numberOp1 = + Arrays.asList( + new ExprByteValue(3), + new ExprShortValue(3), + new ExprIntegerValue(3), + new ExprLongValue(3L), + new ExprFloatValue(3f), + new ExprDoubleValue(3D)); List numberOp2 = - Arrays.asList(new ExprByteValue(2), new ExprShortValue(2), new ExprIntegerValue(2), + Arrays.asList( + new ExprByteValue(2), + new ExprShortValue(2), + new ExprIntegerValue(2), new ExprLongValue(3L), - new ExprFloatValue(2f), new ExprDoubleValue(2D)); + new ExprFloatValue(2f), + new ExprDoubleValue(2D)); 
return Lists.cartesianProduct(numberOp1, numberOp2).stream() .map(list -> Arguments.of(list.get(0), list.get(1))); } private static Stream arithmeticOperatorArguments() { - return Stream - .of(BuiltinFunctionName.ADD, BuiltinFunctionName.SUBTRACT, BuiltinFunctionName.MULTIPLY, - BuiltinFunctionName.DIVIDE, BuiltinFunctionName.DIVIDE).map(Arguments::of); + return Stream.of( + BuiltinFunctionName.ADD, + BuiltinFunctionName.SUBTRACT, + BuiltinFunctionName.MULTIPLY, + BuiltinFunctionName.DIVIDE, + BuiltinFunctionName.DIVIDE) + .map(Arguments::of); } @ParameterizedTest(name = "add({1}, {2})") @@ -73,10 +85,9 @@ public void addFunction(ExprValue op1, ExprValue op2) { FunctionExpression expression = DSL.addFunction(literal(op1), literal(op2)); ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); - assertValueEqual(BuiltinFunctionName.ADDFUNCTION, - expectedType, op1, op2, expression.valueOf()); - assertEquals(String.format("add(%s, %s)", - op1.toString(), op2.toString()), expression.toString()); + assertValueEqual(BuiltinFunctionName.ADDFUNCTION, expectedType, op1, op2, expression.valueOf()); + assertEquals( + String.format("add(%s, %s)", op1.toString(), op2.toString()), expression.toString()); } @ParameterizedTest(name = "subtract({1}, {2})") @@ -85,10 +96,8 @@ public void subtract(ExprValue op1, ExprValue op2) { FunctionExpression expression = DSL.subtract(literal(op1), literal(op2)); ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); - assertValueEqual(BuiltinFunctionName.SUBTRACT, expectedType, op1, op2, - expression.valueOf()); - assertEquals(String.format("-(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertValueEqual(BuiltinFunctionName.SUBTRACT, expectedType, op1, op2, expression.valueOf()); + assertEquals(String.format("-(%s, %s)", op1.toString(), op2.toString()), expression.toString()); } 
@ParameterizedTest(name = "subtractFunction({1}, {2})") @@ -97,10 +106,10 @@ public void subtractFunction(ExprValue op1, ExprValue op2) { FunctionExpression expression = DSL.subtractFunction(literal(op1), literal(op2)); ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); - assertValueEqual(BuiltinFunctionName.SUBTRACTFUNCTION, expectedType, op1, op2, - expression.valueOf()); - assertEquals(String.format("subtract(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertValueEqual( + BuiltinFunctionName.SUBTRACTFUNCTION, expectedType, op1, op2, expression.valueOf()); + assertEquals( + String.format("subtract(%s, %s)", op1.toString(), op2.toString()), expression.toString()); } @ParameterizedTest(name = "mod({1}, {2})") @@ -110,8 +119,8 @@ public void mod(ExprValue op1, ExprValue op2) { ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); assertValueEqual(BuiltinFunctionName.MOD, expectedType, op1, op2, expression.valueOf()); - assertEquals(String.format("mod(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertEquals( + String.format("mod(%s, %s)", op1.toString(), op2.toString()), expression.toString()); expression = DSL.mod(literal(op1), literal(new ExprByteValue(0))); assertTrue(expression.valueOf(valueEnv()).isNull()); @@ -125,8 +134,8 @@ public void modulus(ExprValue op1, ExprValue op2) { ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); assertValueEqual(BuiltinFunctionName.MODULUS, expectedType, op1, op2, expression.valueOf()); - assertEquals(String.format("%%(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertEquals( + String.format("%%(%s, %s)", op1.toString(), op2.toString()), expression.toString()); expression = DSL.modulus(literal(op1), literal(new ExprByteValue(0))); 
assertTrue(expression.valueOf(valueEnv()).isNull()); @@ -139,10 +148,10 @@ public void modulusFunction(ExprValue op1, ExprValue op2) { FunctionExpression expression = DSL.modulusFunction(literal(op1), literal(op2)); ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); - assertValueEqual(BuiltinFunctionName.MODULUSFUNCTION, - expectedType, op1, op2, expression.valueOf()); - assertEquals(String.format("modulus(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertValueEqual( + BuiltinFunctionName.MODULUSFUNCTION, expectedType, op1, op2, expression.valueOf()); + assertEquals( + String.format("modulus(%s, %s)", op1.toString(), op2.toString()), expression.toString()); expression = DSL.modulusFunction(literal(op1), literal(new ExprByteValue(0))); assertTrue(expression.valueOf(valueEnv()).isNull()); @@ -155,10 +164,8 @@ public void multiply(ExprValue op1, ExprValue op2) { FunctionExpression expression = DSL.multiply(literal(op1), literal(op2)); ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); - assertValueEqual(BuiltinFunctionName.MULTIPLY, expectedType, op1, op2, - expression.valueOf()); - assertEquals(String.format("*(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertValueEqual(BuiltinFunctionName.MULTIPLY, expectedType, op1, op2, expression.valueOf()); + assertEquals(String.format("*(%s, %s)", op1.toString(), op2.toString()), expression.toString()); } @ParameterizedTest(name = "multiplyFunction({1}, {2})") @@ -167,10 +174,10 @@ public void multiplyFunction(ExprValue op1, ExprValue op2) { FunctionExpression expression = DSL.multiplyFunction(literal(op1), literal(op2)); ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); - assertValueEqual(BuiltinFunctionName.MULTIPLYFUNCTION, expectedType, op1, op2, - expression.valueOf()); - 
assertEquals(String.format("multiply(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertValueEqual( + BuiltinFunctionName.MULTIPLYFUNCTION, expectedType, op1, op2, expression.valueOf()); + assertEquals( + String.format("multiply(%s, %s)", op1.toString(), op2.toString()), expression.toString()); } @ParameterizedTest(name = "divide({1}, {2})") @@ -180,8 +187,7 @@ public void divide(ExprValue op1, ExprValue op2) { ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); assertValueEqual(BuiltinFunctionName.DIVIDE, expectedType, op1, op2, expression.valueOf()); - assertEquals(String.format("/(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertEquals(String.format("/(%s, %s)", op1.toString(), op2.toString()), expression.toString()); expression = DSL.divide(literal(op1), literal(new ExprByteValue(0))); assertTrue(expression.valueOf(valueEnv()).isNull()); @@ -194,10 +200,10 @@ public void divideFunction(ExprValue op1, ExprValue op2) { FunctionExpression expression = DSL.divideFunction(literal(op1), literal(op2)); ExprType expectedType = WideningTypeRule.max(op1.type(), op2.type()); assertEquals(expectedType, expression.type()); - assertValueEqual(BuiltinFunctionName.DIVIDEFUNCTION, - expectedType, op1, op2, expression.valueOf()); - assertEquals(String.format("divide(%s, %s)", op1.toString(), op2.toString()), - expression.toString()); + assertValueEqual( + BuiltinFunctionName.DIVIDEFUNCTION, expectedType, op1, op2, expression.valueOf()); + assertEquals( + String.format("divide(%s, %s)", op1.toString(), op2.toString()), expression.toString()); expression = DSL.divideFunction(literal(op1), literal(new ExprByteValue(0))); assertTrue(expression.valueOf(valueEnv()).isNull()); @@ -207,38 +213,51 @@ public void divideFunction(ExprValue op1, ExprValue op2) { @ParameterizedTest(name = "multipleParameters({1},{2})") @MethodSource("arithmeticFunctionArguments") public void 
multipleParameters(ExprValue op1) { - assertThrows(ExpressionEvaluationException.class, - () -> DSL.add(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.addFunction(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.add(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.addFunction(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.subtract(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.subtractFunction(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.subtract(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.subtractFunction(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.multiply(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.multiplyFunction(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.multiply(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.multiplyFunction(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.divide(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.divideFunction(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.divide(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.divideFunction(literal(op1), literal(op1), literal(op1))); - 
assertThrows(ExpressionEvaluationException.class, - () -> DSL.mod(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.modulus(literal(op1), literal(op1), literal(op1))); - assertThrows(ExpressionEvaluationException.class, - () -> DSL.modulusFunction(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.mod(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.modulus(literal(op1), literal(op1), literal(op1))); + assertThrows( + ExpressionEvaluationException.class, + () -> DSL.modulusFunction(literal(op1), literal(op1), literal(op1))); } - protected void assertValueEqual(BuiltinFunctionName builtinFunctionName, ExprType type, - ExprValue op1, - ExprValue op2, - ExprValue actual) { + protected void assertValueEqual( + BuiltinFunctionName builtinFunctionName, + ExprType type, + ExprValue op1, + ExprValue op2, + ExprValue actual) { switch ((ExprCoreType) type) { case BYTE: Byte vb1 = op1.byteValue(); diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java index 4267805c74..e084bf1d53 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.arthmetic; import static org.hamcrest.MatcherAssert.assertThat; @@ -76,28 +75,30 @@ private static Stream testLogDoubleArguments() { } private static Stream testLogInvalidDoubleArguments() { - return Stream.of(Arguments.of(0D, -2D), - Arguments.of(0D, 2D), - Arguments.of(2D, 0D)); + return Stream.of(Arguments.of(0D, -2D), Arguments.of(0D, 2D), 
Arguments.of(2D, 0D)); } private static Stream trigonometricArguments() { Stream.Builder builder = Stream.builder(); return builder - .add(Arguments.of(1)).add(Arguments.of(1L)).add(Arguments.of(1F)).add(Arguments.of(1D)) + .add(Arguments.of(1)) + .add(Arguments.of(1L)) + .add(Arguments.of(1F)) + .add(Arguments.of(1D)) .build(); } private static Stream trigonometricDoubleArguments() { Stream.Builder builder = Stream.builder(); return builder - .add(Arguments.of(1, 2)).add(Arguments.of(1L, 2L)).add(Arguments.of(1F, 2F)) - .add(Arguments.of(1D, 2D)).build(); + .add(Arguments.of(1, 2)) + .add(Arguments.of(1L, 2L)) + .add(Arguments.of(1F, 2F)) + .add(Arguments.of(1D, 2D)) + .build(); } - /** - * Test abs with byte value. - */ + /** Test abs with byte value. */ @ParameterizedTest(name = "abs({0})") @ValueSource(bytes = {-2, 2}) public void abs_byte_value(Byte value) { @@ -106,61 +107,43 @@ public void abs_byte_value(Byte value) { assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); } - /** - * Test abs with integer value. - */ + /** Test abs with integer value. */ @ParameterizedTest(name = "abs({0})") @ValueSource(ints = {-2, 2}) public void abs_int_value(Integer value) { FunctionExpression abs = DSL.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue(Math.abs(value)))); + assertThat(abs.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue(Math.abs(value)))); assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); } - /** - * Test abs with long value. - */ + /** Test abs with long value. 
*/ @ParameterizedTest(name = "abs({0})") @ValueSource(longs = {-2L, 2L}) public void abs_long_value(Long value) { FunctionExpression abs = DSL.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue(Math.abs(value)))); + assertThat(abs.valueOf(valueEnv()), allOf(hasType(LONG), hasValue(Math.abs(value)))); assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); } - /** - * Test abs with float value. - */ + /** Test abs with float value. */ @ParameterizedTest(name = "abs({0})") @ValueSource(floats = {-2f, 2f}) public void abs_float_value(Float value) { FunctionExpression abs = DSL.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(FLOAT), hasValue(Math.abs(value)))); + assertThat(abs.valueOf(valueEnv()), allOf(hasType(FLOAT), hasValue(Math.abs(value)))); assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); } - /** - * Test abs with double value. - */ + /** Test abs with double value. */ @ParameterizedTest(name = "abs({0})") @ValueSource(doubles = {-2L, 2L}) public void abs_double_value(Double value) { FunctionExpression abs = DSL.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.abs(value)))); + assertThat(abs.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.abs(value)))); assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); } - /** - * Test abs with short value. - */ + /** Test abs with short value. */ @ParameterizedTest(name = "abs({0})") @ValueSource(shorts = {-2, 2}) public void abs_short_value(Short value) { @@ -171,16 +154,12 @@ public void abs_short_value(Short value) { assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); } - /** - * Test ceil/ceiling with integer value. - */ + /** Test ceil/ceiling with integer value. 
*/ @ParameterizedTest(name = "ceil({0})") @ValueSource(ints = {2, -2}) public void ceil_int_value(Integer value) { FunctionExpression ceil = DSL.ceil(DSL.literal(value)); - assertThat( - ceil.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); + assertThat(ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); FunctionExpression ceiling = DSL.ceiling(DSL.literal(value)); @@ -189,15 +168,12 @@ public void ceil_int_value(Integer value) { assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); } - /** - * Test ceil/ceiling with long value. - */ + /** Test ceil/ceiling with long value. */ @ParameterizedTest(name = "ceil({0})") @ValueSource(longs = {2L, -2L}) public void ceil_long_value(Long value) { FunctionExpression ceil = DSL.ceil(DSL.literal(value)); - assertThat( - ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); + assertThat(ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); FunctionExpression ceiling = DSL.ceiling(DSL.literal(value)); @@ -206,15 +182,12 @@ public void ceil_long_value(Long value) { assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); } - /** - * Test ceil/ceiling with long value. - */ + /** Test ceil/ceiling with long value. 
*/ @ParameterizedTest(name = "ceil({0})") @ValueSource(longs = {9223372036854775805L, -9223372036854775805L}) public void ceil_long_value_long(Long value) { FunctionExpression ceil = DSL.ceil(DSL.literal(value)); - assertThat( - ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); + assertThat(ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); FunctionExpression ceiling = DSL.ceiling(DSL.literal(value)); @@ -223,15 +196,12 @@ public void ceil_long_value_long(Long value) { assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); } - /** - * Test ceil/ceiling with float value. - */ + /** Test ceil/ceiling with float value. */ @ParameterizedTest(name = "ceil({0})") @ValueSource(floats = {2F, -2F}) public void ceil_float_value(Float value) { FunctionExpression ceil = DSL.ceil(DSL.literal(value)); - assertThat( - ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); + assertThat(ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); FunctionExpression ceiling = DSL.ceiling(DSL.literal(value)); @@ -240,15 +210,12 @@ public void ceil_float_value(Float value) { assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); } - /** - * Test ceil/ceiling with double value. - */ + /** Test ceil/ceiling with double value. 
*/ @ParameterizedTest(name = "ceil({0})") @ValueSource(doubles = {-2L, 2L}) public void ceil_double_value(Double value) { FunctionExpression ceil = DSL.ceil(DSL.literal(value)); - assertThat( - ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); + assertThat(ceil.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.ceil(value)))); assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); FunctionExpression ceiling = DSL.ceiling(DSL.literal(value)); @@ -257,9 +224,7 @@ public void ceil_double_value(Double value) { assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); } - /** - * Test conv from decimal base with string as a number. - */ + /** Test conv from decimal base with string as a number. */ @ParameterizedTest(name = "conv({0})") @ValueSource(strings = {"1", "0", "-1"}) public void conv_from_decimal(String value) { @@ -282,34 +247,27 @@ public void conv_from_decimal(String value) { assertEquals(String.format("conv(\"%s\", 10, 16)", value), conv.toString()); } - /** - * Test conv from decimal base with integer as a number. - */ + /** Test conv from decimal base with integer as a number. 
*/ @ParameterizedTest(name = "conv({0})") @ValueSource(ints = {1, 0, -1}) public void conv_from_decimal(Integer value) { FunctionExpression conv = DSL.conv(DSL.literal(value), DSL.literal(10), DSL.literal(2)); assertThat( - conv.valueOf(valueEnv()), - allOf(hasType(STRING), hasValue(Integer.toString(value, 2)))); + conv.valueOf(valueEnv()), allOf(hasType(STRING), hasValue(Integer.toString(value, 2)))); assertEquals(String.format("conv(%s, 10, 2)", value), conv.toString()); conv = DSL.conv(DSL.literal(value), DSL.literal(10), DSL.literal(8)); assertThat( - conv.valueOf(valueEnv()), - allOf(hasType(STRING), hasValue(Integer.toString(value, 8)))); + conv.valueOf(valueEnv()), allOf(hasType(STRING), hasValue(Integer.toString(value, 8)))); assertEquals(String.format("conv(%s, 10, 8)", value), conv.toString()); conv = DSL.conv(DSL.literal(value), DSL.literal(10), DSL.literal(16)); assertThat( - conv.valueOf(valueEnv()), - allOf(hasType(STRING), hasValue(Integer.toString(value, 16)))); + conv.valueOf(valueEnv()), allOf(hasType(STRING), hasValue(Integer.toString(value, 16)))); assertEquals(String.format("conv(%s, 10, 16)", value), conv.toString()); } - /** - * Test conv to decimal base with string as a number. - */ + /** Test conv to decimal base with string as a number. */ @ParameterizedTest(name = "conv({0})") @ValueSource(strings = {"11", "0", "11111"}) public void conv_to_decimal(String value) { @@ -332,9 +290,7 @@ public void conv_to_decimal(String value) { assertEquals(String.format("conv(\"%s\", 16, 10)", value), conv.toString()); } - /** - * Test conv to decimal base with integer as a number. - */ + /** Test conv to decimal base with integer as a number. */ @ParameterizedTest(name = "conv({0})") @ValueSource(ints = {11, 0, 11111}) public void conv_to_decimal(Integer value) { @@ -357,273 +313,187 @@ public void conv_to_decimal(Integer value) { assertEquals(String.format("conv(%s, 16, 10)", value), conv.toString()); } - /** - * Test crc32 with string value. 
- */ + /** Test crc32 with string value. */ @ParameterizedTest(name = "crc({0})") @ValueSource(strings = {"odfe", "sql"}) public void crc32_string_value(String value) { FunctionExpression crc = DSL.crc32(DSL.literal(value)); CRC32 crc32 = new CRC32(); crc32.update(value.getBytes()); - assertThat( - crc.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue(crc32.getValue()))); + assertThat(crc.valueOf(valueEnv()), allOf(hasType(LONG), hasValue(crc32.getValue()))); assertEquals(String.format("crc32(\"%s\")", value), crc.toString()); } - /** - * Test constant e. - */ + /** Test constant e. */ @Test public void test_e() { FunctionExpression e = DSL.euler(); assertThat(e.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.E))); } - /** - * Test exp with integer value. - */ + /** Test exp with integer value. */ @ParameterizedTest(name = "exp({0})") @ValueSource(ints = {-2, 2}) public void exp_int_value(Integer value) { FunctionExpression exp = DSL.exp(DSL.literal(value)); - assertThat( - exp.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertThat(exp.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); } - /** - * Test exp with long value. - */ + /** Test exp with long value. */ @ParameterizedTest(name = "exp({0})") @ValueSource(longs = {-2L, 2L}) public void exp_long_value(Long value) { FunctionExpression exp = DSL.exp(DSL.literal(value)); - assertThat( - exp.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertThat(exp.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); } - /** - * Test exp with float value. - */ + /** Test exp with float value. 
*/ @ParameterizedTest(name = "exp({0})") @ValueSource(floats = {-2F, 2F}) public void exp_float_value(Float value) { FunctionExpression exp = DSL.exp(DSL.literal(value)); - assertThat( - exp.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertThat(exp.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); } - /** - * Test exp with double value. - */ + /** Test exp with double value. */ @ParameterizedTest(name = "exp({0})") @ValueSource(doubles = {-2D, 2D}) public void exp_double_value(Double value) { FunctionExpression exp = DSL.exp(DSL.literal(value)); - assertThat( - exp.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertThat(exp.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); } - /** - * Test expm1 with integer value. - */ + /** Test expm1 with integer value. */ @ParameterizedTest(name = "expm1({0})") - @ValueSource(ints = { - -1, 0, 1, Integer.MAX_VALUE, Integer.MIN_VALUE}) + @ValueSource(ints = {-1, 0, 1, Integer.MAX_VALUE, Integer.MIN_VALUE}) public void expm1_int_value(Integer value) { FunctionExpression expm1 = DSL.expm1(DSL.literal(value)); - assertThat( - expm1.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); + assertThat(expm1.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); assertEquals(String.format("expm1(%s)", value), expm1.toString()); } - /** - * Test expm1 with long value. - */ + /** Test expm1 with long value. 
*/ @ParameterizedTest(name = "expm1({0})") - @ValueSource(longs = { - -1L, 0L, 1L, Long.MAX_VALUE, Long.MIN_VALUE}) + @ValueSource(longs = {-1L, 0L, 1L, Long.MAX_VALUE, Long.MIN_VALUE}) public void expm1_long_value(Long value) { FunctionExpression expm1 = DSL.expm1(DSL.literal(value)); - assertThat( - expm1.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); + assertThat(expm1.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); assertEquals(String.format("expm1(%s)", value), expm1.toString()); } - /** - * Test expm1 with float value. - */ + /** Test expm1 with float value. */ @ParameterizedTest(name = "expm1({0})") - @ValueSource(floats = { - -1.5F, -1F, 0F, 1F, 1.5F, Float.MAX_VALUE, Float.MIN_VALUE}) + @ValueSource(floats = {-1.5F, -1F, 0F, 1F, 1.5F, Float.MAX_VALUE, Float.MIN_VALUE}) public void expm1_float_value(Float value) { FunctionExpression expm1 = DSL.expm1(DSL.literal(value)); - assertThat( - expm1.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); + assertThat(expm1.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); assertEquals(String.format("expm1(%s)", value), expm1.toString()); } - /** - * Test expm1 with double value. - */ + /** Test expm1 with double value. */ @ParameterizedTest(name = "expm1({0})") - @ValueSource(doubles = { - -1.5D, -1D, 0D, 1D, 1.5D, Double.MAX_VALUE, Double.MIN_VALUE}) + @ValueSource(doubles = {-1.5D, -1D, 0D, 1D, 1.5D, Double.MAX_VALUE, Double.MIN_VALUE}) public void expm1_double_value(Double value) { FunctionExpression expm1 = DSL.expm1(DSL.literal(value)); - assertThat( - expm1.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); + assertThat(expm1.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); assertEquals(String.format("expm1(%s)", value), expm1.toString()); } - /** - * Test expm1 with short value. - */ + /** Test expm1 with short value. 
*/ @ParameterizedTest(name = "expm1({0})") - @ValueSource(shorts = { - -1, 0, 1, Short.MAX_VALUE, Short.MIN_VALUE}) + @ValueSource(shorts = {-1, 0, 1, Short.MAX_VALUE, Short.MIN_VALUE}) public void expm1_short_value(Short value) { FunctionExpression expm1 = DSL.expm1(DSL.literal(value)); - assertThat( - expm1.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); + assertThat(expm1.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); assertEquals(String.format("expm1(%s)", value), expm1.toString()); } - /** - * Test expm1 with byte value. - */ + /** Test expm1 with byte value. */ @ParameterizedTest(name = "expm1({0})") - @ValueSource(bytes = { - -1, 0, 1, Byte.MAX_VALUE, Byte.MIN_VALUE}) + @ValueSource(bytes = {-1, 0, 1, Byte.MAX_VALUE, Byte.MIN_VALUE}) public void expm1_byte_value(Byte value) { FunctionExpression expm1 = DSL.expm1(DSL.literal(value)); - assertThat( - expm1.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); + assertThat(expm1.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.expm1(value)))); assertEquals(String.format("expm1(%s)", value), expm1.toString()); } - /** - * Test floor with integer value. - */ + /** Test floor with integer value. */ @ParameterizedTest(name = "floor({0})") @ValueSource(ints = {-2, 2}) public void floor_int_value(Integer value) { FunctionExpression floor = DSL.floor(DSL.literal(value)); - assertThat( - floor.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue((long) Math.floor(value)))); + assertThat(floor.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.floor(value)))); assertEquals(String.format("floor(%s)", value), floor.toString()); } - /** - * Test floor with long value. - */ + /** Test floor with long value. 
*/ @ParameterizedTest(name = "floor({0})") @ValueSource(longs = {-2L, 2L}) public void floor_long_value(Long value) { FunctionExpression floor = DSL.floor(DSL.literal(value)); - assertThat( - floor.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue((long) Math.floor(value)))); + assertThat(floor.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.floor(value)))); assertEquals(String.format("floor(%s)", value.toString()), floor.toString()); } - /** - * Test floor with float value. - */ + /** Test floor with float value. */ @ParameterizedTest(name = "floor({0})") @ValueSource(floats = {-2F, 2F}) public void floor_float_value(Float value) { FunctionExpression floor = DSL.floor(DSL.literal(value)); - assertThat( - floor.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue((long) Math.floor(value)))); + assertThat(floor.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.floor(value)))); assertEquals(String.format("floor(%s)", value.toString()), floor.toString()); } - /** - * Test floor with double value. - */ + /** Test floor with double value. */ @ParameterizedTest(name = "floor({0})") @ValueSource(doubles = {-2D, 2D}) public void floor_double_value(Double value) { FunctionExpression floor = DSL.floor(DSL.literal(value)); - assertThat( - floor.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue((long) Math.floor(value)))); + assertThat(floor.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.floor(value)))); assertEquals(String.format("floor(%s)", value.toString()), floor.toString()); } - /** - * Test ln with integer value. - */ + /** Test ln with integer value. 
*/ @ParameterizedTest(name = "ln({0})") @ValueSource(ints = {2, 3}) public void ln_int_value(Integer value) { FunctionExpression ln = DSL.ln(DSL.literal(value)); - assertThat( - ln.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertThat(ln.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.log(value)))); assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); } - /** - * Test ln with long value. - */ + /** Test ln with long value. */ @ParameterizedTest(name = "ln({0})") @ValueSource(longs = {2L, 3L}) public void ln_long_value(Long value) { FunctionExpression ln = DSL.ln(DSL.literal(value)); - assertThat( - ln.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertThat(ln.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.log(value)))); assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); } - /** - * Test ln with float value. - */ + /** Test ln with float value. */ @ParameterizedTest(name = "ln({0})") @ValueSource(floats = {2F, 3F}) public void ln_float_value(Float value) { FunctionExpression ln = DSL.ln(DSL.literal(value)); - assertThat( - ln.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertThat(ln.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.log(value)))); assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); } - /** - * Test ln with double value. - */ + /** Test ln with double value. */ @ParameterizedTest(name = "ln({0})") @ValueSource(doubles = {2D, 3D}) public void ln_double_value(Double value) { FunctionExpression ln = DSL.ln(DSL.literal(value)); - assertThat( - ln.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertThat(ln.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.log(value)))); assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); } - /** - * Test ln with invalid value. - */ + /** Test ln with invalid value. 
*/ @ParameterizedTest(name = "ln({0})") @ValueSource(doubles = {0D, -3D}) public void ln_invalid_value(Double value) { @@ -632,69 +502,47 @@ public void ln_invalid_value(Double value) { assertTrue(ln.valueOf(valueEnv()).isNull()); } - /** - * Test log with 1 int argument. - */ + /** Test log with 1 int argument. */ @ParameterizedTest(name = "log({0})") @ValueSource(ints = {2, 3}) public void log_int_value(Integer v) { FunctionExpression log = DSL.log(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v), 0.0001)); assertEquals(String.format("log(%s)", v.toString()), log.toString()); } - /** - * Test log with 1 long argument. - */ + /** Test log with 1 long argument. */ @ParameterizedTest(name = "log({0})") @ValueSource(longs = {2L, 3L}) public void log_int_value(Long v) { FunctionExpression log = DSL.log(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v), 0.0001)); assertEquals(String.format("log(%s)", v.toString()), log.toString()); } - /** - * Test log with 1 float argument. - */ + /** Test log with 1 float argument. */ @ParameterizedTest(name = "log({0})") @ValueSource(floats = {2F, 3F}) public void log_float_value(Float v) { FunctionExpression log = DSL.log(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v), 0.0001)); assertEquals(String.format("log(%s)", v.toString()), log.toString()); } - /** - * Test log with 1 double argument. - */ + /** Test log with 1 double argument. 
*/ @ParameterizedTest(name = "log({0})") @ValueSource(doubles = {2D, 3D}) public void log_double_value(Double v) { FunctionExpression log = DSL.log(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v), 0.0001)); assertEquals(String.format("log(%s)", v.toString()), log.toString()); } - /** - * Test log with 1 invalid value. - */ + /** Test log with 1 invalid value. */ @ParameterizedTest(name = "log({0})") @ValueSource(doubles = {0D, -3D}) public void log_invalid_value(Double value) { @@ -703,65 +551,51 @@ public void log_invalid_value(Double value) { assertTrue(log.valueOf(valueEnv()).isNull()); } - /** - * Test log with 2 int arguments. - */ + /** Test log with 2 int arguments. */ @ParameterizedTest(name = "log({0}, {1})") @MethodSource("testLogIntegerArguments") public void log_two_int_value(Integer v1, Integer v2) { FunctionExpression log = DSL.log(DSL.literal(v1), DSL.literal(v2)); assertEquals(log.type(), DOUBLE); assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v2) / Math.log(v1), 0.0001)); assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); } - /** - * Test log with 2 long arguments. - */ + /** Test log with 2 long arguments. 
*/ @ParameterizedTest(name = "log({0}, {1})") @MethodSource("testLogLongArguments") public void log_two_long_value(Long v1, Long v2) { FunctionExpression log = DSL.log(DSL.literal(v1), DSL.literal(v2)); assertEquals(log.type(), DOUBLE); assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v2) / Math.log(v1), 0.0001)); assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); } - /** - * Test log with 2 float arguments. - */ + /** Test log with 2 float arguments. */ @ParameterizedTest(name = "log({0}, {1})") @MethodSource("testLogFloatArguments") public void log_two_double_value(Float v1, Float v2) { FunctionExpression log = DSL.log(DSL.literal(v1), DSL.literal(v2)); assertEquals(log.type(), DOUBLE); assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v2) / Math.log(v1), 0.0001)); assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); } - /** - * Test log with 2 double arguments. - */ + /** Test log with 2 double arguments. */ @ParameterizedTest(name = "log({0}, {1})") @MethodSource("testLogDoubleArguments") public void log_two_double_value(Double v1, Double v2) { FunctionExpression log = DSL.log(DSL.literal(v1), DSL.literal(v2)); assertEquals(log.type(), DOUBLE); assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v2) / Math.log(v1), 0.0001)); assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); } - /** - * Test log with 2 invalid double arguments. - */ + /** Test log with 2 invalid double arguments. 
*/ @ParameterizedTest(name = "log({0}, {2})") @MethodSource("testLogInvalidDoubleArguments") public void log_two_invalid_double_value(Double v1, Double v2) { @@ -770,69 +604,47 @@ public void log_two_invalid_double_value(Double v1, Double v2) { assertTrue(log.valueOf(valueEnv()).isNull()); } - /** - * Test log10 with int value. - */ + /** Test log10 with int value. */ @ParameterizedTest(name = "log10({0})") @ValueSource(ints = {2, 3}) public void log10_int_value(Integer v) { FunctionExpression log = DSL.log10(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log10(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log10(v), 0.0001)); assertEquals(String.format("log10(%s)", v.toString()), log.toString()); } - /** - * Test log10 with long value. - */ + /** Test log10 with long value. */ @ParameterizedTest(name = "log10({0})") @ValueSource(longs = {2L, 3L}) public void log10_long_value(Long v) { FunctionExpression log = DSL.log10(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log10(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log10(v), 0.0001)); assertEquals(String.format("log10(%s)", v.toString()), log.toString()); } - /** - * Test log10 with float value. - */ + /** Test log10 with float value. */ @ParameterizedTest(name = "log10({0})") @ValueSource(floats = {2F, 3F}) public void log10_float_value(Float v) { FunctionExpression log = DSL.log10(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log10(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log10(v), 0.0001)); assertEquals(String.format("log10(%s)", v.toString()), log.toString()); } - /** - * Test log10 with int value. - */ + /** Test log10 with int value. 
*/ @ParameterizedTest(name = "log10({0})") @ValueSource(doubles = {2D, 3D}) public void log10_double_value(Double v) { FunctionExpression log = DSL.log10(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log10(v), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log10(v), 0.0001)); assertEquals(String.format("log10(%s)", v.toString()), log.toString()); } - /** - * Test log10 with 1 invalid double argument. - */ + /** Test log10 with 1 invalid double argument. */ @ParameterizedTest(name = "log10({0})") @ValueSource(doubles = {0D, -3D}) public void log10_two_invalid_value(Double v) { @@ -841,69 +653,47 @@ public void log10_two_invalid_value(Double v) { assertTrue(log.valueOf(valueEnv()).isNull()); } - /** - * Test log2 with int value. - */ + /** Test log2 with int value. */ @ParameterizedTest(name = "log10({0})") @ValueSource(ints = {2, 3}) public void log2_int_value(Integer v) { FunctionExpression log = DSL.log2(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v) / Math.log(2), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v) / Math.log(2), 0.0001)); assertEquals(String.format("log2(%s)", v.toString()), log.toString()); } - /** - * Test log2 with long value. - */ + /** Test log2 with long value. */ @ParameterizedTest(name = "log10({0})") @ValueSource(longs = {2L, 3L}) public void log2_long_value(Long v) { FunctionExpression log = DSL.log2(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v) / Math.log(2), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v) / Math.log(2), 0.0001)); assertEquals(String.format("log2(%s)", v.toString()), log.toString()); } - /** - * Test log2 with float value. - */ + /** Test log2 with float value. 
*/ @ParameterizedTest(name = "log10({0})") @ValueSource(floats = {2F, 3F}) public void log2_float_value(Float v) { FunctionExpression log = DSL.log2(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v) / Math.log(2), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v) / Math.log(2), 0.0001)); assertEquals(String.format("log2(%s)", v.toString()), log.toString()); } - /** - * Test log2 with double value. - */ + /** Test log2 with double value. */ @ParameterizedTest(name = "log10({0})") @ValueSource(doubles = {2D, 3D}) public void log2_double_value(Double v) { FunctionExpression log = DSL.log2(DSL.literal(v)); assertEquals(log.type(), DOUBLE); - assertThat( - getDoubleValue(log.valueOf(valueEnv())), - closeTo(Math.log(v) / Math.log(2), 0.0001) - ); + assertThat(getDoubleValue(log.valueOf(valueEnv())), closeTo(Math.log(v) / Math.log(2), 0.0001)); assertEquals(String.format("log2(%s)", v.toString()), log.toString()); } - /** - * Test log2 with an invalid double value. - */ + /** Test log2 with an invalid double value. */ @ParameterizedTest(name = "log2({0})") @ValueSource(doubles = {0D, -2D}) public void log2_invalid_double_value(Double v) { @@ -912,9 +702,7 @@ public void log2_invalid_double_value(Double v) { assertTrue(log.valueOf(valueEnv()).isNull()); } - /** - * Test mod with byte value. - */ + /** Test mod with byte value. */ @ParameterizedTest(name = "mod({0}, {1})") @MethodSource("testLogByteArguments") public void mod_byte_value(Byte v1, Byte v2) { @@ -930,9 +718,7 @@ public void mod_byte_value(Byte v1, Byte v2) { assertTrue(mod.valueOf(valueEnv()).isNull()); } - /** - * Test mod with short value. - */ + /** Test mod with short value. 
*/ @ParameterizedTest(name = "mod({0}, {1})") @MethodSource("testLogShortArguments") public void mod_short_value(Short v1, Short v2) { @@ -940,8 +726,7 @@ public void mod_short_value(Short v1, Short v2) { assertThat( mod.valueOf(valueEnv()), - allOf(hasType(SHORT), - hasValue(Integer.valueOf(v1 % v2).shortValue()))); + allOf(hasType(SHORT), hasValue(Integer.valueOf(v1 % v2).shortValue()))); assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString()); mod = DSL.mod(DSL.literal(v1), DSL.literal(new ExprShortValue(0))); @@ -949,16 +734,12 @@ public void mod_short_value(Short v1, Short v2) { assertTrue(mod.valueOf(valueEnv()).isNull()); } - /** - * Test mod with integer value. - */ + /** Test mod with integer value. */ @ParameterizedTest(name = "mod({0}, {1})") @MethodSource("testLogIntegerArguments") public void mod_int_value(Integer v1, Integer v2) { FunctionExpression mod = DSL.mod(DSL.literal(v1), DSL.literal(v2)); - assertThat( - mod.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue(v1 % v2))); + assertThat(mod.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue(v1 % v2))); assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString()); mod = DSL.mod(DSL.literal(v1), DSL.literal(0)); @@ -966,16 +747,12 @@ public void mod_int_value(Integer v1, Integer v2) { assertTrue(mod.valueOf(valueEnv()).isNull()); } - /** - * Test mod with long value. - */ + /** Test mod with long value. 
*/ @ParameterizedTest(name = "mod({0}, {1})") @MethodSource("testLogLongArguments") public void mod_long_value(Long v1, Long v2) { FunctionExpression mod = DSL.mod(DSL.literal(v1), DSL.literal(v2)); - assertThat( - mod.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue(v1 % v2))); + assertThat(mod.valueOf(valueEnv()), allOf(hasType(LONG), hasValue(v1 % v2))); assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString()); mod = DSL.mod(DSL.literal(v1), DSL.literal(0)); @@ -983,16 +760,12 @@ public void mod_long_value(Long v1, Long v2) { assertTrue(mod.valueOf(valueEnv()).isNull()); } - /** - * Test mod with long value. - */ + /** Test mod with long value. */ @ParameterizedTest(name = "mod({0}, {1})") @MethodSource("testLogFloatArguments") public void mod_float_value(Float v1, Float v2) { FunctionExpression mod = DSL.mod(DSL.literal(v1), DSL.literal(v2)); - assertThat( - mod.valueOf(valueEnv()), - allOf(hasType(FLOAT), hasValue(v1 % v2))); + assertThat(mod.valueOf(valueEnv()), allOf(hasType(FLOAT), hasValue(v1 % v2))); assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString()); mod = DSL.mod(DSL.literal(v1), DSL.literal(0)); @@ -1000,16 +773,12 @@ public void mod_float_value(Float v1, Float v2) { assertTrue(mod.valueOf(valueEnv()).isNull()); } - /** - * Test mod with double value. - */ + /** Test mod with double value. 
*/ @ParameterizedTest(name = "mod({0}, {1})") @MethodSource("testLogDoubleArguments") public void mod_double_value(Double v1, Double v2) { FunctionExpression mod = DSL.mod(DSL.literal(v1), DSL.literal(v2)); - assertThat( - mod.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(v1 % v2))); + assertThat(mod.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(v1 % v2))); assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString()); mod = DSL.mod(DSL.literal(v1), DSL.literal(0)); @@ -1017,105 +786,72 @@ public void mod_double_value(Double v1, Double v2) { assertTrue(mod.valueOf(valueEnv()).isNull()); } - /** - * Test pow/power with short value. - */ + /** Test pow/power with short value. */ @ParameterizedTest(name = "pow({0}, {1}") @MethodSource("testLogShortArguments") public void pow_short_value(Short v1, Short v2) { FunctionExpression pow = DSL.pow(DSL.literal(v1), DSL.literal(v2)); - assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); - FunctionExpression power = - DSL.power(DSL.literal(v1), DSL.literal(v2)); - assertThat( - power.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + FunctionExpression power = DSL.power(DSL.literal(v1), DSL.literal(v2)); + assertThat(power.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); } - /** - * Test pow/power with integer value. - */ + /** Test pow/power with integer value. 
*/ @ParameterizedTest(name = "pow({0}, {1}") @MethodSource("testLogIntegerArguments") public void pow_int_value(Integer v1, Integer v2) { FunctionExpression pow = DSL.pow(DSL.literal(v1), DSL.literal(v2)); - assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); FunctionExpression power = DSL.power(DSL.literal(v1), DSL.literal(v2)); - assertThat( - power.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(power.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); } - /** - * Test pow/power with long value. - */ + /** Test pow/power with long value. */ @ParameterizedTest(name = "pow({0}, {1}") @MethodSource("testLogLongArguments") public void pow_long_value(Long v1, Long v2) { FunctionExpression pow = DSL.pow(DSL.literal(v1), DSL.literal(v2)); - assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); FunctionExpression power = DSL.power(DSL.literal(v1), DSL.literal(v2)); - assertThat( - power.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(power.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); } - /** - * Test pow/power with float value. - */ + /** Test pow/power with float value. 
*/ @ParameterizedTest(name = "pow({0}, {1}") @MethodSource("testLogFloatArguments") public void pow_float_value(Float v1, Float v2) { FunctionExpression pow = DSL.pow(DSL.literal(v1), DSL.literal(v2)); - assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); FunctionExpression power = DSL.power(DSL.literal(v1), DSL.literal(v2)); - assertThat( - power.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(power.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); } - /** - * Test pow/power with double value. - */ + /** Test pow/power with double value. */ @ParameterizedTest(name = "pow({0}, {1}") @MethodSource("testLogDoubleArguments") public void pow_double_value(Double v1, Double v2) { FunctionExpression pow = DSL.pow(DSL.literal(v1), DSL.literal(v2)); - assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); FunctionExpression power = DSL.power(DSL.literal(v1), DSL.literal(v2)); - assertThat( - power.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertThat(power.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); } - /** - * Test pow/power with null output. - */ + /** Test pow/power with null output. 
*/ @Test public void pow_null_output() { FunctionExpression pow = DSL.pow(DSL.literal((double) -2), DSL.literal(1.5)); @@ -1129,367 +865,318 @@ public void pow_null_output() { assertTrue(pow.valueOf(valueEnv()).isNull()); } - /** - * Test pow/power with edge cases. - */ + /** Test pow/power with edge cases. */ @Test public void pow_edge_cases() { FunctionExpression pow = DSL.pow(DSL.literal((double) -2), DSL.literal((double) 2)); assertEquals(pow.type(), DOUBLE); - assertEquals(String.format("pow(%s, %s)",(double) -2, (double) 2), pow.toString()); - assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(-2, 2)))); + assertEquals(String.format("pow(%s, %s)", (double) -2, (double) 2), pow.toString()); + assertThat(pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(-2, 2)))); pow = DSL.pow(DSL.literal((double) 2), DSL.literal((double) 1.5)); assertEquals(pow.type(), DOUBLE); assertEquals(String.format("pow(%s, %s)", (double) 2, (double) 1.5), pow.toString()); - assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow(2, 1.5)))); + assertThat(pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow(2, 1.5)))); pow = DSL.pow(DSL.literal((float) -2), DSL.literal((float) 2)); assertEquals(pow.type(), DOUBLE); assertEquals(String.format("pow(%s, %s)", (float) -2, (float) 2), pow.toString()); assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow((float) -2, (float) 2)))); + pow.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.pow((float) -2, (float) 2)))); pow = DSL.pow(DSL.literal((float) 2), DSL.literal((float) 1.5)); assertEquals(pow.type(), DOUBLE); assertEquals(String.format("pow(%s, %s)", (float) 2, (float) 1.5), pow.toString()); assertThat( - pow.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.pow((float) 2, (float) 1.5)))); + pow.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow((float) 2, (float) 1.5)))); } - /** - * Test rint 
with byte value. - */ + /** Test rint with byte value. */ @ParameterizedTest(name = "rint({0})") - @ValueSource(bytes = { - -1, 0, 1, Byte.MAX_VALUE, Byte.MIN_VALUE}) + @ValueSource(bytes = {-1, 0, 1, Byte.MAX_VALUE, Byte.MIN_VALUE}) public void rint_byte_value(Byte value) { FunctionExpression rint = DSL.rint(DSL.literal(value)); - assertThat( - rint.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); + assertThat(rint.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); assertEquals(String.format("rint(%s)", value), rint.toString()); } - /** - * Test rint with short value. - */ + /** Test rint with short value. */ @ParameterizedTest(name = "rint({0})") - @ValueSource(shorts = { - -1, 0, 1, Short.MAX_VALUE, Short.MIN_VALUE}) + @ValueSource(shorts = {-1, 0, 1, Short.MAX_VALUE, Short.MIN_VALUE}) public void rint_short_value(Short value) { FunctionExpression rint = DSL.rint(DSL.literal(value)); - assertThat( - rint.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); + assertThat(rint.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); assertEquals(String.format("rint(%s)", value), rint.toString()); } - /** - * Test rint with integer value. - */ + /** Test rint with integer value. */ @ParameterizedTest(name = "rint({0})") - @ValueSource(ints = { - -1, 0, 1, Integer.MAX_VALUE, Integer.MIN_VALUE}) + @ValueSource(ints = {-1, 0, 1, Integer.MAX_VALUE, Integer.MIN_VALUE}) public void rint_int_value(Integer value) { FunctionExpression rint = DSL.rint(DSL.literal(value)); - assertThat( - rint.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); + assertThat(rint.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); assertEquals(String.format("rint(%s)", value), rint.toString()); } - /** - * Test rint with long value. - */ + /** Test rint with long value. 
*/ @ParameterizedTest(name = "rint({0})") - @ValueSource(longs = { - -1L, 0L, 1L, Long.MAX_VALUE, Long.MIN_VALUE}) + @ValueSource(longs = {-1L, 0L, 1L, Long.MAX_VALUE, Long.MIN_VALUE}) public void rint_long_value(Long value) { FunctionExpression rint = DSL.rint(DSL.literal(value)); - assertThat( - rint.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); + assertThat(rint.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); assertEquals(String.format("rint(%s)", value), rint.toString()); } - /** - * Test rint with float value. - */ + /** Test rint with float value. */ @ParameterizedTest(name = "rint({0})") - @ValueSource(floats = { - -1F, -0.75F, -0.5F, 0F, 0.5F, 0.500000001F, - 0.75F, 1F, 1.9999F, 42.42F, Float.MAX_VALUE, Float.MIN_VALUE}) + @ValueSource( + floats = { + -1F, + -0.75F, + -0.5F, + 0F, + 0.5F, + 0.500000001F, + 0.75F, + 1F, + 1.9999F, + 42.42F, + Float.MAX_VALUE, + Float.MIN_VALUE + }) public void rint_float_value(Float value) { FunctionExpression rint = DSL.rint(DSL.literal(value)); - assertThat( - rint.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); + assertThat(rint.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); assertEquals(String.format("rint(%s)", value), rint.toString()); } - /** - * Test rint with double value. - */ + /** Test rint with double value. 
*/ @ParameterizedTest(name = "rint({0})") - @ValueSource(doubles = { - -1F, -0.75F, -0.5F, 0F, 0.5F, 0.500000001F, - 0.75F, 1F, 1.9999F, 42.42F, Double.MAX_VALUE, Double.MIN_VALUE}) + @ValueSource( + doubles = { + -1F, + -0.75F, + -0.5F, + 0F, + 0.5F, + 0.500000001F, + 0.75F, + 1F, + 1.9999F, + 42.42F, + Double.MAX_VALUE, + Double.MIN_VALUE + }) public void rint_double_value(Double value) { FunctionExpression rint = DSL.rint(DSL.literal(value)); - assertThat( - rint.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); + assertThat(rint.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.rint(value)))); assertEquals(String.format("rint(%s)", value), rint.toString()); } - /** - * Test round with integer value. - */ + /** Test round with integer value. */ @ParameterizedTest(name = "round({0}") @ValueSource(ints = {21, -21}) public void round_int_value(Integer value) { FunctionExpression round = DSL.round(DSL.literal(value)); - assertThat( - round.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue((long) Math.round(value)))); + assertThat(round.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.round(value)))); assertEquals(String.format("round(%s)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue( - new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).longValue()))); + allOf( + hasType(LONG), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).longValue()))); assertEquals(String.format("round(%s, 1)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(-1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue( - new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).longValue()))); + allOf( + hasType(LONG), + hasValue(new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).longValue()))); assertEquals(String.format("round(%s, -1)", value), round.toString()); 
} - /** - * Test round with long value. - */ + /** Test round with long value. */ @ParameterizedTest(name = "round({0}") @ValueSource(longs = {21L, -21L}) public void round_long_value(Long value) { FunctionExpression round = DSL.round(DSL.literal(value)); - assertThat( - round.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue((long) Math.round(value)))); + assertThat(round.valueOf(valueEnv()), allOf(hasType(LONG), hasValue((long) Math.round(value)))); assertEquals(String.format("round(%s)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue( - new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).longValue()))); + allOf( + hasType(LONG), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).longValue()))); assertEquals(String.format("round(%s, 1)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(-1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(LONG), hasValue( - new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).longValue()))); + allOf( + hasType(LONG), + hasValue(new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).longValue()))); assertEquals(String.format("round(%s, -1)", value), round.toString()); } - /** - * Test round with float value. - */ + /** Test round with float value. 
*/ @ParameterizedTest(name = "round({0}") @ValueSource(floats = {21F, -21F}) public void round_float_value(Float value) { FunctionExpression round = DSL.round(DSL.literal(value)); assertThat( - round.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue((double) Math.round(value)))); + round.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue((double) Math.round(value)))); assertEquals(String.format("round(%s)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue( - new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).doubleValue()))); + allOf( + hasType(DOUBLE), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).doubleValue()))); assertEquals(String.format("round(%s, 1)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(-1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue( - new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).doubleValue()))); + allOf( + hasType(DOUBLE), + hasValue(new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).doubleValue()))); assertEquals(String.format("round(%s, -1)", value), round.toString()); } - /** - * Test round with double value. - */ + /** Test round with double value. 
*/ @ParameterizedTest(name = "round({0}") @ValueSource(doubles = {21D, -21D}) public void round_double_value(Double value) { FunctionExpression round = DSL.round(DSL.literal(value)); assertThat( - round.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue((double) Math.round(value)))); + round.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue((double) Math.round(value)))); assertEquals(String.format("round(%s)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue( - new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).doubleValue()))); + allOf( + hasType(DOUBLE), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).doubleValue()))); assertEquals(String.format("round(%s, 1)", value), round.toString()); round = DSL.round(DSL.literal(value), DSL.literal(-1)); assertThat( round.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue( - new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).doubleValue()))); + allOf( + hasType(DOUBLE), + hasValue(new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).doubleValue()))); assertEquals(String.format("round(%s, -1)", value), round.toString()); } - /** - * Test sign with integer value. - */ + /** Test sign with integer value. */ @ParameterizedTest(name = "sign({0})") @ValueSource(ints = {2, -2}) public void sign_int_value(Integer value) { FunctionExpression sign = DSL.sign(DSL.literal(value)); assertThat( - sign.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + sign.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("sign(%s)", value), sign.toString()); } - /** - * Test sign with long value. - */ + /** Test sign with long value. 
*/ @ParameterizedTest(name = "sign({0})") @ValueSource(longs = {2L, -2L}) public void sign_long_value(Long value) { FunctionExpression sign = DSL.sign(DSL.literal(value)); assertThat( - sign.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + sign.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("sign(%s)", value), sign.toString()); } - /** - * Test sign with float value. - */ + /** Test sign with float value. */ @ParameterizedTest(name = "sign({0})") @ValueSource(floats = {2F, -2F}) public void sign_float_value(Float value) { FunctionExpression sign = DSL.sign(DSL.literal(value)); assertThat( - sign.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + sign.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("sign(%s)", value), sign.toString()); } - /** - * Test sign with double value. - */ + /** Test sign with double value. */ @ParameterizedTest(name = "sign({0})") @ValueSource(doubles = {2, -2}) public void sign_double_value(Double value) { FunctionExpression sign = DSL.sign(DSL.literal(value)); assertThat( - sign.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + sign.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("sign(%s)", value), sign.toString()); } - /** - * Test signum with byte value. - */ + /** Test signum with byte value. 
*/ @ParameterizedTest(name = "signum({0})") @ValueSource(bytes = {2, 0, -2}) public void signum_bytes_value(Byte value) { FunctionExpression signum = DSL.signum(DSL.literal(value)); assertThat( - signum.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + signum.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("signum(%s)", value), signum.toString()); } - /** - * Test signum with short value. - */ + /** Test signum with short value. */ @ParameterizedTest(name = "signum({0})") @ValueSource(shorts = {2, 0, -2}) public void signum_short_value(Short value) { FunctionExpression signum = DSL.signum(DSL.literal(value)); assertThat( - signum.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + signum.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("signum(%s)", value), signum.toString()); } - /** - * Test signum with integer value. - */ + /** Test signum with integer value. */ @ParameterizedTest(name = "signum({0})") @ValueSource(ints = {2, 0, -2}) public void signum_int_value(Integer value) { FunctionExpression signum = DSL.signum(DSL.literal(value)); assertThat( - signum.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + signum.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("signum(%s)", value), signum.toString()); } - /** - * Test signum with long value. - */ + /** Test signum with long value. 
*/ @ParameterizedTest(name = "signum({0})") @ValueSource(longs = {2L, 0L, -2L}) public void signum_long_value(Long value) { FunctionExpression signum = DSL.signum(DSL.literal(value)); assertThat( - signum.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + signum.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("signum(%s)", value), signum.toString()); } - /** - * Test signum with float value. - */ + /** Test signum with float value. */ @ParameterizedTest(name = "signum({0})") @ValueSource(floats = {2F, 0F, -2F}) public void signum_float_value(Float value) { FunctionExpression signum = DSL.signum(DSL.literal(value)); assertThat( - signum.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + signum.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("signum(%s)", value), signum.toString()); } - /** - * Test signum with double value. - */ + /** Test signum with double value. */ @ParameterizedTest(name = "signum({0})") @ValueSource(doubles = {2, 0, -2}) public void signum_double_value(Double value) { FunctionExpression signum = DSL.signum(DSL.literal(value)); assertThat( - signum.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + signum.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); assertEquals(String.format("signum(%s)", value), signum.toString()); } - /** - * Test sinh with byte value. - */ + /** Test sinh with byte value. */ @ParameterizedTest(name = "sinh({0})") @ValueSource(bytes = {-1, 1, 2, Byte.MAX_VALUE, Byte.MIN_VALUE}) public void sinh_byte_value(Byte value) { @@ -1498,9 +1185,7 @@ public void sinh_byte_value(Byte value) { assertEquals(String.format("sinh(%s)", value), sinh.toString()); } - /** - * Test sinh with short value. - */ + /** Test sinh with short value. 
*/ @ParameterizedTest(name = "sinh({0})") @ValueSource(shorts = {-1, 1, 2, Short.MAX_VALUE, Short.MIN_VALUE}) public void sinh_short_value(Short value) { @@ -1509,9 +1194,7 @@ public void sinh_short_value(Short value) { assertEquals(String.format("sinh(%s)", value), sinh.toString()); } - /** - * Test sinh with integer value. - */ + /** Test sinh with integer value. */ @ParameterizedTest(name = "sinh({0})") @ValueSource(ints = {-1, 1, 2, Integer.MAX_VALUE, Integer.MIN_VALUE}) public void sinh_int_value(Integer value) { @@ -1520,9 +1203,7 @@ public void sinh_int_value(Integer value) { assertEquals(String.format("sinh(%s)", value), sinh.toString()); } - /** - * Test sinh with long value. - */ + /** Test sinh with long value. */ @ParameterizedTest(name = "sinh({0})") @ValueSource(longs = {-1L, 1L, 2L, Long.MAX_VALUE, Long.MIN_VALUE}) public void sinh_long_value(Long value) { @@ -1531,9 +1212,7 @@ public void sinh_long_value(Long value) { assertEquals(String.format("sinh(%s)", value), sinh.toString()); } - /** - * Test sinh with float value. - */ + /** Test sinh with float value. */ @ParameterizedTest(name = "sinh({0})") @ValueSource(floats = {-1.5F, -1F, 1F, 1.5F, 2F, 2.7F, Float.MAX_VALUE, Float.MIN_VALUE}) public void sinh_float_value(Float value) { @@ -1542,9 +1221,7 @@ public void sinh_float_value(Float value) { assertEquals(String.format("sinh(%s)", value), sinh.toString()); } - /** - * Test sinh with double value. - */ + /** Test sinh with double value. */ @ParameterizedTest(name = "sinh({0})") @ValueSource(doubles = {-1.5, -1D, 1D, 1.5D, 2D, 2.7D, Double.MAX_VALUE, Double.MIN_VALUE}) public void sinh_double_value(Double value) { @@ -1553,9 +1230,7 @@ public void sinh_double_value(Double value) { assertEquals(String.format("sinh(%s)", value), sinh.toString()); } - /** - * Test sqrt with int value. - */ + /** Test sqrt with int value. 
*/ @ParameterizedTest(name = "sqrt({0})") @ValueSource(ints = {1, 2}) public void sqrt_int_value(Integer value) { @@ -1564,9 +1239,7 @@ public void sqrt_int_value(Integer value) { assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); } - /** - * Test sqrt with long value. - */ + /** Test sqrt with long value. */ @ParameterizedTest(name = "sqrt({0})") @ValueSource(longs = {1L, 2L}) public void sqrt_long_value(Long value) { @@ -1575,9 +1248,7 @@ public void sqrt_long_value(Long value) { assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); } - /** - * Test sqrt with float value. - */ + /** Test sqrt with float value. */ @ParameterizedTest(name = "sqrt({0})") @ValueSource(floats = {1F, 2F}) public void sqrt_float_value(Float value) { @@ -1586,9 +1257,7 @@ public void sqrt_float_value(Float value) { assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); } - /** - * Test sqrt with double value. - */ + /** Test sqrt with double value. */ @ParameterizedTest(name = "sqrt({0})") @ValueSource(doubles = {1D, 2D}) public void sqrt_double_value(Double value) { @@ -1597,9 +1266,7 @@ public void sqrt_double_value(Double value) { assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); } - /** - * Test sqrt with negative value. - */ + /** Test sqrt with negative value. */ @ParameterizedTest(name = "sqrt({0})") @ValueSource(doubles = {-1D, -2D}) public void sqrt_negative_value(Double value) { @@ -1608,73 +1275,90 @@ public void sqrt_negative_value(Double value) { assertTrue(sqrt.valueOf(valueEnv()).isNull()); } - /** - * Test truncate with integer value. - */ + /** Test truncate with integer value. 
*/ @ParameterizedTest(name = "truncate({0}, {1})") @ValueSource(ints = {2, -2, Integer.MAX_VALUE, Integer.MIN_VALUE}) public void truncate_int_value(Integer value) { FunctionExpression truncate = DSL.truncate(DSL.literal(value), DSL.literal(1)); assertThat( - truncate.valueOf(valueEnv()), allOf(hasType(LONG), + truncate.valueOf(valueEnv()), + allOf( + hasType(LONG), hasValue(BigDecimal.valueOf(value).setScale(1, RoundingMode.DOWN).longValue()))); assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); } - /** - * Test truncate with long value. - */ + /** Test truncate with long value. */ @ParameterizedTest(name = "truncate({0}, {1})") @ValueSource(longs = {2L, -2L, Long.MAX_VALUE, Long.MIN_VALUE}) public void truncate_long_value(Long value) { FunctionExpression truncate = DSL.truncate(DSL.literal(value), DSL.literal(1)); assertThat( - truncate.valueOf(valueEnv()), allOf(hasType(LONG), + truncate.valueOf(valueEnv()), + allOf( + hasType(LONG), hasValue(BigDecimal.valueOf(value).setScale(1, RoundingMode.DOWN).longValue()))); assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); } - /** - * Test truncate with float value. - */ + /** Test truncate with float value. */ @ParameterizedTest(name = "truncate({0}, {1})") @ValueSource(floats = {2F, -2F, Float.MAX_VALUE, Float.MIN_VALUE}) public void truncate_float_value(Float value) { FunctionExpression truncate = DSL.truncate(DSL.literal(value), DSL.literal(1)); assertThat( - truncate.valueOf(valueEnv()), allOf(hasType(DOUBLE), + truncate.valueOf(valueEnv()), + allOf( + hasType(DOUBLE), hasValue(BigDecimal.valueOf(value).setScale(1, RoundingMode.DOWN).doubleValue()))); assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); } - /** - * Test truncate with double value. - */ + /** Test truncate with double value. 
*/ @ParameterizedTest(name = "truncate({0}, {1})") - @ValueSource(doubles = {2D, -9.223372036854776e+18D, -2147483649.0D, -2147483648.0D, - -32769.0D, -32768.0D, -34.84D, -2.0D, -1.2D, -1.0D, 0.0D, 1.0D, - 1.3D, 2.0D, 1004.3D, 32767.0D, 32768.0D, 2147483647.0D, 2147483648.0D, - 9.223372036854776e+18D, Double.MAX_VALUE, Double.MIN_VALUE}) + @ValueSource( + doubles = { + 2D, + -9.223372036854776e+18D, + -2147483649.0D, + -2147483648.0D, + -32769.0D, + -32768.0D, + -34.84D, + -2.0D, + -1.2D, + -1.0D, + 0.0D, + 1.0D, + 1.3D, + 2.0D, + 1004.3D, + 32767.0D, + 32768.0D, + 2147483647.0D, + 2147483648.0D, + 9.223372036854776e+18D, + Double.MAX_VALUE, + Double.MIN_VALUE + }) public void truncate_double_value(Double value) { FunctionExpression truncate = DSL.truncate(DSL.literal(value), DSL.literal(1)); assertThat( - truncate.valueOf(valueEnv()), allOf(hasType(DOUBLE), + truncate.valueOf(valueEnv()), + allOf( + hasType(DOUBLE), hasValue(BigDecimal.valueOf(value).setScale(1, RoundingMode.DOWN).doubleValue()))); assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); } - /** - * Test constant pi. - */ + /** Test constant pi. */ @Test public void test_pi() { FunctionExpression pi = DSL.pi(); assertThat(pi.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.PI))); } - /** - * Test rand with no argument. - */ + /** Test rand with no argument. */ @Test public void rand_no_arg() { FunctionExpression rand = DSL.rand(); @@ -1685,9 +1369,7 @@ public void rand_no_arg() { assertEquals("rand()", rand.toString()); } - /** - * Test rand with integer value. - */ + /** Test rand with integer value. 
*/ @ParameterizedTest(name = "rand({0})") @ValueSource(ints = {2, 3}) public void rand_int_value(Integer n) { @@ -1695,27 +1377,22 @@ public void rand_int_value(Integer n) { assertEquals(FLOAT, rand.type()); assertTrue( getFloatValue(rand.valueOf(valueEnv())) >= 0 - && getFloatValue(rand.valueOf(valueEnv())) < 1); + && getFloatValue(rand.valueOf(valueEnv())) < 1); assertEquals(getFloatValue(rand.valueOf(valueEnv())), new Random(n).nextFloat()); assertEquals(String.format("rand(%s)", n), rand.toString()); } - /** - * Test acos with integer, long, float, double values. - */ + /** Test acos with integer, long, float, double values. */ @ParameterizedTest(name = "acos({0})") @MethodSource("trigonometricArguments") public void test_acos(Number value) { FunctionExpression acos = DSL.acos(DSL.literal(value)); assertThat( - acos.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.acos(value.doubleValue())))); + acos.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.acos(value.doubleValue())))); assertEquals(String.format("acos(%s)", value), acos.toString()); } - /** - * Test acos with illegal values. - */ + /** Test acos with illegal values. */ @ParameterizedTest(name = "acos({0})") @ValueSource(doubles = {2D, -2D}) public void acos_with_illegal_value(Number value) { @@ -1724,22 +1401,17 @@ public void acos_with_illegal_value(Number value) { assertTrue(acos.valueOf(valueEnv()).isNull()); } - /** - * Test asin with integer, long, float, double values. - */ + /** Test asin with integer, long, float, double values. 
*/ @ParameterizedTest(name = "asin({0})") @MethodSource("trigonometricArguments") public void test_asin(Number value) { FunctionExpression asin = DSL.asin(DSL.literal(value)); assertThat( - asin.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.asin(value.doubleValue())))); + asin.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.asin(value.doubleValue())))); assertEquals(String.format("asin(%s)", value), asin.toString()); } - /** - * Test asin with illegal value. - */ + /** Test asin with illegal value. */ @ParameterizedTest(name = "asin({0})") @ValueSource(doubles = {2D, -2D}) public void asin_with_illegal_value(Number value) { @@ -1748,36 +1420,28 @@ public void asin_with_illegal_value(Number value) { assertTrue(asin.valueOf(valueEnv()).isNull()); } - /** - * Test atan with one argument integer, long, float, double values. - */ + /** Test atan with one argument integer, long, float, double values. */ @ParameterizedTest(name = "atan({0})") @MethodSource("trigonometricArguments") public void atan_one_arg(Number value) { FunctionExpression atan = DSL.atan(DSL.literal(value)); assertThat( - atan.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.atan(value.doubleValue())))); + atan.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.atan(value.doubleValue())))); assertEquals(String.format("atan(%s)", value), atan.toString()); } - /** - * Test atan with two arguments of integer, long, float, double values. - */ + /** Test atan with two arguments of integer, long, float, double values. 
*/ @ParameterizedTest(name = "atan({0}, {1})") @MethodSource("trigonometricDoubleArguments") public void atan_two_args(Number v1, Number v2) { - FunctionExpression atan = - DSL.atan(DSL.literal(v1), DSL.literal(v2)); + FunctionExpression atan = DSL.atan(DSL.literal(v1), DSL.literal(v2)); assertThat( atan.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.atan2(v1.doubleValue(), v2.doubleValue())))); assertEquals(String.format("atan(%s, %s)", v1, v2), atan.toString()); } - /** - * Test atan2 with integer, long, float, double values. - */ + /** Test atan2 with integer, long, float, double values. */ @ParameterizedTest(name = "atan2({0}, {1})") @MethodSource("trigonometricDoubleArguments") public void test_atan2(Number v1, Number v2) { @@ -1788,22 +1452,17 @@ public void test_atan2(Number v1, Number v2) { assertEquals(String.format("atan2(%s, %s)", v1, v2), atan2.toString()); } - /** - * Test cos with integer, long, float, double values. - */ + /** Test cos with integer, long, float, double values. */ @ParameterizedTest(name = "cos({0})") @MethodSource("trigonometricArguments") public void test_cos(Number value) { FunctionExpression cos = DSL.cos(DSL.literal(value)); assertThat( - cos.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.cos(value.doubleValue())))); + cos.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.cos(value.doubleValue())))); assertEquals(String.format("cos(%s)", value), cos.toString()); } - /** - * Test cosh with byte value. - */ + /** Test cosh with byte value. */ @ParameterizedTest(name = "cosh({0})") @ValueSource(bytes = {-1, 1, 2}) public void cosh_byte_value(Byte value) { @@ -1812,9 +1471,7 @@ public void cosh_byte_value(Byte value) { assertEquals(String.format("cosh(%s)", value), cosh.toString()); } - /** - * Test cosh with short value. - */ + /** Test cosh with short value. 
*/ @ParameterizedTest(name = "cosh({0})") @ValueSource(shorts = {-1, 1, 2}) public void cosh_short_value(Short value) { @@ -1823,9 +1480,7 @@ public void cosh_short_value(Short value) { assertEquals(String.format("cosh(%s)", value), cosh.toString()); } - /** - * Test cosh with integer value. - */ + /** Test cosh with integer value. */ @ParameterizedTest(name = "cosh({0})") @ValueSource(ints = {-1, 1, 2}) public void cosh_int_value(Integer value) { @@ -1834,9 +1489,7 @@ public void cosh_int_value(Integer value) { assertEquals(String.format("cosh(%s)", value), cosh.toString()); } - /** - * Test cosh with long value. - */ + /** Test cosh with long value. */ @ParameterizedTest(name = "cosh({0})") @ValueSource(longs = {-1L, 1L, 2L}) public void cosh_long_value(Long value) { @@ -1845,9 +1498,7 @@ public void cosh_long_value(Long value) { assertEquals(String.format("cosh(%s)", value), cosh.toString()); } - /** - * Test cosh with float value. - */ + /** Test cosh with float value. */ @ParameterizedTest(name = "cosh({0})") @ValueSource(floats = {-1F, 1F, 2F}) public void cosh_float_value(Float value) { @@ -1856,9 +1507,7 @@ public void cosh_float_value(Float value) { assertEquals(String.format("cosh(%s)", value), cosh.toString()); } - /** - * Test cosh with double value. - */ + /** Test cosh with double value. */ @ParameterizedTest(name = "cosh({0})") @ValueSource(doubles = {-1D, 1D, 2D}) public void cosh_double_value(Double value) { @@ -1867,9 +1516,7 @@ public void cosh_double_value(Double value) { assertEquals(String.format("cosh(%s)", value), cosh.toString()); } - /** - * Test cot with integer, long, float, double values. - */ + /** Test cot with integer, long, float, double values. */ @ParameterizedTest(name = "cot({0})") @MethodSource("trigonometricArguments") public void test_cot(Number value) { @@ -1880,21 +1527,18 @@ public void test_cot(Number value) { assertEquals(String.format("cot(%s)", value), cot.toString()); } - /** - * Test cot with out-of-range value 0. 
- */ + /** Test cot with out-of-range value 0. */ @ParameterizedTest(name = "cot({0})") @ValueSource(doubles = {0}) public void cot_with_zero(Number value) { FunctionExpression cot = DSL.cot(DSL.literal(value)); assertThrows( - ArithmeticException.class, () -> cot.valueOf(valueEnv()), + ArithmeticException.class, + () -> cot.valueOf(valueEnv()), String.format("Out of range value for cot(%s)", value)); } - /** - * Test degrees with integer, long, float, double values. - */ + /** Test degrees with integer, long, float, double values. */ @ParameterizedTest(name = "degrees({0})") @MethodSource("trigonometricArguments") public void test_degrees(Number value) { @@ -1905,9 +1549,7 @@ public void test_degrees(Number value) { assertEquals(String.format("degrees(%s)", value), degrees.toString()); } - /** - * Test radians with integer, long, float, double values. - */ + /** Test radians with integer, long, float, double values. */ @ParameterizedTest(name = "radians({0})") @MethodSource("trigonometricArguments") public void test_radians(Number value) { @@ -1918,35 +1560,27 @@ public void test_radians(Number value) { assertEquals(String.format("radians(%s)", value), radians.toString()); } - /** - * Test sin with integer, long, float, double values. - */ + /** Test sin with integer, long, float, double values. */ @ParameterizedTest(name = "sin({0})") @MethodSource("trigonometricArguments") public void test_sin(Number value) { FunctionExpression sin = DSL.sin(DSL.literal(value)); assertThat( - sin.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.sin(value.doubleValue())))); + sin.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.sin(value.doubleValue())))); assertEquals(String.format("sin(%s)", value), sin.toString()); } - /** - * Test tan with integer, long, float, double values. - */ + /** Test tan with integer, long, float, double values. 
*/ @ParameterizedTest(name = "tan({0})") @MethodSource("trigonometricArguments") public void test_tan(Number value) { FunctionExpression tan = DSL.tan(DSL.literal(value)); assertThat( - tan.valueOf(valueEnv()), - allOf(hasType(DOUBLE), hasValue(Math.tan(value.doubleValue())))); + tan.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.tan(value.doubleValue())))); assertEquals(String.format("tan(%s)", value), tan.toString()); } - /** - * Test cbrt with int value. - */ + /** Test cbrt with int value. */ @ParameterizedTest(name = "cbrt({0})") @ValueSource(ints = {1, 2}) public void cbrt_int_value(Integer value) { @@ -1955,9 +1589,7 @@ public void cbrt_int_value(Integer value) { assertEquals(String.format("cbrt(%s)", value), cbrt.toString()); } - /** - * Test cbrt with long value. - */ + /** Test cbrt with long value. */ @ParameterizedTest(name = "cbrt({0})") @ValueSource(longs = {1L, 2L}) public void cbrt_long_value(Long value) { @@ -1966,9 +1598,7 @@ public void cbrt_long_value(Long value) { assertEquals(String.format("cbrt(%s)", value), cbrt.toString()); } - /** - * Test cbrt with float value. - */ + /** Test cbrt with float value. */ @ParameterizedTest(name = "cbrt({0})") @ValueSource(floats = {1F, 2F}) public void cbrt_float_value(Float value) { @@ -1977,9 +1607,7 @@ public void cbrt_float_value(Float value) { assertEquals(String.format("cbrt(%s)", value), cbrt.toString()); } - /** - * Test cbrt with double value. - */ + /** Test cbrt with double value. */ @ParameterizedTest(name = "cbrt({0})") @ValueSource(doubles = {1D, 2D, Double.MAX_VALUE, Double.MIN_VALUE}) public void cbrt_double_value(Double value) { @@ -1988,9 +1616,7 @@ public void cbrt_double_value(Double value) { assertEquals(String.format("cbrt(%s)", value), cbrt.toString()); } - /** - * Test cbrt with negative value. - */ + /** Test cbrt with negative value. 
*/ @ParameterizedTest(name = "cbrt({0})") @ValueSource(doubles = {-1D, -2D}) public void cbrt_negative_value(Double value) { diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java index 742313922a..7803a4dbca 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.convert; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -44,8 +43,12 @@ class TypeCastOperatorTest { private static Stream numberData() { - return Stream.of(new ExprByteValue(3), new ExprShortValue(3), - new ExprIntegerValue(3), new ExprLongValue(3L), new ExprFloatValue(3.14f), + return Stream.of( + new ExprByteValue(3), + new ExprShortValue(3), + new ExprIntegerValue(3), + new ExprLongValue(3L), + new ExprFloatValue(3.14f), new ExprDoubleValue(3.1415D)); } @@ -357,5 +360,4 @@ void castToDatetime() { assertEquals(DATETIME, expression.type()); assertEquals(new ExprDatetimeValue("2012-08-07 00:00:00"), expression.valueOf()); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java index 52b1e8d8fc..e6290553ce 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.predicate; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -59,7 +58,8 @@ class 
BinaryPredicateOperatorTest extends ExpressionTestBase { - private static List STRING_PATTERN_PAIRS = ImmutableList.of( + private static List STRING_PATTERN_PAIRS = + ImmutableList.of( new StringPatternPair("Michael!", ".*"), new StringPatternPair("new*\\n*line", "new\\\\*.\\\\*line"), new StringPatternPair("a", "^[a-d]"), @@ -102,8 +102,7 @@ private static List> getValuesForComparisonTests() { List.of(Instant.ofEpochSecond(100500), LocalDate.of(1961, 4, 12)), List.of(Instant.ofEpochSecond(100500), LocalTime.of(7, 40, 0)), List.of(LocalTime.of(7, 40, 0), LocalDateTime.of(1984, 10, 25, 7, 40)), - List.of(Instant.ofEpochSecond(42), LocalDateTime.of(1984, 10, 25, 7, 40)) - ); + List.of(Instant.ofEpochSecond(42), LocalDateTime.of(1984, 10, 25, 7, 40))); } private static Stream testEqualArguments() { @@ -112,22 +111,33 @@ private static Stream testEqualArguments() { builder.add(Arguments.of(fromObjectValue(argPair.get(0)), fromObjectValue(argPair.get(0)))); builder.add(Arguments.of(fromObjectValue(argPair.get(1)), fromObjectValue(argPair.get(1)))); } - builder.add(Arguments.of(fromObjectValue(LocalTime.of(7, 40, 0)), - fromObjectValue(LocalTime.of(7, 40, 0).atDate(LocalDate.now())))); - builder.add(Arguments.of(fromObjectValue(LocalDateTime.of(1970, 1, 1, 0, 0, 42)), - fromObjectValue(Instant.ofEpochSecond(42)))); - builder.add(Arguments.of(fromObjectValue(LocalDate.of(1970, 1, 1)), - fromObjectValue(Instant.ofEpochSecond(0)))); - builder.add(Arguments.of(fromObjectValue(LocalDate.of(1984, 10, 25)), - fromObjectValue(LocalDateTime.of(1984, 10, 25, 0, 0)))); - builder.add(Arguments.of(fromObjectValue(LocalTime.of(0, 0, 0)), - fromObjectValue(LocalDate.now()))); - builder.add(Arguments.of(fromObjectValue(LocalTime.of(0, 0, 0)), - fromObjectValue(LocalDate.now().atStartOfDay(ZoneId.of("UTC")).toInstant()))); - builder.add(Arguments.of(fromObjectValue(ImmutableList.of(1)), - fromObjectValue(ImmutableList.of(1)))); - 
builder.add(Arguments.of(fromObjectValue(ImmutableMap.of("str", 1)), - fromObjectValue(ImmutableMap.of("str", 1)))); + builder.add( + Arguments.of( + fromObjectValue(LocalTime.of(7, 40, 0)), + fromObjectValue(LocalTime.of(7, 40, 0).atDate(LocalDate.now())))); + builder.add( + Arguments.of( + fromObjectValue(LocalDateTime.of(1970, 1, 1, 0, 0, 42)), + fromObjectValue(Instant.ofEpochSecond(42)))); + builder.add( + Arguments.of( + fromObjectValue(LocalDate.of(1970, 1, 1)), fromObjectValue(Instant.ofEpochSecond(0)))); + builder.add( + Arguments.of( + fromObjectValue(LocalDate.of(1984, 10, 25)), + fromObjectValue(LocalDateTime.of(1984, 10, 25, 0, 0)))); + builder.add( + Arguments.of(fromObjectValue(LocalTime.of(0, 0, 0)), fromObjectValue(LocalDate.now()))); + builder.add( + Arguments.of( + fromObjectValue(LocalTime.of(0, 0, 0)), + fromObjectValue(LocalDate.now().atStartOfDay(ZoneId.of("UTC")).toInstant()))); + builder.add( + Arguments.of(fromObjectValue(ImmutableList.of(1)), fromObjectValue(ImmutableList.of(1)))); + builder.add( + Arguments.of( + fromObjectValue(ImmutableMap.of("str", 1)), + fromObjectValue(ImmutableMap.of("str", 1)))); return builder.build(); } @@ -137,24 +147,37 @@ private static Stream testNotEqualArguments() { builder.add(Arguments.of(fromObjectValue(argPair.get(0)), fromObjectValue(argPair.get(1)))); builder.add(Arguments.of(fromObjectValue(argPair.get(1)), fromObjectValue(argPair.get(0)))); } - builder.add(Arguments.of(fromObjectValue(LocalTime.of(7, 40, 0)), - fromObjectValue(LocalDateTime.of(1984, 10, 25, 7, 40, 0)))); - builder.add(Arguments.of(fromObjectValue(LocalDateTime.of(1984, 10, 25, 7, 40, 0)), - fromObjectValue(Instant.ofEpochSecond(42)))); - builder.add(Arguments.of(fromObjectValue(LocalDate.of(1984, 10, 25)), - fromObjectValue(Instant.ofEpochSecond(42)))); - builder.add(Arguments.of(fromObjectValue(LocalTime.of(7, 40, 0)), - fromObjectValue(Instant.ofEpochSecond(42)))); - builder.add(Arguments.of(fromObjectValue(LocalDate.of(1984, 10, 
25)), - fromObjectValue(LocalDateTime.of(1984, 10, 25, 7, 40)))); - builder.add(Arguments.of(fromObjectValue(LocalDate.of(1984, 10, 25)), - fromObjectValue(LocalTime.of(7, 40, 0)))); - builder.add(Arguments.of(fromObjectValue(ImmutableList.of(1)), - fromObjectValue(ImmutableList.of(1, 2)))); - builder.add(Arguments.of(fromObjectValue(ImmutableList.of(1)), - fromObjectValue(ImmutableList.of(2)))); - builder.add(Arguments.of(fromObjectValue(ImmutableMap.of("str", 1)), - fromObjectValue(ImmutableMap.of("str2", 2)))); + builder.add( + Arguments.of( + fromObjectValue(LocalTime.of(7, 40, 0)), + fromObjectValue(LocalDateTime.of(1984, 10, 25, 7, 40, 0)))); + builder.add( + Arguments.of( + fromObjectValue(LocalDateTime.of(1984, 10, 25, 7, 40, 0)), + fromObjectValue(Instant.ofEpochSecond(42)))); + builder.add( + Arguments.of( + fromObjectValue(LocalDate.of(1984, 10, 25)), + fromObjectValue(Instant.ofEpochSecond(42)))); + builder.add( + Arguments.of( + fromObjectValue(LocalTime.of(7, 40, 0)), fromObjectValue(Instant.ofEpochSecond(42)))); + builder.add( + Arguments.of( + fromObjectValue(LocalDate.of(1984, 10, 25)), + fromObjectValue(LocalDateTime.of(1984, 10, 25, 7, 40)))); + builder.add( + Arguments.of( + fromObjectValue(LocalDate.of(1984, 10, 25)), fromObjectValue(LocalTime.of(7, 40, 0)))); + builder.add( + Arguments.of( + fromObjectValue(ImmutableList.of(1)), fromObjectValue(ImmutableList.of(1, 2)))); + builder.add( + Arguments.of(fromObjectValue(ImmutableList.of(1)), fromObjectValue(ImmutableList.of(2)))); + builder.add( + Arguments.of( + fromObjectValue(ImmutableMap.of("str", 1)), + fromObjectValue(ImmutableMap.of("str2", 2)))); return builder.build(); } @@ -169,13 +192,17 @@ private static Stream testCompareValueArguments() { } private static Stream testLikeArguments() { - List> arguments = Arrays.asList( - Arrays.asList("foo", "foo"), Arrays.asList("notFoo", "foo"), - Arrays.asList("foobar", "%bar"), Arrays.asList("bar", "%bar"), - Arrays.asList("foo", "fo_"), 
Arrays.asList("foo", "foo_"), - Arrays.asList("foorbar", "%o_ar"), Arrays.asList("foobar", "%o_a%"), - Arrays.asList("fooba%_\\^$.*[]()|+r", "%\\%\\_\\\\\\^\\$\\.\\*\\[\\]\\(\\)\\|\\+_") - ); + List> arguments = + Arrays.asList( + Arrays.asList("foo", "foo"), + Arrays.asList("notFoo", "foo"), + Arrays.asList("foobar", "%bar"), + Arrays.asList("bar", "%bar"), + Arrays.asList("foo", "fo_"), + Arrays.asList("foo", "foo_"), + Arrays.asList("foorbar", "%o_ar"), + Arrays.asList("foobar", "%o_a%"), + Arrays.asList("fooba%_\\^$.*[]()|+r", "%\\%\\_\\\\\\^\\$\\.\\*\\[\\]\\(\\)\\|\\+_")); Stream.Builder builder = Stream.builder(); for (List argPair : arguments) { builder.add(Arguments.of(fromObjectValue(argPair.get(0)), fromObjectValue(argPair.get(1)))); @@ -186,8 +213,7 @@ private static Stream testLikeArguments() { @ParameterizedTest(name = "and({0}, {1})") @MethodSource("binaryPredicateArguments") public void test_and(Boolean v1, Boolean v2) { - FunctionExpression and = - DSL.and(DSL.literal(booleanValue(v1)), DSL.literal(booleanValue(v2))); + FunctionExpression and = DSL.and(DSL.literal(booleanValue(v1)), DSL.literal(booleanValue(v2))); assertEquals(BOOLEAN, and.type()); assertEquals(v1 && v2, ExprValueUtils.getBooleanValue(and.valueOf(valueEnv()))); assertEquals(String.format("and(%s, %s)", v1.toString(), v2.toString()), and.toString()); @@ -235,23 +261,31 @@ public void test_boolean_and_missing() { @Test public void test_null_and_missing() { - FunctionExpression and = DSL.and(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); + FunctionExpression and = + DSL.and( + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, and.type()); assertEquals(LITERAL_MISSING, and.valueOf(valueEnv())); - and = DSL.and(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); + and = + DSL.and( + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, 
BOOLEAN), + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, and.type()); assertEquals(LITERAL_NULL, and.valueOf(valueEnv())); - and = DSL.and(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); + and = + DSL.and( + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, and.type()); assertEquals(LITERAL_MISSING, and.valueOf(valueEnv())); - and = DSL.and(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); + and = + DSL.and( + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, and.type()); assertEquals(LITERAL_MISSING, and.valueOf(valueEnv())); } @@ -259,8 +293,7 @@ public void test_null_and_missing() { @ParameterizedTest(name = "or({0}, {1})") @MethodSource("binaryPredicateArguments") public void test_or(Boolean v1, Boolean v2) { - FunctionExpression or = - DSL.or(DSL.literal(booleanValue(v1)), DSL.literal(booleanValue(v2))); + FunctionExpression or = DSL.or(DSL.literal(booleanValue(v1)), DSL.literal(booleanValue(v2))); assertEquals(BOOLEAN, or.type()); assertEquals(v1 || v2, ExprValueUtils.getBooleanValue(or.valueOf(valueEnv()))); assertEquals(String.format("or(%s, %s)", v1.toString(), v2.toString()), or.toString()); @@ -308,34 +341,39 @@ public void test_boolean_or_missing() { @Test public void test_null_or_missing() { - FunctionExpression or = DSL.or(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); + FunctionExpression or = + DSL.or( + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, or.type()); assertEquals(LITERAL_MISSING, or.valueOf(valueEnv())); or = - DSL.or(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), + DSL.or( + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), 
DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, or.type()); assertEquals(LITERAL_NULL, or.valueOf(valueEnv())); - or = DSL.or(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); + or = + DSL.or( + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, or.type()); assertEquals(LITERAL_NULL, or.valueOf(valueEnv())); - or = DSL.or(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); + or = + DSL.or( + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, or.type()); assertEquals(LITERAL_NULL, or.valueOf(valueEnv())); } - @ParameterizedTest(name = "xor({0}, {1})") @MethodSource("binaryPredicateArguments") public void test_xor(Boolean v1, Boolean v2) { - FunctionExpression xor = - DSL.xor(DSL.literal(booleanValue(v1)), DSL.literal(booleanValue(v2))); + FunctionExpression xor = DSL.xor(DSL.literal(booleanValue(v1)), DSL.literal(booleanValue(v2))); assertEquals(BOOLEAN, xor.type()); assertEquals(v1 ^ v2, ExprValueUtils.getBooleanValue(xor.valueOf(valueEnv()))); assertEquals(String.format("xor(%s, %s)", v1.toString(), v2.toString()), xor.toString()); @@ -383,23 +421,31 @@ public void test_boolean_xor_missing() { @Test public void test_null_xor_missing() { - FunctionExpression xor = DSL.xor(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); + FunctionExpression xor = + DSL.xor( + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, xor.type()); assertEquals(LITERAL_MISSING, xor.valueOf(valueEnv())); - xor = DSL.xor(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); + xor = + DSL.xor( + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, 
BOOLEAN)); assertEquals(BOOLEAN, xor.type()); assertEquals(LITERAL_NULL, xor.valueOf(valueEnv())); - xor = DSL.xor(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); + xor = + DSL.xor( + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, xor.type()); assertEquals(LITERAL_NULL, xor.valueOf(valueEnv())); - xor = DSL.xor(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), - DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); + xor = + DSL.xor( + DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), + DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, xor.type()); assertEquals(LITERAL_NULL, xor.valueOf(valueEnv())); } @@ -410,24 +456,28 @@ public void test_equal(ExprValue v1, ExprValue v2) { FunctionExpression equal = DSL.equal(functionProperties, DSL.literal(v1), DSL.literal(v2)); assertEquals(BOOLEAN, equal.type()); if (v1.type() == v2.type()) { - assertEquals(v1.value().equals(v2.value()), - ExprValueUtils.getBooleanValue(equal.valueOf(valueEnv()))); + assertEquals( + v1.value().equals(v2.value()), ExprValueUtils.getBooleanValue(equal.valueOf(valueEnv()))); } if (v1.type() != STRUCT && v1.type() != ARRAY) { - assertEquals(0 == compare(functionProperties, v1, v2), + assertEquals( + 0 == compare(functionProperties, v1, v2), ExprValueUtils.getBooleanValue(equal.valueOf(valueEnv()))); } assertStringRepr(v1, v2, "=", equal); } - private void assertStringRepr(ExprValue v1, ExprValue v2, String function, - FunctionExpression functionExpression) { + private void assertStringRepr( + ExprValue v1, ExprValue v2, String function, FunctionExpression functionExpression) { if (v1.type() == v2.type()) { assertEquals(String.format("%s(%s, %s)", function, v1, v2), functionExpression.toString()); } else { var widerType = v1.type() == TIMESTAMP || v2.type() == TIMESTAMP ? 
TIMESTAMP : DATETIME; - assertEquals(String.format("%s(%s, %s)", function, getExpectedStringRepr(widerType, v1), - getExpectedStringRepr(widerType, v2)), functionExpression.toString()); + assertEquals( + String.format( + "%s(%s, %s)", + function, getExpectedStringRepr(widerType, v1), getExpectedStringRepr(widerType, v2)), + functionExpression.toString()); } } @@ -441,15 +491,17 @@ private String getExpectedStringRepr(ExprType widerType, ExprValue value) { @ParameterizedTest(name = "equal({0}, {1})") @MethodSource({"testEqualArguments", "testNotEqualArguments"}) public void test_notequal(ExprValue v1, ExprValue v2) { - FunctionExpression notequal = DSL.notequal(functionProperties, - DSL.literal(v1), DSL.literal(v2)); + FunctionExpression notequal = + DSL.notequal(functionProperties, DSL.literal(v1), DSL.literal(v2)); assertEquals(BOOLEAN, notequal.type()); if (v1.type() == v2.type()) { - assertEquals(!v1.value().equals(v2.value()), + assertEquals( + !v1.value().equals(v2.value()), ExprValueUtils.getBooleanValue(notequal.valueOf(valueEnv()))); } if (v1.type() != STRUCT && v1.type() != ARRAY) { - assertEquals(0 != compare(functionProperties, v1, v2), + assertEquals( + 0 != compare(functionProperties, v1, v2), ExprValueUtils.getBooleanValue(notequal.valueOf(valueEnv()))); } assertStringRepr(v1, v2, "!=", notequal); @@ -460,7 +512,8 @@ public void test_notequal(ExprValue v1, ExprValue v2) { public void test_less(ExprValue v1, ExprValue v2) { FunctionExpression less = DSL.less(functionProperties, DSL.literal(v1), DSL.literal(v2)); assertEquals(BOOLEAN, less.type()); - assertEquals(compare(functionProperties, v1, v2) < 0, + assertEquals( + compare(functionProperties, v1, v2) < 0, ExprValueUtils.getBooleanValue(less.valueOf(valueEnv()))); assertStringRepr(v1, v2, "<", less); } @@ -470,7 +523,8 @@ public void test_less(ExprValue v1, ExprValue v2) { public void test_lte(ExprValue v1, ExprValue v2) { FunctionExpression lte = DSL.lte(functionProperties, DSL.literal(v1), 
DSL.literal(v2)); assertEquals(BOOLEAN, lte.type()); - assertEquals(compare(functionProperties, v1, v2) <= 0, + assertEquals( + compare(functionProperties, v1, v2) <= 0, ExprValueUtils.getBooleanValue(lte.valueOf(valueEnv()))); assertStringRepr(v1, v2, "<=", lte); } @@ -480,7 +534,8 @@ public void test_lte(ExprValue v1, ExprValue v2) { public void test_greater(ExprValue v1, ExprValue v2) { FunctionExpression greater = DSL.greater(functionProperties, DSL.literal(v1), DSL.literal(v2)); assertEquals(BOOLEAN, greater.type()); - assertEquals(compare(functionProperties, v1, v2) > 0, + assertEquals( + compare(functionProperties, v1, v2) > 0, ExprValueUtils.getBooleanValue(greater.valueOf(valueEnv()))); assertStringRepr(v1, v2, ">", greater); } @@ -490,7 +545,8 @@ public void test_greater(ExprValue v1, ExprValue v2) { public void test_gte(ExprValue v1, ExprValue v2) { FunctionExpression gte = DSL.gte(functionProperties, DSL.literal(v1), DSL.literal(v2)); assertEquals(BOOLEAN, gte.type()); - assertEquals(compare(functionProperties, v1, v2) >= 0, + assertEquals( + compare(functionProperties, v1, v2) >= 0, ExprValueUtils.getBooleanValue(gte.valueOf(valueEnv()))); assertStringRepr(v1, v2, ">=", gte); } @@ -522,17 +578,15 @@ void test_regexp() { } void testRegexpString(StringPatternPair stringPatternPair) { - FunctionExpression expression = DSL.regexp( + FunctionExpression expression = + DSL.regexp( DSL.literal(new ExprStringValue(stringPatternPair.getStr())), DSL.literal(new ExprStringValue(stringPatternPair.getPatt()))); assertEquals(INTEGER, expression.type()); - assertEquals(stringPatternPair.regExpTest(), expression - .valueOf(valueEnv()).integerValue()); + assertEquals(stringPatternPair.regExpTest(), expression.valueOf(valueEnv()).integerValue()); } - /** - * Todo. remove this test cases after script serilization implemented. - */ + /** Todo. remove this test cases after script serilization implemented. 
*/ @Test public void serializationTest() throws Exception { Expression expression = DSL.equal(DSL.literal("v1"), DSL.literal("v2")); diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java index de3f5a3e7e..f7a1a7008a 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.predicate; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -50,15 +49,15 @@ private static Stream isNullArguments() { expressions.add(DSL.literal(LITERAL_NULL)); return Lists.cartesianProduct(expressions, expressions).stream() - .map(list -> { - Expression e1 = list.get(0); - if (e1.valueOf(valueEnv()).isNull() - || e1.valueOf(valueEnv()).isMissing()) { - return Arguments.of(e1, DSL.literal(LITERAL_TRUE)); - } else { - return Arguments.of(e1, DSL.literal(LITERAL_FALSE)); - } - }); + .map( + list -> { + Expression e1 = list.get(0); + if (e1.valueOf(valueEnv()).isNull() || e1.valueOf(valueEnv()).isMissing()) { + return Arguments.of(e1, DSL.literal(LITERAL_TRUE)); + } else { + return Arguments.of(e1, DSL.literal(LITERAL_FALSE)); + } + }); } private static Stream ifNullArguments() { @@ -69,16 +68,17 @@ private static Stream ifNullArguments() { exprValueArrayList.add(DSL.literal(LITERAL_NULL)); return Lists.cartesianProduct(exprValueArrayList, exprValueArrayList).stream() - .map(list -> { - Expression e1 = list.get(0); - Expression e2 = list.get(1); - if (e1.valueOf(valueEnv()).value() == LITERAL_NULL.value() + .map( + list -> { + Expression e1 = list.get(0); + Expression e2 = list.get(1); + if (e1.valueOf(valueEnv()).value() == LITERAL_NULL.value() || 
e1.valueOf(valueEnv()).value() == LITERAL_MISSING) { - return Arguments.of(e1, e2, e2); - } else { - return Arguments.of(e1, e2, e1); - } - }); + return Arguments.of(e1, e2, e2); + } else { + return Arguments.of(e1, e2, e1); + } + }); } private static Stream nullIfArguments() { @@ -87,16 +87,17 @@ private static Stream nullIfArguments() { exprValueArrayList.add(DSL.literal(321)); return Lists.cartesianProduct(exprValueArrayList, exprValueArrayList).stream() - .map(list -> { - Expression e1 = list.get(0); - Expression e2 = list.get(1); + .map( + list -> { + Expression e1 = list.get(0); + Expression e2 = list.get(1); - if (e1.equals(e2)) { - return Arguments.of(e1, e2, DSL.literal(LITERAL_NULL)); - } else { - return Arguments.of(e1, e2, e1); - } - }); + if (e1.equals(e2)) { + return Arguments.of(e1, e2, DSL.literal(LITERAL_NULL)); + } else { + return Arguments.of(e1, e2, e1); + } + }); } private static Stream ifArguments() { @@ -107,14 +108,15 @@ private static Stream ifArguments() { exprValueArrayList.add(DSL.literal(LITERAL_MISSING)); return Lists.cartesianProduct(exprValueArrayList, exprValueArrayList).stream() - .map(list -> { - Expression e1 = list.get(0); - if (e1.valueOf(valueEnv()).value() == LITERAL_TRUE.value()) { - return Arguments.of(e1, DSL.literal("123"), DSL.literal("321"), DSL.literal("123")); - } else { - return Arguments.of(e1, DSL.literal("123"), DSL.literal("321"), DSL.literal("321")); - } - }); + .map( + list -> { + Expression e1 = list.get(0); + if (e1.valueOf(valueEnv()).value() == LITERAL_TRUE.value()) { + return Arguments.of(e1, DSL.literal("123"), DSL.literal("321"), DSL.literal("123")); + } else { + return Arguments.of(e1, DSL.literal("123"), DSL.literal("321"), DSL.literal("321")); + } + }); } private static Stream exprIfNullArguments() { @@ -125,15 +127,16 @@ private static Stream exprIfNullArguments() { exprValues.add(ExprValueUtils.integerValue(456)); return Lists.cartesianProduct(exprValues, exprValues).stream() - .map(list -> { - 
ExprValue e1 = list.get(0); - ExprValue e2 = list.get(1); - if (e1.isNull() || e1.isMissing()) { - return Arguments.of(e1, e2, e2); - } else { - return Arguments.of(e1, e2, e1); - } - }); + .map( + list -> { + ExprValue e1 = list.get(0); + ExprValue e2 = list.get(1); + if (e1.isNull() || e1.isMissing()) { + return Arguments.of(e1, e2, e2); + } else { + return Arguments.of(e1, e2, e1); + } + }); } private static Stream exprNullIfArguments() { @@ -143,15 +146,16 @@ private static Stream exprNullIfArguments() { exprValues.add(ExprValueUtils.integerValue(123)); return Lists.cartesianProduct(exprValues, exprValues).stream() - .map(list -> { - ExprValue e1 = list.get(0); - ExprValue e2 = list.get(1); - if (e1.equals(e2)) { - return Arguments.of(e1, e2, LITERAL_NULL); - } else { - return Arguments.of(e1, e2, e1); - } - }); + .map( + list -> { + ExprValue e1 = list.get(0); + ExprValue e2 = list.get(1); + if (e1.equals(e2)) { + return Arguments.of(e1, e2, LITERAL_NULL); + } else { + return Arguments.of(e1, e2, e1); + } + }); } @Test @@ -225,5 +229,4 @@ public void test_exprIfNull_predicate(ExprValue v1, ExprValue v2, ExprValue expe public void test_exprNullIf_predicate(ExprValue v1, ExprValue v2, ExprValue expected) { assertEquals(expected.value(), UnaryPredicateOperator.exprNullIf(v1, v2).value()); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/parse/GrokExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/parse/GrokExpressionTest.java index 28bd3467fd..fec851c1ed 100644 --- a/core/src/test/java/org/opensearch/sql/expression/parse/GrokExpressionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/parse/GrokExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.parse; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -37,70 +36,100 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class GrokExpressionTest extends 
ExpressionTestBase { - @Mock - Environment env; + @Mock Environment env; @Test public void resolve_grok_groups_and_parsed_values() { - when(DSL.ref("log_value", STRING).valueOf(env)).thenReturn(stringValue( - "145.128.75.121 - - [29/Aug/2022:13:26:44 -0700] \"GET /deliverables HTTP/2.0\" 501 2721")); + when(DSL.ref("log_value", STRING).valueOf(env)) + .thenReturn( + stringValue( + "145.128.75.121 - - [29/Aug/2022:13:26:44 -0700] \"GET /deliverables HTTP/2.0\" 501" + + " 2721")); String rawPattern = "%{COMMONAPACHELOG}"; - Map expected = ImmutableMap.builder() - .put("COMMONAPACHELOG", "145.128.75.121 - - [29/Aug/2022:13:26:44 -0700] " - + "\"GET /deliverables HTTP/2.0\" 501 2721") - .put("clientip", "145.128.75.121") - .put("ident", "-") - .put("auth", "-") - .put("timestamp", "29/Aug/2022:13:26:44 -0700") - .put("MONTHDAY", "29") - .put("MONTH", "Aug") - .put("YEAR", "2022") - .put("TIME", "13:26:44") - .put("HOUR", "13") - .put("MINUTE", "26") - .put("SECOND", "44") - .put("INT", "-0700") - .put("verb", "GET") - .put("request", "/deliverables") - .put("httpversion", "2.0") - .put("rawrequest", "") - .put("response", "501") - .put("bytes", "2721") - .build(); + Map expected = + ImmutableMap.builder() + .put( + "COMMONAPACHELOG", + "145.128.75.121 - - [29/Aug/2022:13:26:44 -0700] " + + "\"GET /deliverables HTTP/2.0\" 501 2721") + .put("clientip", "145.128.75.121") + .put("ident", "-") + .put("auth", "-") + .put("timestamp", "29/Aug/2022:13:26:44 -0700") + .put("MONTHDAY", "29") + .put("MONTH", "Aug") + .put("YEAR", "2022") + .put("TIME", "13:26:44") + .put("HOUR", "13") + .put("MINUTE", "26") + .put("SECOND", "44") + .put("INT", "-0700") + .put("verb", "GET") + .put("request", "/deliverables") + .put("httpversion", "2.0") + .put("rawrequest", "") + .put("response", "501") + .put("bytes", "2721") + .build(); List identifiers = new ArrayList<>(expected.keySet()); assertEquals(identifiers, GrokExpression.getNamedGroupCandidates(rawPattern)); - 
identifiers.forEach(identifier -> assertEquals(stringValue(expected.get(identifier)), - DSL.grok(DSL.ref("log_value", STRING), DSL.literal(rawPattern), DSL.literal(identifier)) - .valueOf(env))); + identifiers.forEach( + identifier -> + assertEquals( + stringValue(expected.get(identifier)), + DSL.grok( + DSL.ref("log_value", STRING), + DSL.literal(rawPattern), + DSL.literal(identifier)) + .valueOf(env))); } @Test public void resolve_null_and_empty_values() { - assertEquals(stringValue(""), - DSL.grok(DSL.ref("string_value", STRING), DSL.literal("%{COMMONAPACHELOG}"), - DSL.literal("request")).valueOf(valueEnv())); - assertEquals(LITERAL_NULL, - DSL.grok(DSL.ref(STRING_TYPE_NULL_VALUE_FIELD, STRING), - DSL.literal("%{COMMONAPACHELOG}"), DSL.literal("request")).valueOf(valueEnv())); - assertEquals(LITERAL_NULL, - DSL.grok(DSL.ref(STRING_TYPE_MISSING_VALUE_FIELD, STRING), - DSL.literal("p%{COMMONAPACHELOG}"), DSL.literal("request")).valueOf(valueEnv())); + assertEquals( + stringValue(""), + DSL.grok( + DSL.ref("string_value", STRING), + DSL.literal("%{COMMONAPACHELOG}"), + DSL.literal("request")) + .valueOf(valueEnv())); + assertEquals( + LITERAL_NULL, + DSL.grok( + DSL.ref(STRING_TYPE_NULL_VALUE_FIELD, STRING), + DSL.literal("%{COMMONAPACHELOG}"), + DSL.literal("request")) + .valueOf(valueEnv())); + assertEquals( + LITERAL_NULL, + DSL.grok( + DSL.ref(STRING_TYPE_MISSING_VALUE_FIELD, STRING), + DSL.literal("p%{COMMONAPACHELOG}"), + DSL.literal("request")) + .valueOf(valueEnv())); } @Test public void resolve_type() { - assertEquals(STRING, - DSL.grok(DSL.ref("string_value", STRING), DSL.literal("%{COMMONAPACHELOG}"), - DSL.literal("request")).type()); + assertEquals( + STRING, + DSL.grok( + DSL.ref("string_value", STRING), + DSL.literal("%{COMMONAPACHELOG}"), + DSL.literal("request")) + .type()); } @Test public void throws_semantic_exception_if_value_type_is_not_string() { assertThrows( SemanticCheckException.class, - () -> DSL.grok(DSL.ref("boolean_value", BOOLEAN), 
DSL.literal("%{COMMONAPACHELOG}"), - DSL.literal("request")) - .valueOf(valueEnv())); + () -> + DSL.grok( + DSL.ref("boolean_value", BOOLEAN), + DSL.literal("%{COMMONAPACHELOG}"), + DSL.literal("request")) + .valueOf(valueEnv())); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/parse/PatternsExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/parse/PatternsExpressionTest.java index 1593e94a8a..7237f0673b 100644 --- a/core/src/test/java/org/opensearch/sql/expression/parse/PatternsExpressionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/parse/PatternsExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.parse; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -33,46 +32,58 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class PatternsExpressionTest extends ExpressionTestBase { - @Mock - Environment env; + @Mock Environment env; @Test public void resolve_value() { - when(DSL.ref("log_value", STRING).valueOf(env)).thenReturn(stringValue( - "145.128.75.121 - - [29/Aug/2022:13:26:44 -0700] \"GET /deliverables HTTP/2.0\" 501 2721")); - assertEquals(stringValue("... - - [//::: -] \" / /.\" "), - DSL.patterns(DSL.ref("log_value", STRING), DSL.literal(""), - DSL.literal("punct_field")).valueOf(env)); - assertEquals(stringValue("... - - [/Aug/::: -] \"GET /deliverables HTTP/.\" "), - DSL.patterns(DSL.ref("log_value", STRING), DSL.literal("[0-9]"), - DSL.literal("regex_field")).valueOf(env)); + when(DSL.ref("log_value", STRING).valueOf(env)) + .thenReturn( + stringValue( + "145.128.75.121 - - [29/Aug/2022:13:26:44 -0700] \"GET /deliverables HTTP/2.0\" 501" + + " 2721")); + assertEquals( + stringValue("... - - [//::: -] \" / /.\" "), + DSL.patterns(DSL.ref("log_value", STRING), DSL.literal(""), DSL.literal("punct_field")) + .valueOf(env)); + assertEquals( + stringValue("... 
- - [/Aug/::: -] \"GET /deliverables HTTP/.\" "), + DSL.patterns(DSL.ref("log_value", STRING), DSL.literal("[0-9]"), DSL.literal("regex_field")) + .valueOf(env)); } @Test public void resolve_null_and_missing_values() { - assertEquals(LITERAL_NULL, - DSL.patterns(DSL.ref(STRING_TYPE_NULL_VALUE_FIELD, STRING), - DSL.literal("pattern"), DSL.literal("patterns_field")).valueOf(valueEnv())); - assertEquals(LITERAL_NULL, - DSL.patterns(DSL.ref(STRING_TYPE_MISSING_VALUE_FIELD, STRING), - DSL.literal("pattern"), DSL.literal("patterns_field")).valueOf(valueEnv())); + assertEquals( + LITERAL_NULL, + DSL.patterns( + DSL.ref(STRING_TYPE_NULL_VALUE_FIELD, STRING), + DSL.literal("pattern"), + DSL.literal("patterns_field")) + .valueOf(valueEnv())); + assertEquals( + LITERAL_NULL, + DSL.patterns( + DSL.ref(STRING_TYPE_MISSING_VALUE_FIELD, STRING), + DSL.literal("pattern"), + DSL.literal("patterns_field")) + .valueOf(valueEnv())); } @Test public void resolve_type() { - assertEquals(STRING, - DSL.patterns(DSL.ref("string_value", STRING), - DSL.literal("pattern"), - DSL.literal("group")).type()); + assertEquals( + STRING, + DSL.patterns(DSL.ref("string_value", STRING), DSL.literal("pattern"), DSL.literal("group")) + .type()); } @Test public void throws_semantic_exception_if_value_type_is_not_string() { assertThrows( SemanticCheckException.class, - () -> DSL.patterns(DSL.ref("boolean_value", BOOLEAN), - DSL.literal("pattern"), - DSL.literal("group")) - .valueOf(valueEnv())); + () -> + DSL.patterns( + DSL.ref("boolean_value", BOOLEAN), DSL.literal("pattern"), DSL.literal("group")) + .valueOf(valueEnv())); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/parse/RegexExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/parse/RegexExpressionTest.java index 95d836042e..846aff6911 100644 --- a/core/src/test/java/org/opensearch/sql/expression/parse/RegexExpressionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/parse/RegexExpressionTest.java 
@@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.parse; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -37,62 +36,97 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class RegexExpressionTest extends ExpressionTestBase { - @Mock - Environment env; + @Mock Environment env; @Test public void resolve_regex_groups_and_parsed_values() { - when(DSL.ref("log_value", STRING).valueOf(env)).thenReturn(stringValue( - "130.246.123.197 - - [2018-07-22T03:26:21.326Z] \"GET /beats/metricbeat_1 HTTP/1.1\" " - + "200 6850 \"-\" \"Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 " - + "Firefox/6.0a1\"")); + when(DSL.ref("log_value", STRING).valueOf(env)) + .thenReturn( + stringValue( + "130.246.123.197 - - [2018-07-22T03:26:21.326Z] \"GET /beats/metricbeat_1" + + " HTTP/1.1\" 200 6850 \"-\" \"Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1)" + + " Gecko/20110421 Firefox/6.0a1\"")); String rawPattern = "(?(\\d{1,3}\\.){3}\\d{1,3}) - - \\[(?\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:" + "[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z))] \"(?[^\"]+)\" " + "(?\\d+) (?\\d+) \"-\" \"(?[^\"]+)\""; Map expected = - ImmutableMap.of("ip", "130.246.123.197", "date", "2018-07-22T03:26:21.326Z", "request", - "GET /beats/metricbeat_1 HTTP/1.1", "status", "200", "bytes", "6850", "userAgent", + ImmutableMap.of( + "ip", + "130.246.123.197", + "date", + "2018-07-22T03:26:21.326Z", + "request", + "GET /beats/metricbeat_1 HTTP/1.1", + "status", + "200", + "bytes", + "6850", + "userAgent", "Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1"); List identifiers = new ArrayList<>(expected.keySet()); assertEquals(identifiers, RegexExpression.getNamedGroupCandidates(rawPattern)); - identifiers.forEach(identifier -> assertEquals(stringValue(expected.get(identifier)), - DSL.regex(DSL.ref("log_value", STRING), DSL.literal(rawPattern), DSL.literal(identifier)) - .valueOf(env))); + identifiers.forEach( + 
identifier -> + assertEquals( + stringValue(expected.get(identifier)), + DSL.regex( + DSL.ref("log_value", STRING), + DSL.literal(rawPattern), + DSL.literal(identifier)) + .valueOf(env))); } @Test public void resolve_not_parsable_inputs_as_empty_string() { - assertEquals(stringValue(""), - DSL.regex(DSL.ref("string_value", STRING), DSL.literal("(?not-matching)"), - DSL.literal("group")).valueOf(valueEnv())); + assertEquals( + stringValue(""), + DSL.regex( + DSL.ref("string_value", STRING), + DSL.literal("(?not-matching)"), + DSL.literal("group")) + .valueOf(valueEnv())); } @Test public void resolve_null_and_missing_values() { - assertEquals(LITERAL_NULL, - DSL.regex(DSL.ref(STRING_TYPE_NULL_VALUE_FIELD, STRING), - DSL.literal("(?\\w{2})\\w"), DSL.literal("group")).valueOf(valueEnv())); - assertEquals(LITERAL_NULL, - DSL.regex(DSL.ref(STRING_TYPE_MISSING_VALUE_FIELD, STRING), - DSL.literal("(?\\w{2})\\w"), DSL.literal("group")).valueOf(valueEnv())); + assertEquals( + LITERAL_NULL, + DSL.regex( + DSL.ref(STRING_TYPE_NULL_VALUE_FIELD, STRING), + DSL.literal("(?\\w{2})\\w"), + DSL.literal("group")) + .valueOf(valueEnv())); + assertEquals( + LITERAL_NULL, + DSL.regex( + DSL.ref(STRING_TYPE_MISSING_VALUE_FIELD, STRING), + DSL.literal("(?\\w{2})\\w"), + DSL.literal("group")) + .valueOf(valueEnv())); } @Test public void resolve_type() { - assertEquals(STRING, - DSL.regex(DSL.ref("string_value", STRING), DSL.literal("(?\\w{2})\\w"), - DSL.literal("group")).type()); + assertEquals( + STRING, + DSL.regex( + DSL.ref("string_value", STRING), + DSL.literal("(?\\w{2})\\w"), + DSL.literal("group")) + .type()); } @Test public void throws_semantic_exception_if_value_type_is_not_string() { assertThrows( SemanticCheckException.class, - () -> DSL.regex(DSL.ref("boolean_value", BOOLEAN), DSL.literal("(?\\w{2})\\w"), - DSL.literal("group")) - .valueOf(valueEnv())); + () -> + DSL.regex( + DSL.ref("boolean_value", BOOLEAN), + DSL.literal("(?\\w{2})\\w"), + DSL.literal("group")) + 
.valueOf(valueEnv())); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java b/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java index 62d219e576..ac4153f59f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java @@ -62,27 +62,30 @@ void typeof() { assertEquals("TIMESTAMP", typeofGetValue(new ExprTimestampValue(Instant.now()))); assertEquals("UNDEFINED", typeofGetValue(ExprNullValue.of())); assertEquals("UNDEFINED", typeofGetValue(ExprMissingValue.of())); - assertEquals("UNKNOWN", typeofGetValue(new AbstractExprValue() { - @Override - public int compare(ExprValue other) { - return 0; - } + assertEquals( + "UNKNOWN", + typeofGetValue( + new AbstractExprValue() { + @Override + public int compare(ExprValue other) { + return 0; + } - @Override - public boolean equal(ExprValue other) { - return false; - } + @Override + public boolean equal(ExprValue other) { + return false; + } - @Override - public Object value() { - return null; - } + @Override + public Object value() { + return null; + } - @Override - public ExprType type() { - return ExprCoreType.UNKNOWN; - } - })); + @Override + public ExprType type() { + return ExprCoreType.UNKNOWN; + } + })); } private String typeofGetValue(ExprValue input) { diff --git a/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java index 54d2e5c400..84ae0b844f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.text; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -36,45 +35,44 @@ 
@ExtendWith(MockitoExtension.class) public class TextFunctionTest extends ExpressionTestBase { - @Mock - Environment env; - - @Mock - Expression nullRef; - - @Mock - Expression missingRef; - - - private static List SUBSTRING_STRINGS = ImmutableList.of( - new SubstringInfo("", 1, 1, ""), - new SubstringInfo("Quadratically", 5, null, "ratically"), - new SubstringInfo("foobarbar", 4, null, "barbar"), - new SubstringInfo("Quadratically", 5, 6, "ratica"), - new SubstringInfo("Quadratically", 5, 600, "ratically"), - new SubstringInfo("Quadratically", 500, 1, ""), - new SubstringInfo("Quadratically", 500, null, ""), - new SubstringInfo("Sakila", -3, null, "ila"), - new SubstringInfo("Sakila", -5, 3, "aki"), - new SubstringInfo("Sakila", -4, 2, "ki"), - new SubstringInfo("Quadratically", 0, null, ""), - new SubstringInfo("Sakila", 0, 2, ""), - new SubstringInfo("Sakila", 2, 0, ""), - new SubstringInfo("Sakila", 0, 0, "")); - private static List UPPER_LOWER_STRINGS = ImmutableList.of( - "test", " test", "test ", " test ", "TesT", "TEST", " TEST", "TEST ", " TEST ", " ", ""); - private static List STRING_PATTERN_PAIRS = ImmutableList.of( - new StringPatternPair("Michael!", "Michael!"), - new StringPatternPair("hello", "world"), - new StringPatternPair("world", "hello")); - private static List TRIM_STRINGS = ImmutableList.of( - " test", " test", "test ", "test", " test ", "", " "); - private static List> CONCAT_STRING_LISTS = ImmutableList.of( - ImmutableList.of("hello", "world"), - ImmutableList.of("123", "5325")); - private static List> CONCAT_STRING_LISTS_WITH_MANY_STRINGS = ImmutableList.of( - ImmutableList.of("he", "llo", "wo", "rld", "!"), - ImmutableList.of("0", "123", "53", "25", "7")); + @Mock Environment env; + + @Mock Expression nullRef; + + @Mock Expression missingRef; + + private static List SUBSTRING_STRINGS = + ImmutableList.of( + new SubstringInfo("", 1, 1, ""), + new SubstringInfo("Quadratically", 5, null, "ratically"), + new SubstringInfo("foobarbar", 4, 
null, "barbar"), + new SubstringInfo("Quadratically", 5, 6, "ratica"), + new SubstringInfo("Quadratically", 5, 600, "ratically"), + new SubstringInfo("Quadratically", 500, 1, ""), + new SubstringInfo("Quadratically", 500, null, ""), + new SubstringInfo("Sakila", -3, null, "ila"), + new SubstringInfo("Sakila", -5, 3, "aki"), + new SubstringInfo("Sakila", -4, 2, "ki"), + new SubstringInfo("Quadratically", 0, null, ""), + new SubstringInfo("Sakila", 0, 2, ""), + new SubstringInfo("Sakila", 2, 0, ""), + new SubstringInfo("Sakila", 0, 0, "")); + private static List UPPER_LOWER_STRINGS = + ImmutableList.of( + "test", " test", "test ", " test ", "TesT", "TEST", " TEST", "TEST ", " TEST ", " ", ""); + private static List STRING_PATTERN_PAIRS = + ImmutableList.of( + new StringPatternPair("Michael!", "Michael!"), + new StringPatternPair("hello", "world"), + new StringPatternPair("world", "hello")); + private static List TRIM_STRINGS = + ImmutableList.of(" test", " test", "test ", "test", " test ", "", " "); + private static List> CONCAT_STRING_LISTS = + ImmutableList.of(ImmutableList.of("hello", "world"), ImmutableList.of("123", "5325")); + private static List> CONCAT_STRING_LISTS_WITH_MANY_STRINGS = + ImmutableList.of( + ImmutableList.of("he", "llo", "wo", "rld", "!"), + ImmutableList.of("0", "123", "53", "25", "7")); interface SubstrSubstring { FunctionExpression getFunction(SubstringInfo strInfo); @@ -86,9 +84,11 @@ public FunctionExpression getFunction(SubstringInfo strInfo) { if (strInfo.getLen() == null) { expr = DSL.substr(DSL.literal(strInfo.getExpr()), DSL.literal(strInfo.getStart())); } else { - expr = DSL.substr(DSL.literal(strInfo.getExpr()), - DSL.literal(strInfo.getStart()), - DSL.literal(strInfo.getLen())); + expr = + DSL.substr( + DSL.literal(strInfo.getExpr()), + DSL.literal(strInfo.getStart()), + DSL.literal(strInfo.getLen())); } return expr; } @@ -100,9 +100,11 @@ public FunctionExpression getFunction(SubstringInfo strInfo) { if (strInfo.getLen() == null) 
{ expr = DSL.substring(DSL.literal(strInfo.getExpr()), DSL.literal(strInfo.getStart())); } else { - expr = DSL.substring(DSL.literal(strInfo.getExpr()), - DSL.literal(strInfo.getStart()), - DSL.literal(strInfo.getLen())); + expr = + DSL.substring( + DSL.literal(strInfo.getExpr()), + DSL.literal(strInfo.getStart()), + DSL.literal(strInfo.getLen())); } return expr; } @@ -235,15 +237,11 @@ void concat() { when(nullRef.type()).thenReturn(STRING); when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval( - DSL.concat(missingRef, DSL.literal("1")))); + assertEquals(missingValue(), eval(DSL.concat(missingRef, DSL.literal("1")))); // If any of the expressions is a NULL value, it returns NULL. - assertEquals(nullValue(), eval( - DSL.concat(nullRef, DSL.literal("1")))); - assertEquals(missingValue(), eval( - DSL.concat(DSL.literal("1"), missingRef))); - assertEquals(nullValue(), eval( - DSL.concat(DSL.literal("1"), nullRef))); + assertEquals(nullValue(), eval(DSL.concat(nullRef, DSL.literal("1")))); + assertEquals(missingValue(), eval(DSL.concat(DSL.literal("1"), missingRef))); + assertEquals(nullValue(), eval(DSL.concat(DSL.literal("1"), nullRef))); } @Test @@ -252,18 +250,15 @@ void concat_ws() { when(nullRef.type()).thenReturn(STRING); when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval( - DSL.concat_ws(missingRef, DSL.literal("1"), DSL.literal("1")))); - assertEquals(nullValue(), eval( - DSL.concat_ws(nullRef, DSL.literal("1"), DSL.literal("1")))); - assertEquals(missingValue(), eval( - DSL.concat_ws(DSL.literal("1"), missingRef, DSL.literal("1")))); - assertEquals(nullValue(), eval( - DSL.concat_ws(DSL.literal("1"), nullRef, DSL.literal("1")))); - assertEquals(missingValue(), eval( - DSL.concat_ws(DSL.literal("1"), DSL.literal("1"), missingRef))); - assertEquals(nullValue(), eval( - DSL.concat_ws(DSL.literal("1"), DSL.literal("1"), nullRef))); + assertEquals( + missingValue(), eval(DSL.concat_ws(missingRef, 
DSL.literal("1"), DSL.literal("1")))); + assertEquals(nullValue(), eval(DSL.concat_ws(nullRef, DSL.literal("1"), DSL.literal("1")))); + assertEquals( + missingValue(), eval(DSL.concat_ws(DSL.literal("1"), missingRef, DSL.literal("1")))); + assertEquals(nullValue(), eval(DSL.concat_ws(DSL.literal("1"), nullRef, DSL.literal("1")))); + assertEquals( + missingValue(), eval(DSL.concat_ws(DSL.literal("1"), DSL.literal("1"), missingRef))); + assertEquals(nullValue(), eval(DSL.concat_ws(DSL.literal("1"), DSL.literal("1"), nullRef))); } @Test @@ -290,9 +285,9 @@ void strcmp() { @Test void right() { - FunctionExpression expression = DSL.right( - DSL.literal(new ExprStringValue("foobarbar")), - DSL.literal(new ExprIntegerValue(4))); + FunctionExpression expression = + DSL.right( + DSL.literal(new ExprStringValue("foobarbar")), DSL.literal(new ExprIntegerValue(4))); assertEquals(STRING, expression.type()); assertEquals("rbar", eval(expression).stringValue()); @@ -319,9 +314,9 @@ void right() { @Test void left() { - FunctionExpression expression = DSL.left( - DSL.literal(new ExprStringValue("helloworld")), - DSL.literal(new ExprIntegerValue(5))); + FunctionExpression expression = + DSL.left( + DSL.literal(new ExprStringValue("helloworld")), DSL.literal(new ExprIntegerValue(5))); assertEquals(STRING, expression.type()); assertEquals("hello", eval(expression).stringValue()); @@ -360,16 +355,11 @@ void ascii() { @Test void locate() { - FunctionExpression expression = DSL.locate( - DSL.literal("world"), - DSL.literal("helloworld")); + FunctionExpression expression = DSL.locate(DSL.literal("world"), DSL.literal("helloworld")); assertEquals(INTEGER, expression.type()); assertEquals(6, eval(expression).integerValue()); - expression = DSL.locate( - DSL.literal("world"), - DSL.literal("helloworldworld"), - DSL.literal(7)); + expression = DSL.locate(DSL.literal("world"), DSL.literal("helloworldworld"), DSL.literal(7)); assertEquals(INTEGER, expression.type()); assertEquals(11, 
eval(expression).integerValue()); @@ -378,21 +368,18 @@ void locate() { assertEquals(nullValue(), eval(DSL.locate(nullRef, DSL.literal("hello"), DSL.literal(1)))); when(missingRef.type()).thenReturn(STRING); assertEquals(missingValue(), eval(DSL.locate(missingRef, DSL.literal("hello")))); - assertEquals(missingValue(), eval( - DSL.locate(missingRef, DSL.literal("hello"), DSL.literal(1)))); + assertEquals( + missingValue(), eval(DSL.locate(missingRef, DSL.literal("hello"), DSL.literal(1)))); } @Test void position() { - FunctionExpression expression = DSL.position( - DSL.literal("world"), - DSL.literal("helloworldworld")); + FunctionExpression expression = + DSL.position(DSL.literal("world"), DSL.literal("helloworldworld")); assertEquals(INTEGER, expression.type()); assertEquals(6, eval(expression).integerValue()); - expression = DSL.position( - DSL.literal("abc"), - DSL.literal("hello world")); + expression = DSL.position(DSL.literal("abc"), DSL.literal("hello world")); assertEquals(INTEGER, expression.type()); assertEquals(0, eval(expression).integerValue()); @@ -404,10 +391,8 @@ void position() { @Test void replace() { - FunctionExpression expression = DSL.replace( - DSL.literal("helloworld"), - DSL.literal("world"), - DSL.literal("opensearch")); + FunctionExpression expression = + DSL.replace(DSL.literal("helloworld"), DSL.literal("world"), DSL.literal("opensearch")); assertEquals(STRING, expression.type()); assertEquals("helloopensearch", eval(expression).stringValue()); @@ -435,18 +420,17 @@ void testConcatString(List strings) { expected = String.join("", strings); } - FunctionExpression expression = DSL.concat( - DSL.literal(strings.get(0)), DSL.literal(strings.get(1))); + FunctionExpression expression = + DSL.concat(DSL.literal(strings.get(0)), DSL.literal(strings.get(1))); assertEquals(STRING, expression.type()); assertEquals(expected, eval(expression).stringValue()); } void testConcatString(List strings, String delim) { - String expected = strings.stream() 
- .filter(Objects::nonNull).collect(Collectors.joining(",")); + String expected = strings.stream().filter(Objects::nonNull).collect(Collectors.joining(",")); - FunctionExpression expression = DSL.concat_ws( - DSL.literal(delim), DSL.literal(strings.get(0)), DSL.literal(strings.get(1))); + FunctionExpression expression = + DSL.concat_ws(DSL.literal(delim), DSL.literal(strings.get(0)), DSL.literal(strings.get(1))); assertEquals(STRING, expression.type()); assertEquals(expected, eval(expression).stringValue()); } @@ -457,7 +441,8 @@ void testConcatMultipleString(List strings) { expected = String.join("", strings); } - FunctionExpression expression = DSL.concat( + FunctionExpression expression = + DSL.concat( DSL.literal(strings.get(0)), DSL.literal(strings.get(1)), DSL.literal(strings.get(2)), @@ -474,7 +459,8 @@ void testLengthString(String str) { } void testStcmpString(StringPatternPair stringPatternPair) { - FunctionExpression expression = DSL.strcmp( + FunctionExpression expression = + DSL.strcmp( DSL.literal(new ExprStringValue(stringPatternPair.getStr())), DSL.literal(new ExprStringValue(stringPatternPair.getPatt()))); assertEquals(INTEGER, expression.type()); diff --git a/core/src/test/java/org/opensearch/sql/expression/window/CurrentRowWindowFrameTest.java b/core/src/test/java/org/opensearch/sql/expression/window/CurrentRowWindowFrameTest.java index f2e54f9654..3964f36fb7 100644 --- a/core/src/test/java/org/opensearch/sql/expression/window/CurrentRowWindowFrameTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/window/CurrentRowWindowFrameTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -29,10 +28,11 @@ class CurrentRowWindowFrameTest { - private final CurrentRowWindowFrame windowFrame = new CurrentRowWindowFrame( - new WindowDefinition( - ImmutableList.of(DSL.ref("state", STRING)), - 
ImmutableList.of(ImmutablePair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); + private final CurrentRowWindowFrame windowFrame = + new CurrentRowWindowFrame( + new WindowDefinition( + ImmutableList.of(DSL.ref("state", STRING)), + ImmutableList.of(ImmutablePair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); @Test void test_iterator_methods() { @@ -42,17 +42,21 @@ void test_iterator_methods() { @Test void should_return_new_partition_if_partition_by_field_value_changed() { - PeekingIterator iterator = Iterators.peekingIterator( - Iterators.forArray( - ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), - "age", new ExprIntegerValue(20))), - ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), - "age", new ExprIntegerValue(30))), - ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("CA"), - "age", new ExprIntegerValue(18))))); + PeekingIterator iterator = + Iterators.peekingIterator( + Iterators.forArray( + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), + "age", new ExprIntegerValue(20))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), + "age", new ExprIntegerValue(30))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("CA"), + "age", new ExprIntegerValue(18))))); windowFrame.load(iterator); assertTrue(windowFrame.isNewPartition()); @@ -66,24 +70,28 @@ void should_return_new_partition_if_partition_by_field_value_changed() { @Test void can_resolve_single_expression_value() { - windowFrame.load(Iterators.peekingIterator( - Iterators.singletonIterator( - ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), - "age", new ExprIntegerValue(20)))))); - assertEquals( - new ExprIntegerValue(20), - windowFrame.resolve(DSL.ref("age", INTEGER))); + windowFrame.load( + Iterators.peekingIterator( + Iterators.singletonIterator( + 
ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), + "age", new ExprIntegerValue(20)))))); + assertEquals(new ExprIntegerValue(20), windowFrame.resolve(DSL.ref("age", INTEGER))); } @Test void can_return_previous_and_current_row() { - ExprValue row1 = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), - "age", new ExprIntegerValue(20))); - ExprValue row2 = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), - "age", new ExprIntegerValue(30))); + ExprValue row1 = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), + "age", new ExprIntegerValue(20))); + ExprValue row2 = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), + "age", new ExprIntegerValue(30))); PeekingIterator iterator = Iterators.peekingIterator(Iterators.forArray(row1, row2)); windowFrame.load(iterator); @@ -94,5 +102,4 @@ void can_return_previous_and_current_row() { assertEquals(row1, windowFrame.previous()); assertEquals(row2, windowFrame.current()); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunctionTest.java index d7062d4e47..b728a413ba 100644 --- a/core/src/test/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -27,9 +26,7 @@ import org.opensearch.sql.expression.aggregation.Aggregator; import org.opensearch.sql.expression.window.frame.PeerRowsWindowFrame; -/** - * Aggregate window function test collection. 
- */ +/** Aggregate window function test collection. */ @SuppressWarnings("unchecked") @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) @@ -56,15 +53,17 @@ void should_accumulate_all_peer_values_and_not_reset_state_if_same_partition() { new AggregateWindowFunction(DSL.sum(DSL.ref("age", INTEGER))); when(windowFrame.isNewPartition()).thenReturn(true); - when(windowFrame.next()).thenReturn(ImmutableList.of( - fromExprValueMap(ImmutableMap.of("age", new ExprIntegerValue(10))), - fromExprValueMap(ImmutableMap.of("age", new ExprIntegerValue(20))))); + when(windowFrame.next()) + .thenReturn( + ImmutableList.of( + fromExprValueMap(ImmutableMap.of("age", new ExprIntegerValue(10))), + fromExprValueMap(ImmutableMap.of("age", new ExprIntegerValue(20))))); assertEquals(new ExprIntegerValue(30), windowFunction.valueOf(windowFrame)); when(windowFrame.isNewPartition()).thenReturn(false); - when(windowFrame.next()).thenReturn(ImmutableList.of( - fromExprValueMap(ImmutableMap.of("age", new ExprIntegerValue(30))))); + when(windowFrame.next()) + .thenReturn( + ImmutableList.of(fromExprValueMap(ImmutableMap.of("age", new ExprIntegerValue(30))))); assertEquals(new ExprIntegerValue(60), windowFunction.valueOf(windowFrame)); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrameTest.java b/core/src/test/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrameTest.java index 26f6bb2f5e..1da2e50322 100644 --- a/core/src/test/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrameTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrameTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.frame; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -34,15 +33,16 @@ @ExtendWith(MockitoExtension.class) class PeerRowsWindowFrameTest { - private final 
PeerRowsWindowFrame windowFrame = new PeerRowsWindowFrame( - new WindowDefinition( - ImmutableList.of(DSL.ref("state", STRING)), - ImmutableList.of(Pair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); + private final PeerRowsWindowFrame windowFrame = + new PeerRowsWindowFrame( + new WindowDefinition( + ImmutableList.of(DSL.ref("state", STRING)), + ImmutableList.of(Pair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); @Test void test_single_row() { - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.singletonIterator(tuple("WA", 10, 100))); + PeekingIterator tuples = + Iterators.peekingIterator(Iterators.singletonIterator(tuple("WA", 10, 100))); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); assertEquals(ImmutableList.of(tuple("WA", 10, 100)), windowFrame.next()); @@ -50,11 +50,9 @@ void test_single_row() { @Test void test_single_partition_with_no_more_rows_after_peers() { - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.forArray( - tuple("WA", 10, 100), - tuple("WA", 20, 200), - tuple("WA", 20, 50))); + PeekingIterator tuples = + Iterators.peekingIterator( + Iterators.forArray(tuple("WA", 10, 100), tuple("WA", 20, 200), tuple("WA", 20, 50))); // Here we simulate how WindowFrame interacts with WindowOperator which calls load() // and WindowFunction which calls isNewPartition() and move() @@ -64,9 +62,7 @@ void test_single_partition_with_no_more_rows_after_peers() { windowFrame.load(tuples); assertFalse(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of(tuple("WA", 20, 200), tuple("WA", 20, 50)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 20, 200), tuple("WA", 20, 50)), windowFrame.next()); windowFrame.load(tuples); assertFalse(windowFrame.isNewPartition()); @@ -75,180 +71,124 @@ void test_single_partition_with_no_more_rows_after_peers() { @Test void test_single_partition_with_more_rows_after_peers() { - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.forArray( - 
tuple("WA", 10, 100), - tuple("WA", 20, 200), - tuple("WA", 20, 50), - tuple("WA", 35, 150))); + PeekingIterator tuples = + Iterators.peekingIterator( + Iterators.forArray( + tuple("WA", 10, 100), + tuple("WA", 20, 200), + tuple("WA", 20, 50), + tuple("WA", 35, 150))); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 10, 100)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 10, 100)), windowFrame.next()); windowFrame.load(tuples); assertFalse(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 20, 200), - tuple("WA", 20, 50)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 20, 200), tuple("WA", 20, 50)), windowFrame.next()); windowFrame.load(tuples); assertFalse(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of(), - windowFrame.next()); + assertEquals(ImmutableList.of(), windowFrame.next()); windowFrame.load(tuples); assertFalse(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 35, 150)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 35, 150)), windowFrame.next()); } @Test void test_two_partitions_with_all_same_peers_in_second_partition() { - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.forArray( - tuple("WA", 10, 100), - tuple("CA", 18, 150), - tuple("CA", 18, 100))); + PeekingIterator tuples = + Iterators.peekingIterator( + Iterators.forArray(tuple("WA", 10, 100), tuple("CA", 18, 150), tuple("CA", 18, 100))); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 10, 100)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 10, 100)), windowFrame.next()); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("CA", 18, 150), - tuple("CA", 18, 100)), - windowFrame.next()); + 
assertEquals(ImmutableList.of(tuple("CA", 18, 150), tuple("CA", 18, 100)), windowFrame.next()); windowFrame.load(tuples); assertFalse(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of(), - windowFrame.next()); + assertEquals(ImmutableList.of(), windowFrame.next()); } @Test void test_two_partitions_with_single_row_in_each_partition() { - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.forArray( - tuple("WA", 10, 100), - tuple("CA", 30, 200))); + PeekingIterator tuples = + Iterators.peekingIterator(Iterators.forArray(tuple("WA", 10, 100), tuple("CA", 30, 200))); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 10, 100)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 10, 100)), windowFrame.next()); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("CA", 30, 200)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("CA", 30, 200)), windowFrame.next()); } @Test void test_window_definition_with_no_partition_by() { - PeerRowsWindowFrame windowFrame = new PeerRowsWindowFrame( - new WindowDefinition( - ImmutableList.of(), - ImmutableList.of(Pair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); + PeerRowsWindowFrame windowFrame = + new PeerRowsWindowFrame( + new WindowDefinition( + ImmutableList.of(), + ImmutableList.of(Pair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.forArray( - tuple("WA", 10, 100), - tuple("CA", 30, 200))); + PeekingIterator tuples = + Iterators.peekingIterator(Iterators.forArray(tuple("WA", 10, 100), tuple("CA", 30, 200))); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 10, 100)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 10, 100)), windowFrame.next()); windowFrame.load(tuples); 
assertFalse(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("CA", 30, 200)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("CA", 30, 200)), windowFrame.next()); } @Test void test_window_definition_with_no_order_by() { - PeerRowsWindowFrame windowFrame = new PeerRowsWindowFrame( - new WindowDefinition( - ImmutableList.of(DSL.ref("state", STRING)), - ImmutableList.of())); + PeerRowsWindowFrame windowFrame = + new PeerRowsWindowFrame( + new WindowDefinition(ImmutableList.of(DSL.ref("state", STRING)), ImmutableList.of())); - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.forArray( - tuple("WA", 10, 100), - tuple("CA", 30, 200))); + PeekingIterator tuples = + Iterators.peekingIterator(Iterators.forArray(tuple("WA", 10, 100), tuple("CA", 30, 200))); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 10, 100)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 10, 100)), windowFrame.next()); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("CA", 30, 200)), - windowFrame.next()); + assertEquals(ImmutableList.of(tuple("CA", 30, 200)), windowFrame.next()); } @Test void test_window_definition_with_no_partition_by_and_order_by() { - PeerRowsWindowFrame windowFrame = new PeerRowsWindowFrame( - new WindowDefinition( - ImmutableList.of(), - ImmutableList.of())); + PeerRowsWindowFrame windowFrame = + new PeerRowsWindowFrame(new WindowDefinition(ImmutableList.of(), ImmutableList.of())); - PeekingIterator tuples = Iterators.peekingIterator( - Iterators.forArray( - tuple("WA", 10, 100), - tuple("CA", 30, 200))); + PeekingIterator tuples = + Iterators.peekingIterator(Iterators.forArray(tuple("WA", 10, 100), tuple("CA", 30, 200))); windowFrame.load(tuples); assertTrue(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of( - tuple("WA", 10, 100), - tuple("CA", 30, 200)), - 
windowFrame.next()); + assertEquals(ImmutableList.of(tuple("WA", 10, 100), tuple("CA", 30, 200)), windowFrame.next()); windowFrame.load(tuples); assertFalse(windowFrame.isNewPartition()); - assertEquals( - ImmutableList.of(), - windowFrame.next()); + assertEquals(ImmutableList.of(), windowFrame.next()); } private ExprValue tuple(String state, int age, int balance) { - return fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue(state), - "age", new ExprIntegerValue(age), - "balance", new ExprIntegerValue(balance))); + return fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue(state), + "age", new ExprIntegerValue(age), + "balance", new ExprIntegerValue(balance))); } - } diff --git a/core/src/test/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunctionTest.java index 7b2b5a42f2..dc2f8fd147 100644 --- a/core/src/test/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -31,57 +30,72 @@ import org.opensearch.sql.expression.window.WindowDefinition; import org.opensearch.sql.expression.window.frame.CurrentRowWindowFrame; -/** - * Rank window function test collection. - */ +/** Rank window function test collection. 
*/ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) class RankingWindowFunctionTest extends ExpressionTestBase { - private final CurrentRowWindowFrame windowFrame1 = new CurrentRowWindowFrame( - new WindowDefinition( - ImmutableList.of(DSL.ref("state", STRING)), - ImmutableList.of(Pair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); + private final CurrentRowWindowFrame windowFrame1 = + new CurrentRowWindowFrame( + new WindowDefinition( + ImmutableList.of(DSL.ref("state", STRING)), + ImmutableList.of(Pair.of(DEFAULT_ASC, DSL.ref("age", INTEGER))))); - private final CurrentRowWindowFrame windowFrame2 = new CurrentRowWindowFrame( - new WindowDefinition( - ImmutableList.of(DSL.ref("state", STRING)), - ImmutableList.of())); // No sort items defined + private final CurrentRowWindowFrame windowFrame2 = + new CurrentRowWindowFrame( + new WindowDefinition( + ImmutableList.of(DSL.ref("state", STRING)), + ImmutableList.of())); // No sort items defined private PeekingIterator iterator1; private PeekingIterator iterator2; @BeforeEach void set_up() { - iterator1 = Iterators.peekingIterator(Iterators.forArray( - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(40))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("CA"), "age", new ExprIntegerValue(20))))); - - iterator2 = Iterators.peekingIterator(Iterators.forArray( - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(50))), - 
fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(55))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("CA"), "age", new ExprIntegerValue(15))))); + iterator1 = + Iterators.peekingIterator( + Iterators.forArray( + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(40))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("CA"), "age", new ExprIntegerValue(20))))); + + iterator2 = + Iterators.peekingIterator( + Iterators.forArray( + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(50))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(55))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("CA"), "age", new ExprIntegerValue(15))))); } @Test void test_value_of() { - PeekingIterator iterator = Iterators.peekingIterator( - Iterators.singletonIterator( - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))))); + PeekingIterator iterator = + Iterators.peekingIterator( + Iterators.singletonIterator( + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))))); RankingWindowFunction rowNumber = DSL.rowNumber(); @@ -165,18 +179,24 @@ void row_number_should_work_if_no_sort_items_defined() { @Test void rank_should_always_return_1_if_no_sort_items_defined() { - 
PeekingIterator iterator = Iterators.peekingIterator( - Iterators.forArray( - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(50))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(55))), - fromExprValueMap(ImmutableMap.of( - "state", new ExprStringValue("CA"), "age", new ExprIntegerValue(15))))); + PeekingIterator iterator = + Iterators.peekingIterator( + Iterators.forArray( + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(30))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(50))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("WA"), "age", new ExprIntegerValue(55))), + fromExprValueMap( + ImmutableMap.of( + "state", new ExprStringValue("CA"), "age", new ExprIntegerValue(15))))); RankingWindowFunction rank = DSL.rank(); @@ -215,5 +235,4 @@ void dense_rank_should_always_return_1_if_no_sort_items_defined() { windowFrame2.load(iterator2); assertEquals(1, denseRank.rank(windowFrame2)); } - } diff --git a/core/src/test/java/org/opensearch/sql/monitor/AlwaysHealthyMonitorTest.java b/core/src/test/java/org/opensearch/sql/monitor/AlwaysHealthyMonitorTest.java index 676f59a44b..9eb7a816a7 100644 --- a/core/src/test/java/org/opensearch/sql/monitor/AlwaysHealthyMonitorTest.java +++ b/core/src/test/java/org/opensearch/sql/monitor/AlwaysHealthyMonitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.monitor; import static org.junit.jupiter.api.Assertions.assertTrue; 
diff --git a/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java b/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java index 2d233e9a6f..45d8f6c03c 100644 --- a/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java @@ -81,8 +81,7 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class DefaultImplementorTest { - @Mock - private Table table; + @Mock private Table table; private final DefaultImplementor implementor = new DefaultImplementor<>(); @@ -107,50 +106,43 @@ public void visit_should_return_default_physical_operator() { ImmutablePair.of(Sort.SortOption.DEFAULT_ASC, ref("name1", STRING)); Integer limit = 1; Integer offset = 1; - List> nestedArgs = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> nestedArgs = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List nestedProjectList = List.of( - new NamedExpression( - "message.info", - DSL.nested(DSL.ref("message.info", STRING)), - null - ) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); Set nestedOperatorArgs = Set.of("message.info"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.info")); - + Map> groupedFieldsByPath = Map.of("message", List.of("message.info")); LogicalPlan plan = project( nested( - limit( - LogicalPlanDSL.dedupe( - rareTopN( - sort( - eval( - remove( - rename( - aggregation( - filter(values(emptyList()), filterExpr), - aggregators, - groupByExprs), - mappings), - exclude), - newEvalField), - sortField), - CommandType.TOP, - topByExprs, - rareTopNField), - dedupeField), - limit, - offset), - nestedArgs, nestedProjectList), + limit( + LogicalPlanDSL.dedupe( + rareTopN( + sort( + eval( 
+ remove( + rename( + aggregation( + filter(values(emptyList()), filterExpr), + aggregators, + groupByExprs), + mappings), + exclude), + newEvalField), + sortField), + CommandType.TOP, + topByExprs, + rareTopNField), + dedupeField), + limit, + offset), + nestedArgs, + nestedProjectList), include); PhysicalPlan actual = plan.accept(implementor, null); @@ -158,37 +150,39 @@ public void visit_should_return_default_physical_operator() { assertEquals( PhysicalPlanDSL.project( PhysicalPlanDSL.nested( - PhysicalPlanDSL.limit( - PhysicalPlanDSL.dedupe( - PhysicalPlanDSL.rareTopN( - PhysicalPlanDSL.sort( - PhysicalPlanDSL.eval( - PhysicalPlanDSL.remove( - PhysicalPlanDSL.rename( - PhysicalPlanDSL.agg( - PhysicalPlanDSL.filter( - PhysicalPlanDSL.values(emptyList()), - filterExpr), - aggregators, - groupByExprs), - mappings), - exclude), - newEvalField), - sortField), - CommandType.TOP, - topByExprs, - rareTopNField), - dedupeField), - limit, - offset), - nestedOperatorArgs, groupedFieldsByPath), + PhysicalPlanDSL.limit( + PhysicalPlanDSL.dedupe( + PhysicalPlanDSL.rareTopN( + PhysicalPlanDSL.sort( + PhysicalPlanDSL.eval( + PhysicalPlanDSL.remove( + PhysicalPlanDSL.rename( + PhysicalPlanDSL.agg( + PhysicalPlanDSL.filter( + PhysicalPlanDSL.values(emptyList()), + filterExpr), + aggregators, + groupByExprs), + mappings), + exclude), + newEvalField), + sortField), + CommandType.TOP, + topByExprs, + rareTopNField), + dedupeField), + limit, + offset), + nestedOperatorArgs, + groupedFieldsByPath), include), actual); } @Test public void visitRelation_should_throw_an_exception() { - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> new LogicalRelation("test", table).accept(implementor, null)); } @@ -196,36 +190,27 @@ public void visitRelation_should_throw_an_exception() { @Test public void visitWindowOperator_should_return_PhysicalWindowOperator() { NamedExpression windowFunction = named(new RowNumberFunction()); - 
WindowDefinition windowDefinition = new WindowDefinition( - Collections.singletonList(ref("state", STRING)), - Collections.singletonList( - ImmutablePair.of(Sort.SortOption.DEFAULT_DESC, ref("age", INTEGER)))); + WindowDefinition windowDefinition = + new WindowDefinition( + Collections.singletonList(ref("state", STRING)), + Collections.singletonList( + ImmutablePair.of(Sort.SortOption.DEFAULT_DESC, ref("age", INTEGER)))); NamedExpression[] projectList = { - named("state", ref("state", STRING)), - named("row_number", ref("row_number", INTEGER)) + named("state", ref("state", STRING)), named("row_number", ref("row_number", INTEGER)) }; Pair[] sortList = { - ImmutablePair.of(Sort.SortOption.DEFAULT_ASC, ref("state", STRING)), - ImmutablePair.of(Sort.SortOption.DEFAULT_DESC, ref("age", STRING)) + ImmutablePair.of(Sort.SortOption.DEFAULT_ASC, ref("state", STRING)), + ImmutablePair.of(Sort.SortOption.DEFAULT_DESC, ref("age", STRING)) }; LogicalPlan logicalPlan = - project( - window( - sort( - values(), - sortList), - windowFunction, - windowDefinition), - projectList); + project(window(sort(values(), sortList), windowFunction, windowDefinition), projectList); PhysicalPlan physicalPlan = PhysicalPlanDSL.project( PhysicalPlanDSL.window( - PhysicalPlanDSL.sort( - PhysicalPlanDSL.values(), - sortList), + PhysicalPlanDSL.sort(PhysicalPlanDSL.values(), sortList), windowFunction, windowDefinition), projectList); @@ -238,20 +223,22 @@ void visitLogicalCursor_deserializes_it() { var engine = mock(StorageEngine.class); var physicalPlan = new TestOperator(); - var logicalPlan = LogicalPlanDSL.fetchCursor(new PlanSerializer(engine) - .convertToCursor(physicalPlan).toString(), engine); + var logicalPlan = + LogicalPlanDSL.fetchCursor( + new PlanSerializer(engine).convertToCursor(physicalPlan).toString(), engine); assertEquals(physicalPlan, logicalPlan.accept(implementor, null)); } @Test public void visitTableScanBuilder_should_build_TableScanOperator() { TableScanOperator 
tableScanOperator = mock(TableScanOperator.class); - TableScanBuilder tableScanBuilder = new TableScanBuilder() { - @Override - public TableScanOperator build() { - return tableScanOperator; - } - }; + TableScanBuilder tableScanBuilder = + new TableScanBuilder() { + @Override + public TableScanOperator build() { + return tableScanOperator; + } + }; assertEquals(tableScanOperator, tableScanBuilder.accept(implementor, null)); } @@ -259,12 +246,13 @@ public TableScanOperator build() { public void visitTableWriteBuilder_should_build_TableWriteOperator() { LogicalPlan child = values(); TableWriteOperator tableWriteOperator = mock(TableWriteOperator.class); - TableWriteBuilder logicalPlan = new TableWriteBuilder(child) { - @Override - public TableWriteOperator build(PhysicalPlan child) { - return tableWriteOperator; - } - }; + TableWriteBuilder logicalPlan = + new TableWriteBuilder(child) { + @Override + public TableWriteOperator build(PhysicalPlan child) { + return tableWriteOperator; + } + }; assertEquals(tableWriteOperator, logicalPlan.accept(implementor, null)); } @@ -281,11 +269,13 @@ public void visitCloseCursor_should_build_CursorCloseOperator() { @Test public void visitPaginate_should_remove_it_from_tree() { - var logicalPlanTree = new LogicalPaginate(42, List.of( - new LogicalProject( - new LogicalValues(List.of(List.of())), List.of(), List.of()))); - var physicalPlanTree = new ProjectOperator( - new ValuesOperator(List.of(List.of())), List.of(), List.of()); + var logicalPlanTree = + new LogicalPaginate( + 42, + List.of( + new LogicalProject(new LogicalValues(List.of(List.of())), List.of(), List.of()))); + var physicalPlanTree = + new ProjectOperator(new ValuesOperator(List.of(List.of())), List.of(), List.of()); assertEquals(physicalPlanTree, logicalPlanTree.accept(implementor, null)); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/PlanContextTest.java b/core/src/test/java/org/opensearch/sql/planner/PlanContextTest.java index 
77ae78f77e..76ccb6613a 100644 --- a/core/src/test/java/org/opensearch/sql/planner/PlanContextTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/PlanContextTest.java @@ -16,8 +16,7 @@ @ExtendWith(MockitoExtension.class) class PlanContextTest { - @Mock - private Split split; + @Mock private Split split; @Test void createEmptyPlanContext() { diff --git a/core/src/test/java/org/opensearch/sql/planner/PlannerTest.java b/core/src/test/java/org/opensearch/sql/planner/PlannerTest.java index 64498f76cd..6ad44c303b 100644 --- a/core/src/test/java/org/opensearch/sql/planner/PlannerTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/PlannerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner; import static java.util.Collections.emptyList; @@ -47,14 +46,11 @@ @ExtendWith(MockitoExtension.class) public class PlannerTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan scan; + @Mock private PhysicalPlan scan; - @Mock - private StorageEngine storageEngine; + @Mock private StorageEngine storageEngine; - @Mock - private LogicalPlanOptimizer optimizer; + @Mock private LogicalPlanOptimizer optimizer; @BeforeEach public void setUp() { @@ -68,29 +64,22 @@ public void planner_test() { PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( PhysicalPlanDSL.filter( - scan, - DSL.equal(DSL.ref("response", INTEGER), DSL.literal(10)) - ), + scan, DSL.equal(DSL.ref("response", INTEGER), DSL.literal(10))), ImmutableList.of(DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), - ImmutableList.of() - ), - ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", DOUBLE)) - ), + ImmutableList.of()), + ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", DOUBLE))), LogicalPlanDSL.rename( LogicalPlanDSL.aggregation( LogicalPlanDSL.filter( - LogicalPlanDSL.relation("schema", + LogicalPlanDSL.relation( + "schema", storageEngine.getTable( new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, 
"default"), - "schema")), - DSL.equal(DSL.ref("response", INTEGER), DSL.literal(10)) - ), + "schema")), + DSL.equal(DSL.ref("response", INTEGER), DSL.literal(10))), ImmutableList.of(DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), - ImmutableList.of() - ), - ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", DOUBLE)) - ) - ); + ImmutableList.of()), + ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", DOUBLE)))); } @Test @@ -103,15 +92,12 @@ public void plan_a_query_without_relation_involved() { PhysicalPlanDSL.values(emptyList()), DSL.named("123", DSL.literal(123)), DSL.named("hello", DSL.literal("hello")), - DSL.named("false", DSL.literal(false)) - ), + DSL.named("false", DSL.literal(false))), LogicalPlanDSL.project( LogicalPlanDSL.values(emptyList()), DSL.named("123", DSL.literal(123)), DSL.named("hello", DSL.literal("hello")), - DSL.named("false", DSL.literal(false)) - ) - ); + DSL.named("false", DSL.literal(false)))); } protected void assertPhysicalPlan(PhysicalPlan expected, LogicalPlan logicalPlan) { @@ -156,15 +142,15 @@ public PhysicalPlan visitFilter(LogicalFilter plan, Object context) { @Override public PhysicalPlan visitAggregation(LogicalAggregation plan, Object context) { - return new AggregationOperator(plan.getChild().get(0).accept(this, context), - plan.getAggregatorList(), plan.getGroupByList() - ); + return new AggregationOperator( + plan.getChild().get(0).accept(this, context), + plan.getAggregatorList(), + plan.getGroupByList()); } @Override public PhysicalPlan visitRename(LogicalRename plan, Object context) { - return new RenameOperator(plan.getChild().get(0).accept(this, context), - plan.getRenameMap()); + return new RenameOperator(plan.getChild().get(0).accept(this, context), plan.getRenameMap()); } } } diff --git a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalDedupeTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalDedupeTest.java index 
fe751506ce..d128885a6a 100644 --- a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalDedupeTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalDedupeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import static org.opensearch.sql.ast.dsl.AstDSL.argument; @@ -30,10 +29,7 @@ public void analyze_dedup_with_two_field_with_default_option() { DSL.ref("integer_value", INTEGER), DSL.ref("double_value", DOUBLE)), dedupe( - relation("schema"), - defaultDedupArgs(), - field("integer_value"), field("double_value") - )); + relation("schema"), defaultDedupArgs(), field("integer_value"), field("double_value"))); } @Test @@ -41,7 +37,9 @@ public void analyze_dedup_with_one_field_with_customize_option() { assertAnalyzeEqual( LogicalPlanDSL.dedupe( LogicalPlanDSL.relation("schema", table), - 3, false, true, + 3, + false, + true, DSL.ref("integer_value", INTEGER), DSL.ref("double_value", DOUBLE)), dedupe( @@ -49,9 +47,8 @@ public void analyze_dedup_with_one_field_with_customize_option() { exprList( argument("number", intLiteral(3)), argument("keepempty", booleanLiteral(false)), - argument("consecutive", booleanLiteral(true)) - ), - field("integer_value"), field("double_value") - )); + argument("consecutive", booleanLiteral(true))), + field("integer_value"), + field("double_value"))); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalEvalTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalEvalTest.java index 55bc793045..746441ef4a 100644 --- a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalEvalTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalEvalTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import static org.opensearch.sql.ast.dsl.AstDSL.field; @@ -25,8 +24,8 @@ public void analyze_eval_with_one_field() { assertAnalyzeEqual( 
LogicalPlanDSL.eval( LogicalPlanDSL.relation("schema", table), - ImmutablePair - .of(DSL.ref("absValue", INTEGER), DSL.abs(DSL.ref("integer_value", INTEGER)))), + ImmutablePair.of( + DSL.ref("absValue", INTEGER), DSL.abs(DSL.ref("integer_value", INTEGER)))), AstDSL.eval( AstDSL.relation("schema"), AstDSL.let(AstDSL.field("absValue"), AstDSL.function("abs", field("integer_value"))))); @@ -37,8 +36,8 @@ public void analyze_eval_with_two_field() { assertAnalyzeEqual( LogicalPlanDSL.eval( LogicalPlanDSL.relation("schema", table), - ImmutablePair - .of(DSL.ref("absValue", INTEGER), DSL.abs(DSL.ref("integer_value", INTEGER))), + ImmutablePair.of( + DSL.ref("absValue", INTEGER), DSL.abs(DSL.ref("integer_value", INTEGER))), ImmutablePair.of(DSL.ref("iValue", INTEGER), DSL.abs(DSL.ref("absValue", INTEGER)))), AstDSL.eval( AstDSL.relation("schema"), diff --git a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java index d4d5c89c9b..74aab31a30 100644 --- a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -44,9 +43,7 @@ import org.opensearch.sql.storage.write.TableWriteBuilder; import org.opensearch.sql.storage.write.TableWriteOperator; -/** - * Todo. Temporary added for UT coverage, Will be removed. 
- */ +/** Added for UT coverage */ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class LogicalPlanNodeVisitorTest { @@ -84,50 +81,59 @@ public void logical_plan_should_be_traversable() { @SuppressWarnings("unchecked") private static Stream getLogicalPlansForVisitorTest() { LogicalPlan relation = LogicalPlanDSL.relation("schema", table); - LogicalPlan tableScanBuilder = new TableScanBuilder() { - @Override - public TableScanOperator build() { - return null; - } - }; - TableWriteBuilder tableWriteBuilder = new TableWriteBuilder(null) { - @Override - public TableWriteOperator build(PhysicalPlan child) { - return null; - } - }; + LogicalPlan tableScanBuilder = + new TableScanBuilder() { + @Override + public TableScanOperator build() { + return null; + } + }; + TableWriteBuilder tableWriteBuilder = + new TableWriteBuilder(null) { + @Override + public TableWriteOperator build(PhysicalPlan child) { + return null; + } + }; LogicalPlan write = LogicalPlanDSL.write(null, table, Collections.emptyList()); LogicalPlan filter = LogicalPlanDSL.filter(relation, expression); - LogicalPlan aggregation = LogicalPlanDSL.aggregation( - filter, ImmutableList.of(DSL.named("avg", aggregator)), ImmutableList.of(DSL.named( - "group", expression))); + LogicalPlan aggregation = + LogicalPlanDSL.aggregation( + filter, + ImmutableList.of(DSL.named("avg", aggregator)), + ImmutableList.of(DSL.named("group", expression))); LogicalPlan rename = LogicalPlanDSL.rename(aggregation, ImmutableMap.of(ref, ref)); LogicalPlan project = LogicalPlanDSL.project(relation, named("ref", ref)); LogicalPlan remove = LogicalPlanDSL.remove(relation, ref); LogicalPlan eval = LogicalPlanDSL.eval(relation, Pair.of(ref, expression)); LogicalPlan sort = LogicalPlanDSL.sort(relation, Pair.of(SortOption.DEFAULT_ASC, expression)); LogicalPlan dedup = LogicalPlanDSL.dedupe(relation, 1, false, false, expression); - LogicalPlan window = LogicalPlanDSL.window(relation, named(expression), new 
WindowDefinition( - ImmutableList.of(ref), ImmutableList.of(Pair.of(SortOption.DEFAULT_ASC, expression)))); - LogicalPlan rareTopN = LogicalPlanDSL.rareTopN( - relation, CommandType.TOP, ImmutableList.of(expression), expression); - LogicalPlan highlight = new LogicalHighlight(filter, - new LiteralExpression(ExprValueUtils.stringValue("fieldA")), Map.of()); + LogicalPlan window = + LogicalPlanDSL.window( + relation, + named(expression), + new WindowDefinition( + ImmutableList.of(ref), + ImmutableList.of(Pair.of(SortOption.DEFAULT_ASC, expression)))); + LogicalPlan rareTopN = + LogicalPlanDSL.rareTopN( + relation, CommandType.TOP, ImmutableList.of(expression), expression); + LogicalPlan highlight = + new LogicalHighlight( + filter, new LiteralExpression(ExprValueUtils.stringValue("fieldA")), Map.of()); LogicalPlan mlCommons = new LogicalMLCommons(relation, "kmeans", Map.of()); LogicalPlan ad = new LogicalAD(relation, Map.of()); LogicalPlan ml = new LogicalML(relation, Map.of()); LogicalPlan paginate = new LogicalPaginate(42, List.of(relation)); - List> nestedArgs = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> nestedArgs = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, nestedArgs, projectList); @@ -136,20 +142,37 @@ public TableWriteOperator build(PhysicalPlan child) { LogicalCloseCursor closeCursor = new LogicalCloseCursor(cursor); return Stream.of( - relation, tableScanBuilder, write, tableWriteBuilder, filter, aggregation, rename, project, - remove, eval, sort, dedup, window, rareTopN, highlight, mlCommons, ad, ml, 
paginate, nested, - cursor, closeCursor - ).map(Arguments::of); + relation, + tableScanBuilder, + write, + tableWriteBuilder, + filter, + aggregation, + rename, + project, + remove, + eval, + sort, + dedup, + window, + rareTopN, + highlight, + mlCommons, + ad, + ml, + paginate, + nested, + cursor, + closeCursor) + .map(Arguments::of); } @ParameterizedTest @MethodSource("getLogicalPlansForVisitorTest") public void abstract_plan_node_visitor_should_return_null(LogicalPlan plan) { - assertNull(plan.accept(new LogicalPlanNodeVisitor() { - }, null)); + assertNull(plan.accept(new LogicalPlanNodeVisitor() {}, null)); } - private static class NodesCount extends LogicalPlanNodeVisitor { @Override public Integer visitRelation(LogicalRelation plan, Object context) { @@ -160,28 +183,36 @@ public Integer visitRelation(LogicalRelation plan, Object context) { public Integer visitFilter(LogicalFilter plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); + .map(child -> child.accept(this, context)) + .mapToInt(Integer::intValue) + .sum(); } @Override public Integer visitAggregation(LogicalAggregation plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); + .map(child -> child.accept(this, context)) + .mapToInt(Integer::intValue) + .sum(); } @Override public Integer visitRename(LogicalRename plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); + .map(child -> child.accept(this, context)) + .mapToInt(Integer::intValue) + .sum(); } @Override public Integer visitRareTopN(LogicalRareTopN plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); + .map(child -> child.accept(this, context)) + .mapToInt(Integer::intValue) + .sum(); } } } diff --git 
a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java index 9cf40d3c4d..7181c91da9 100644 --- a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalRelationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -17,8 +16,7 @@ @ExtendWith(MockitoExtension.class) class LogicalRelationTest { - @Mock - Table table; + @Mock Table table; @Test public void logicalRelationHasNoInput() { @@ -31,5 +29,4 @@ public void logicalRelationWithDataSourceHasNoInput() { LogicalPlan relation = LogicalPlanDSL.relation("prometheus.index", table); assertEquals(0, relation.getChild().size()); } - } diff --git a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalSortTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalSortTest.java index 1a33795009..8757473d24 100644 --- a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalSortTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalSortTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import static org.opensearch.sql.ast.dsl.AstDSL.argument; diff --git a/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java b/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java index faedb88111..2cdcb76e71 100644 --- a/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer; import static 
org.junit.jupiter.api.Assertions.assertEquals; @@ -61,117 +60,85 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class LogicalPlanOptimizerTest { - @Mock - private Table table; + @Mock private Table table; - @Spy - private TableScanBuilder tableScanBuilder; + @Spy private TableScanBuilder tableScanBuilder; @BeforeEach void setUp() { lenient().when(table.createScanBuilder()).thenReturn(tableScanBuilder); } - /** - * Filter - Filter --> Filter. - */ + /** Filter - Filter --> Filter. */ @Test void filter_merge_filter() { assertEquals( filter( tableScanBuilder, - DSL.and(DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(2))), - DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1)))) - ), + DSL.and( + DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(2))), + DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1))))), optimize( filter( filter( relation("schema", table), - DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1))) - ), - DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(2))) - ) - ) - ); + DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1)))), + DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(2)))))); } - /** - * Filter - Sort --> Sort - Filter. - */ + /** Filter - Sort --> Sort - Filter. 
*/ @Test void push_filter_under_sort() { assertEquals( sort( filter( tableScanBuilder, - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), optimize( filter( sort( relation("schema", table), - Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ) - ) - ); + Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))))); } - /** - * Filter - Sort --> Sort - Filter. - */ + /** Filter - Sort --> Sort - Filter. */ @Test void multiple_filter_should_eventually_be_merged() { assertEquals( sort( filter( tableScanBuilder, - DSL.and(DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))), - DSL.less(DSL.ref("longV", INTEGER), DSL.literal(longValue(1L)))) - ), - Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), + DSL.and( + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))), + DSL.less(DSL.ref("longV", INTEGER), DSL.literal(longValue(1L))))), + Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), optimize( filter( sort( filter( relation("schema", table), - DSL.less(DSL.ref("longV", INTEGER), DSL.literal(longValue(1L))) - ), - Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ) - ) - ); + DSL.less(DSL.ref("longV", INTEGER), DSL.literal(longValue(1L)))), + Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))))); } @Test void default_table_scan_builder_should_not_push_down_anything() { LogicalPlan[] plans = { - project( - relation("schema", table), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), - 
filter( - relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - aggregation( - relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - sort( - relation("schema", table), - Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER))), - limit( - relation("schema", table), - 1, 1) + project(relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER))), + filter( + relation("schema", table), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + aggregation( + relation("schema", table), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), + sort( + relation("schema", table), + Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER))), + limit(relation("schema", table), 1, 1) }; for (LogicalPlan plan : plans) { @@ -185,12 +152,7 @@ void table_scan_builder_support_project_push_down_can_apply_its_rule() { assertEquals( tableScanBuilder, - optimize( - project( - relation("schema", table), - DSL.named("i", DSL.ref("intV", INTEGER))) - ) - ); + optimize(project(relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER))))); } @Test @@ -202,9 +164,7 @@ void table_scan_builder_support_filter_push_down_can_apply_its_rule() { optimize( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))))); } @Test @@ -216,12 +176,8 @@ void table_scan_builder_support_aggregation_push_down_can_apply_its_rule() { optimize( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))) - ) - ); + ImmutableList.of(DSL.named("AVG(intV)", 
DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))))); } @Test @@ -233,23 +189,14 @@ void table_scan_builder_support_sort_push_down_can_apply_its_rule() { optimize( sort( relation("schema", table), - Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(Sort.SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER))))); } @Test void table_scan_builder_support_limit_push_down_can_apply_its_rule() { when(tableScanBuilder.pushDownLimit(any())).thenReturn(true); - assertEquals( - tableScanBuilder, - optimize( - limit( - relation("schema", table), - 1, 1) - ) - ); + assertEquals(tableScanBuilder, optimize(limit(relation("schema", table), 1, 1))); } @Test @@ -258,13 +205,7 @@ void table_scan_builder_support_highlight_push_down_can_apply_its_rule() { assertEquals( tableScanBuilder, - optimize( - highlight( - relation("schema", table), - DSL.literal("*"), - Collections.emptyMap()) - ) - ); + optimize(highlight(relation("schema", table), DSL.literal("*"), Collections.emptyMap()))); } @Test @@ -277,33 +218,27 @@ void table_scan_builder_support_nested_push_down_can_apply_its_rule() { nested( relation("schema", table), List.of(Map.of("field", new ReferenceExpression("message.info", STRING))), - List.of(new NamedExpression( - "message.info", - DSL.nested(DSL.ref("message.info", STRING)), - null)) - ) - ) - ); + List.of( + new NamedExpression( + "message.info", DSL.nested(DSL.ref("message.info", STRING)), null))))); } @Test void table_not_support_scan_builder_should_not_be_impact() { - Table table = new Table() { - @Override - public Map getFieldTypes() { - return null; - } - - @Override - public PhysicalPlan implement(LogicalPlan plan) { - return null; - } - }; - - assertEquals( - relation("schema", table), - optimize(relation("schema", table)) - ); + Table table = + new Table() { + @Override + public Map getFieldTypes() { + return null; + } + + @Override + public PhysicalPlan implement(LogicalPlan plan) { + 
return null; + } + }; + + assertEquals(relation("schema", table), optimize(relation("schema", table))); } @Test @@ -311,28 +246,25 @@ void table_support_write_builder_should_be_replaced() { TableWriteBuilder writeBuilder = Mockito.mock(TableWriteBuilder.class); when(table.createWriteBuilder(any())).thenReturn(writeBuilder); - assertEquals( - writeBuilder, - optimize(write(values(), table, Collections.emptyList())) - ); + assertEquals(writeBuilder, optimize(write(values(), table, Collections.emptyList()))); } @Test void table_not_support_write_builder_should_report_error() { - Table table = new Table() { - @Override - public Map getFieldTypes() { - return null; - } - - @Override - public PhysicalPlan implement(LogicalPlan plan) { - return null; - } - }; - - assertThrows(UnsupportedOperationException.class, - () -> table.createWriteBuilder(null)); + Table table = + new Table() { + @Override + public Map getFieldTypes() { + return null; + } + + @Override + public PhysicalPlan implement(LogicalPlan plan) { + return null; + } + }; + + assertThrows(UnsupportedOperationException.class, () -> table.createWriteBuilder(null)); } @Test @@ -340,8 +272,7 @@ void paged_table_scan_builder_support_project_push_down_can_apply_its_rule() { when(tableScanBuilder.pushDownPageSize(any())).thenReturn(true); var relation = relation("schema", table); - var optimized = LogicalPlanOptimizer.create() - .optimize(paginate(project(relation), 4)); + var optimized = LogicalPlanOptimizer.create().optimize(paginate(project(relation), 4)); verify(tableScanBuilder).pushDownPageSize(any()); assertEquals(project(tableScanBuilder), optimized); @@ -350,30 +281,29 @@ void paged_table_scan_builder_support_project_push_down_can_apply_its_rule() { @Test void push_down_page_size_multiple_children() { var relation = relation("schema", table); - var twoChildrenPlan = new LogicalPlan(List.of(relation, relation)) { - @Override - public R accept(LogicalPlanNodeVisitor visitor, C context) { - return null; - } - }; 
+ var twoChildrenPlan = + new LogicalPlan(List.of(relation, relation)) { + @Override + public R accept(LogicalPlanNodeVisitor visitor, C context) { + return null; + } + }; var queryPlan = paginate(twoChildrenPlan, 4); var optimizer = LogicalPlanOptimizer.create(); - final var exception = assertThrows(UnsupportedOperationException.class, - () -> optimizer.optimize(queryPlan)); - assertEquals("Unsupported plan: relation operator cannot have siblings", - exception.getMessage()); + final var exception = + assertThrows(UnsupportedOperationException.class, () -> optimizer.optimize(queryPlan)); + assertEquals( + "Unsupported plan: relation operator cannot have siblings", exception.getMessage()); } @Test void push_down_page_size_push_failed() { when(tableScanBuilder.pushDownPageSize(any())).thenReturn(false); - var queryPlan = paginate( - project( - relation("schema", table)), 4); + var queryPlan = paginate(project(relation("schema", table)), 4); var optimizer = LogicalPlanOptimizer.create(); - final var exception = assertThrows(IllegalStateException.class, - () -> optimizer.optimize(queryPlan)); + final var exception = + assertThrows(IllegalStateException.class, () -> optimizer.optimize(queryPlan)); assertEquals("Failed to push down LogicalPaginate", exception.getMessage()); } @@ -386,8 +316,7 @@ void push_page_size_noop_if_no_relation() { @Test void push_page_size_noop_if_no_sub_plans() { var paginate = new LogicalPaginate(42, List.of()); - assertEquals(paginate, - LogicalPlanOptimizer.create().optimize(paginate)); + assertEquals(paginate, LogicalPlanOptimizer.create().optimize(paginate)); } @Test @@ -395,8 +324,8 @@ void table_scan_builder_support_offset_push_down_can_apply_its_rule() { when(tableScanBuilder.pushDownPageSize(any())).thenReturn(true); var relation = new LogicalRelation("schema", table); - var optimized = LogicalPlanOptimizer.create() - .optimize(new LogicalPaginate(42, List.of(project(relation)))); + var optimized = + 
LogicalPlanOptimizer.create().optimize(new LogicalPaginate(42, List.of(project(relation)))); // `optimized` structure: LogicalProject -> TableScanBuilder // LogicalRelation replaced by a TableScanBuilder instance assertEquals(project(tableScanBuilder), optimized); diff --git a/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java b/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java index 2cab54d272..d1e1a03521 100644 --- a/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer.pattern; import static org.junit.jupiter.api.Assertions.assertAll; @@ -27,8 +26,8 @@ void source_is_empty() { when(plan.getChild()).thenReturn(Collections.emptyList()); assertAll( () -> assertFalse(Patterns.source().getFunction().apply(plan).isPresent()), - () -> assertFalse(Patterns.source(null).getProperty().getFunction().apply(plan).isPresent()) - ); + () -> + assertFalse(Patterns.source(null).getProperty().getFunction().apply(plan).isPresent())); } @Test @@ -36,7 +35,6 @@ void table_is_empty() { var plan = mock(LogicalFilter.class); assertAll( () -> assertFalse(Patterns.table().getFunction().apply(plan).isPresent()), - () -> assertFalse(Patterns.writeTable().getFunction().apply(plan).isPresent()) - ); + () -> assertFalse(Patterns.writeTable().getFunction().apply(plan).isPresent())); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java index 2585a21d7a..0f3f4bd61f 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -38,563 +37,733 @@ class AggregationOperatorTest extends PhysicalPlanTestBase { @Test public void sum_without_groups() { - PhysicalPlan plan = new AggregationOperator(new TestScan(), - Collections - .singletonList(DSL.named("sum(response)", DSL.sum(DSL.ref("response", INTEGER)))), - Collections.emptyList()); + PhysicalPlan plan = + new AggregationOperator( + new TestScan(), + Collections.singletonList( + DSL.named("sum(response)", DSL.sum(DSL.ref("response", INTEGER)))), + Collections.emptyList()); List result = execute(plan); assertEquals(1, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("sum(response)", 1504d)) - )); + assertThat( + result, + containsInAnyOrder(ExprValueUtils.tupleValue(ImmutableMap.of("sum(response)", 1504d)))); } @Test public void avg_with_one_groups() { - PhysicalPlan plan = new AggregationOperator(new TestScan(), - Collections - .singletonList(DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), - Collections.singletonList(DSL.named("action", DSL.ref("action", STRING)))); + PhysicalPlan plan = + new AggregationOperator( + new TestScan(), + Collections.singletonList( + DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), + Collections.singletonList(DSL.named("action", DSL.ref("action", STRING)))); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "avg(response)", 268d)), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "avg(response)", 350d)) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "avg(response)", 268d)), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "avg(response)", 350d)))); } @Test public void 
avg_with_two_groups() { - PhysicalPlan plan = new AggregationOperator(new TestScan(), - Collections - .singletonList(DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), - Arrays.asList(DSL.named("action", DSL.ref("action", STRING)), - DSL.named("ip", DSL.ref("ip", STRING)))); + PhysicalPlan plan = + new AggregationOperator( + new TestScan(), + Collections.singletonList( + DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), + Arrays.asList( + DSL.named("action", DSL.ref("action", STRING)), + DSL.named("ip", DSL.ref("ip", STRING)))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue( - ImmutableMap.of("action", "GET", "ip", "209.160.24.63", "avg(response)", 302d)), - ExprValueUtils.tupleValue( - ImmutableMap.of("action", "GET", "ip", "112.111.162.4", "avg(response)", 200d)), - ExprValueUtils.tupleValue( - ImmutableMap.of("action", "POST", "ip", "74.125.19.106", "avg(response)", 350d)) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("action", "GET", "ip", "209.160.24.63", "avg(response)", 302d)), + ExprValueUtils.tupleValue( + ImmutableMap.of("action", "GET", "ip", "112.111.162.4", "avg(response)", 200d)), + ExprValueUtils.tupleValue( + ImmutableMap.of("action", "POST", "ip", "74.125.19.106", "avg(response)", 350d)))); } @Test public void sum_with_one_groups() { - PhysicalPlan plan = new AggregationOperator(new TestScan(), - Collections - .singletonList(DSL.named("sum(response)", DSL.sum(DSL.ref("response", INTEGER)))), - Collections.singletonList(DSL.named("action", DSL.ref("action", STRING)))); + PhysicalPlan plan = + new AggregationOperator( + new TestScan(), + Collections.singletonList( + DSL.named("sum(response)", DSL.sum(DSL.ref("response", INTEGER)))), + Collections.singletonList(DSL.named("action", DSL.ref("action", STRING)))); List result = execute(plan); assertEquals(2, result.size()); - 
assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "sum(response)", 804)), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "sum(response)", 700)) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "sum(response)", 804)), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "sum(response)", 700)))); } @Test public void millisecond_span() { - PhysicalPlan plan = new AggregationOperator(testScan(datetimeInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("second", TIMESTAMP)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6 * 1000), "ms")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(datetimeInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("second", TIMESTAMP)))), + Collections.singletonList( + DSL.named( + "span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6 * 1000), "ms")))); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 2)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2021-01-01 00:00:12"), "count", 3)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprTimestampValue("2021-01-01 00:00:12"), "count", 3)))); } @Test public void second_span() { - PhysicalPlan plan = new AggregationOperator(testScan(datetimeInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("second", TIMESTAMP)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6), "s")))); + 
PhysicalPlan plan = + new AggregationOperator( + testScan(datetimeInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("second", TIMESTAMP)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6), "s")))); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 2)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2021-01-01 00:00:12"), "count", 3)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprTimestampValue("2021-01-01 00:00:12"), "count", 3)))); } @Test public void minute_span() { - PhysicalPlan plan = new AggregationOperator(testScan(datetimeInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("minute", DATETIME)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("minute", DATETIME), DSL.literal(5), "m")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(datetimeInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("minute", DATETIME)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("minute", DATETIME), DSL.literal(5), "m")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2020-12-31 23:50:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2021-01-01 00:05:00"), "count", 1)) - )); - - plan = new 
AggregationOperator(testScan(datetimeInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("hour", TIME)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("hour", TIME), DSL.literal(30), "m")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDatetimeValue("2020-12-31 23:50:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprDatetimeValue("2021-01-01 00:05:00"), "count", 1)))); + + plan = + new AggregationOperator( + testScan(datetimeInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("hour", TIME)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("hour", TIME), DSL.literal(30), "m")))); result = execute(plan); assertEquals(4, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimeValue("17:00:00"), "count", 2)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimeValue("18:00:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimeValue("18:30:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimeValue("19:00:00"), "count", 1)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimeValue("17:00:00"), "count", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimeValue("18:00:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimeValue("18:30:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimeValue("19:00:00"), "count", 1)))); } @Test public void hour_span() { - PhysicalPlan plan = new AggregationOperator(testScan(datetimeInputs), - Collections.singletonList(DSL - 
.named("count", DSL.count(DSL.ref("hour", TIME)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("hour", TIME), DSL.literal(1), "h")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(datetimeInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("hour", TIME)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("hour", TIME), DSL.literal(1), "h")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimeValue("17:00:00"), "count", 2)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimeValue("18:00:00"), "count", 2)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimeValue("19:00:00"), "count", 1)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimeValue("17:00:00"), "count", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimeValue("18:00:00"), "count", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimeValue("19:00:00"), "count", 1)))); } @Test public void day_span() { - PhysicalPlan plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count(day)", DSL.count(DSL.ref("day", DATE)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count(day)", DSL.count(DSL.ref("day", DATE)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")))); List result = execute(plan); assertEquals(4, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-01-01"), "count(day)", 2)), - 
ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-01-02"), "count(day)", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-01-03"), "count(day)", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-01-04"), "count(day)", 1)) - )); - - plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("month", DATE)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(30), "d")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-01-01"), "count(day)", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-01-02"), "count(day)", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-01-03"), "count(day)", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-01-04"), "count(day)", 1)))); + + plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("month", DATE)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(30), "d")))); result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2020-12-04"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-02-02"), "count", 3)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-03-04"), "count", 1)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2020-12-04"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-02-02"), "count", 3)), + 
ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-03-04"), "count", 1)))); } @Test public void week_span() { - PhysicalPlan plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("month", DATE)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(5), "w")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("month", DATE)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(5), "w")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2020-11-16"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-01-25"), "count", 3)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-03-01"), "count", 1)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2020-11-16"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-01-25"), "count", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-03-01"), "count", 1)))); } @Test public void month_span() { - PhysicalPlan plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("month", DATE)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(1), "M")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("month", DATE)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(1), "M")))); 
List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2020-12-01"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-02-01"), "count", 3)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDateValue("2021-03-01"), "count", 1)) - )); - - plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("quarter", DATETIME)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "M")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2020-12-01"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-02-01"), "count", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDateValue("2021-03-01"), "count", 1)))); + + plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "M")))); result = execute(plan); assertEquals(4, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2020-09-01 00:00:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2020-11-01 00:00:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2021-05-01 00:00:00"), "count", 2)) - )); - - plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", 
DSL.count(DSL.ref("year", TIMESTAMP)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10 * 12), "M")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDatetimeValue("2020-09-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDatetimeValue("2020-11-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprDatetimeValue("2021-05-01 00:00:00"), "count", 2)))); + + plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("year", TIMESTAMP)))), + Collections.singletonList( + DSL.named( + "span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10 * 12), "M")))); result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)) - )); - + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)))); } @Test public void quarter_span() { - PhysicalPlan plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("quarter", 
DATETIME)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "q")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "q")))); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2020-07-01 00:00:00"), "count", 2)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)) - )); - - plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("year", TIMESTAMP)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10 * 4), "q")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprDatetimeValue("2020-07-01 00:00:00"), "count", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)))); + + plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("year", TIMESTAMP)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10 * 4), "q")))); result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2020-01-01 00:00:00"), 
"count", 1)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)))); } @Test public void year_span() { - PhysicalPlan plan = new AggregationOperator(testScan(dateInputs), - Collections.singletonList(DSL - .named("count", DSL.count(DSL.ref("year", TIMESTAMP)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10), "y")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(dateInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("year", TIMESTAMP)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10), "y")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), - ExprValueUtils.tupleValue(ImmutableMap - .of("span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)))); } @Test public void integer_field() { - PhysicalPlan plan = new AggregationOperator(testScan(numericInputs), - 
Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("integer", INTEGER)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("integer", INTEGER), DSL - .literal(1), "")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("integer", INTEGER)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("integer", INTEGER), DSL.literal(1), "")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 1, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 2, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 5, "count", 1)))); - - plan = new AggregationOperator(testScan(numericInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("integer", INTEGER)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("integer", INTEGER), DSL - .literal(1.5), "")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5, "count", 1)))); + + plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("integer", INTEGER)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("integer", INTEGER), DSL.literal(1.5), "")))); result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 
0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); } @Test public void long_field() { - PhysicalPlan plan = new AggregationOperator(testScan(numericInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("long", LONG)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("long", LONG), DSL - .literal(1), "")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("long", LONG)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("long", LONG), DSL.literal(1), "")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 1L, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 2L, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 5L, "count", 1)))); - - plan = new AggregationOperator(testScan(numericInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("long", LONG)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("long", LONG), DSL - .literal(1.5), "")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1L, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2L, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5L, "count", 1)))); + + plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("long", LONG)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("long", LONG), DSL.literal(1.5), "")))); result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), - 
ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); } @Test public void float_field() { - PhysicalPlan plan = new AggregationOperator(testScan(numericInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("float", FLOAT)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("float", FLOAT), DSL - .literal(1), "")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("float", FLOAT)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("float", FLOAT), DSL.literal(1), "")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 1F, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 2F, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 5F, "count", 1)))); - - plan = new AggregationOperator(testScan(numericInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("float", FLOAT)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("float", FLOAT), DSL - .literal(1.5), "")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1F, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2F, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5F, "count", 1)))); + + plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("float", FLOAT)))), + Collections.singletonList( + 
DSL.named("span", DSL.span(DSL.ref("float", FLOAT), DSL.literal(1.5), "")))); result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); } @Test public void double_field() { - PhysicalPlan plan = new AggregationOperator(testScan(numericInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("double", DOUBLE)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("double", DOUBLE), DSL - .literal(1), "")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("double", DOUBLE)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("double", DOUBLE), DSL.literal(1), "")))); List result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 1D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 2D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 5D, "count", 1)))); - - plan = new AggregationOperator(testScan(numericInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("double", DOUBLE)))), - Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("double", DOUBLE), DSL - .literal(1.5), "")))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2D, 
"count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5D, "count", 1)))); + + plan = + new AggregationOperator( + testScan(numericInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("double", DOUBLE)))), + Collections.singletonList( + DSL.named("span", DSL.span(DSL.ref("double", DOUBLE), DSL.literal(1.5), "")))); result = execute(plan); assertEquals(3, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); } @Test public void twoBucketsSpanAndLong() { - PhysicalPlan plan = new AggregationOperator(testScan(compoundInputs), - Collections.singletonList(DSL.named("max", DSL.max(DSL.ref("errors", INTEGER)))), - Arrays.asList( - DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")), - DSL.named("region", DSL.ref("region", STRING))) - ); + PhysicalPlan plan = + new AggregationOperator( + testScan(compoundInputs), + Collections.singletonList(DSL.named("max", DSL.max(DSL.ref("errors", INTEGER)))), + Arrays.asList( + DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")), + DSL.named("region", DSL.ref("region", STRING)))); List result = execute(plan); assertEquals(4, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-03"), "region","iad", "max", 3)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-04"), "region","iad", "max", 10)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new 
ExprDateValue("2021-01-06"), "region","iad", "max", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-07"), "region","iad", "max", 8)) - )); - - plan = new AggregationOperator(testScan(compoundInputs), - Collections.singletonList(DSL.named("max", DSL.max(DSL.ref("errors", INTEGER)))), - Arrays.asList( - DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")), - DSL.named("region", DSL.ref("region", STRING)), - DSL.named("host", DSL.ref("host", STRING))) - ); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprDateValue("2021-01-03"), "region", "iad", "max", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprDateValue("2021-01-04"), "region", "iad", "max", 10)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprDateValue("2021-01-06"), "region", "iad", "max", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", new ExprDateValue("2021-01-07"), "region", "iad", "max", 8)))); + + plan = + new AggregationOperator( + testScan(compoundInputs), + Collections.singletonList(DSL.named("max", DSL.max(DSL.ref("errors", INTEGER)))), + Arrays.asList( + DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")), + DSL.named("region", DSL.ref("region", STRING)), + DSL.named("host", DSL.ref("host", STRING)))); result = execute(plan); assertEquals(7, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-03"), "region","iad", "host", "h1", "max", 2)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-03"), "region","iad", "host", "h2", "max", 3)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-04"), "region","iad", "host", "h1", "max", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-04"), "region","iad", "host", "h2", 
"max", 10)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-06"), "region","iad", "host", "h1", "max", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-07"), "region","iad", "host", "h1", "max", 6)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "span", new ExprDateValue("2021-01-07"), "region","iad", "host", "h2", "max", 8)) - )); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", + new ExprDateValue("2021-01-03"), + "region", + "iad", + "host", + "h1", + "max", + 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", + new ExprDateValue("2021-01-03"), + "region", + "iad", + "host", + "h2", + "max", + 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", + new ExprDateValue("2021-01-04"), + "region", + "iad", + "host", + "h1", + "max", + 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", + new ExprDateValue("2021-01-04"), + "region", + "iad", + "host", + "h2", + "max", + 10)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", + new ExprDateValue("2021-01-06"), + "region", + "iad", + "host", + "h1", + "max", + 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", + new ExprDateValue("2021-01-07"), + "region", + "iad", + "host", + "h1", + "max", + 6)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "span", + new ExprDateValue("2021-01-07"), + "region", + "iad", + "host", + "h2", + "max", + 8)))); } @Test public void aggregate_with_two_groups_with_windowing() { - PhysicalPlan plan = new AggregationOperator(testScan(compoundInputs), - Collections.singletonList(DSL.named("sum", DSL.sum(DSL.ref("errors", INTEGER)))), - Arrays.asList( - DSL.named("host", DSL.ref("host", STRING)), - DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")))); + PhysicalPlan plan = + new AggregationOperator( + testScan(compoundInputs), + Collections.singletonList(DSL.named("sum", 
DSL.sum(DSL.ref("errors", INTEGER)))), + Arrays.asList( + DSL.named("host", DSL.ref("host", STRING)), + DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")))); List result = execute(plan); assertEquals(7, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-03"), - "sum", 2)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-04"), - "sum", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-06"), - "sum", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-07"), - "sum", 6)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h2"), - "span", new ExprDateValue("2021-01-03"), - "sum", 3)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h2"), - "span", new ExprDateValue("2021-01-04"), - "sum", 10)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h2"), - "span", new ExprDateValue("2021-01-07"), - "sum", 8)))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-03"), + "sum", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-04"), + "sum", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-06"), + "sum", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-07"), + "sum", 6)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h2"), + "span", new 
ExprDateValue("2021-01-03"), + "sum", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h2"), + "span", new ExprDateValue("2021-01-04"), + "sum", 10)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h2"), + "span", new ExprDateValue("2021-01-07"), + "sum", 8)))); } @Test public void aggregate_with_three_groups_with_windowing() { - PhysicalPlan plan = new AggregationOperator(testScan(compoundInputs), - Collections.singletonList(DSL.named("sum", DSL.sum(DSL.ref("errors", INTEGER)))), - Arrays.asList( - DSL.named("host", DSL.ref("host", STRING)), - DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")), - DSL.named("region", DSL.ref("region", STRING)))); + PhysicalPlan plan = + new AggregationOperator( + testScan(compoundInputs), + Collections.singletonList(DSL.named("sum", DSL.sum(DSL.ref("errors", INTEGER)))), + Arrays.asList( + DSL.named("host", DSL.ref("host", STRING)), + DSL.named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")), + DSL.named("region", DSL.ref("region", STRING)))); List result = execute(plan); assertEquals(7, result.size()); - assertThat(result, containsInRelativeOrder( - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-03"), - "region", new ExprStringValue("iad"), - "sum", 2)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-04"), - "region", new ExprStringValue("iad"), - "sum", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-06"), - "region", new ExprStringValue("iad"), - "sum", 1)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h1"), - "span", new ExprDateValue("2021-01-07"), - "region", new ExprStringValue("iad"), - "sum", 6)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h2"), - 
"span", new ExprDateValue("2021-01-03"), - "region", new ExprStringValue("iad"), - "sum", 3)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h2"), - "span", new ExprDateValue("2021-01-04"), - "region", new ExprStringValue("iad"), - "sum", 10)), - ExprValueUtils.tupleValue(ImmutableMap.of( - "host", new ExprStringValue("h2"), - "span", new ExprDateValue("2021-01-07"), - "region", new ExprStringValue("iad"), - "sum", 8)))); + assertThat( + result, + containsInRelativeOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-03"), + "region", new ExprStringValue("iad"), + "sum", 2)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-04"), + "region", new ExprStringValue("iad"), + "sum", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-06"), + "region", new ExprStringValue("iad"), + "sum", 1)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h1"), + "span", new ExprDateValue("2021-01-07"), + "region", new ExprStringValue("iad"), + "sum", 6)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h2"), + "span", new ExprDateValue("2021-01-03"), + "region", new ExprStringValue("iad"), + "sum", 3)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h2"), + "span", new ExprDateValue("2021-01-04"), + "region", new ExprStringValue("iad"), + "sum", 10)), + ExprValueUtils.tupleValue( + ImmutableMap.of( + "host", new ExprStringValue("h2"), + "span", new ExprDateValue("2021-01-07"), + "region", new ExprStringValue("iad"), + "sum", 8)))); } @Test public void copyOfAggregationOperatorShouldSame() { - AggregationOperator plan = new AggregationOperator(testScan(datetimeInputs), - Collections.singletonList(DSL - .named("count", 
DSL.count(DSL.ref("second", TIMESTAMP)))), - Collections.singletonList(DSL - .named("span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6 * 1000), "ms")))); - AggregationOperator copy = new AggregationOperator(plan.getInput(), plan.getAggregatorList(), - plan.getGroupByExprList()); + AggregationOperator plan = + new AggregationOperator( + testScan(datetimeInputs), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("second", TIMESTAMP)))), + Collections.singletonList( + DSL.named( + "span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6 * 1000), "ms")))); + AggregationOperator copy = + new AggregationOperator( + plan.getInput(), plan.getAggregatorList(), plan.getGroupByExprList()); assertEquals(plan, copy); } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/DedupeOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/DedupeOperatorTest.java index 13f35f9843..f6308aed53 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/DedupeOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/DedupeOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -26,8 +25,7 @@ @ExtendWith(MockitoExtension.class) class DedupeOperatorTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan inputPlan; + @Mock private PhysicalPlan inputPlan; /** * construct the map which contain null value, because {@link ImmutableMap} doesn't support null @@ -182,8 +180,7 @@ public void dedupe_one_field_with_missing_value() { tupleValue(ImmutableMap.of("region", "us-east-1", "action", "GET", "response", 200))) .thenReturn( tupleValue(ImmutableMap.of("region", "us-east-1", "action", "POST", "response", 200))) - .thenReturn( - tupleValue(ImmutableMap.of("action", "POST", "response", 200))) + .thenReturn(tupleValue(ImmutableMap.of("action", "POST", "response", 200))) 
.thenReturn( tupleValue(ImmutableMap.of("region", "us-east-1", "action", "GET", "response", 200))); @@ -201,8 +198,7 @@ public void dedupe_one_field_with_missing_value_keep_empty() { tupleValue(ImmutableMap.of("region", "us-east-1", "action", "GET", "response", 200))) .thenReturn( tupleValue(ImmutableMap.of("region", "us-east-1", "action", "POST", "response", 200))) - .thenReturn( - tupleValue(ImmutableMap.of("action", "POST", "response", 200))) + .thenReturn(tupleValue(ImmutableMap.of("action", "POST", "response", 200))) .thenReturn( tupleValue(ImmutableMap.of("region", "us-east-1", "action", "GET", "response", 200))); @@ -221,8 +217,7 @@ public void dedupe_one_field_with_null_value() { tupleValue(ImmutableMap.of("region", "us-east-1", "action", "GET", "response", 200))) .thenReturn( tupleValue(ImmutableMap.of("region", "us-east-1", "action", "POST", "response", 200))) - .thenReturn( - tupleValue(NULL_MAP)) + .thenReturn(tupleValue(NULL_MAP)) .thenReturn( tupleValue(ImmutableMap.of("region", "us-east-1", "action", "GET", "response", 200))); diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/EvalOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/EvalOperatorTest.java index d32622834b..6f10cc435b 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/EvalOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/EvalOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -29,8 +28,7 @@ @ExtendWith(MockitoExtension.class) class EvalOperatorTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan inputPlan; + @Mock private PhysicalPlan inputPlan; @Test public void create_new_field_that_contain_the_result_of_a_calculation() { @@ -63,8 +61,8 @@ public void create_multiple_field_using_field_defined_in_input_tuple() { eval( inputPlan, ImmutablePair.of( - 
DSL.ref("velocity", DOUBLE), DSL.divide(DSL.ref("distance", INTEGER), DSL.ref( - "time", INTEGER))), + DSL.ref("velocity", DOUBLE), + DSL.divide(DSL.ref("distance", INTEGER), DSL.ref("time", INTEGER))), ImmutablePair.of( DSL.ref("doubleDistance", INTEGER), DSL.multiply(DSL.ref("distance", INTEGER), DSL.literal(2)))); @@ -88,8 +86,8 @@ public void create_multiple_filed_using_field_defined_in_eval_operator() { eval( inputPlan, ImmutablePair.of( - DSL.ref("velocity", INTEGER), DSL.divide(DSL.ref("distance", INTEGER), DSL.ref( - "time", INTEGER))), + DSL.ref("velocity", INTEGER), + DSL.divide(DSL.ref("distance", INTEGER), DSL.ref("time", INTEGER))), ImmutablePair.of( DSL.ref("doubleVelocity", INTEGER), DSL.multiply(DSL.ref("velocity", INTEGER), DSL.literal(2)))); @@ -126,8 +124,8 @@ public void replace_existing_field() { public void do_nothing_with_none_tuple_value() { when(inputPlan.hasNext()).thenReturn(true, false); when(inputPlan.next()).thenReturn(ExprValueUtils.integerValue(1)); - PhysicalPlan plan = eval(inputPlan, ImmutablePair.of(DSL.ref("response", INTEGER), - DSL.ref("referer", STRING))); + PhysicalPlan plan = + eval(inputPlan, ImmutablePair.of(DSL.ref("response", INTEGER), DSL.ref("referer", STRING))); List result = execute(plan); assertThat(result, allOf(iterableWithSize(1), hasItems(ExprValueUtils.integerValue(1)))); diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java index 66ca2de157..bfe3b323c4 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -31,20 +30,31 @@ @ExtendWith(MockitoExtension.class) 
@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class FilterOperatorTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan inputPlan; + @Mock private PhysicalPlan inputPlan; @Test public void filter_test() { - FilterOperator plan = new FilterOperator(new TestScan(), - DSL.and(DSL.notequal(DSL.ref("response", INTEGER), DSL.literal(200)), + FilterOperator plan = + new FilterOperator( + new TestScan(), + DSL.and( + DSL.notequal(DSL.ref("response", INTEGER), DSL.literal(200)), DSL.notequal(DSL.ref("response", INTEGER), DSL.literal(500)))); List result = execute(plan); assertEquals(1, result.size()); - assertThat(result, containsInAnyOrder(ExprValueUtils - .tupleValue(ImmutableMap - .of("ip", "209.160.24.63", "action", "GET", "response", 404, "referer", - "www.amazon.com")))); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 404, + "referer", + "www.amazon.com")))); } @Test @@ -54,8 +64,8 @@ public void null_value_should_been_ignored() { when(inputPlan.hasNext()).thenReturn(true, false); when(inputPlan.next()).thenReturn(new ExprTupleValue(value)); - FilterOperator plan = new FilterOperator(inputPlan, - DSL.equal(DSL.ref("response", INTEGER), DSL.literal(404))); + FilterOperator plan = + new FilterOperator(inputPlan, DSL.equal(DSL.ref("response", INTEGER), DSL.literal(404))); List result = execute(plan); assertEquals(0, result.size()); } @@ -67,8 +77,8 @@ public void missing_value_should_been_ignored() { when(inputPlan.hasNext()).thenReturn(true, false); when(inputPlan.next()).thenReturn(new ExprTupleValue(value)); - FilterOperator plan = new FilterOperator(inputPlan, - DSL.equal(DSL.ref("response", INTEGER), DSL.literal(404))); + FilterOperator plan = + new FilterOperator(inputPlan, DSL.equal(DSL.ref("response", INTEGER), DSL.literal(404))); List result = execute(plan); assertEquals(0, result.size()); } diff --git 
a/core/src/test/java/org/opensearch/sql/planner/physical/LimitOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/LimitOperatorTest.java index a9796bb11a..fa3788cc2d 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/LimitOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/LimitOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,10 +22,19 @@ public void limit() { PhysicalPlan plan = new LimitOperator(new TestScan(), 1, 0); List result = execute(plan); assertEquals(1, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 200, "referer", "www.amazon.com")) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com")))); } @Test @@ -34,15 +42,24 @@ public void limit_and_offset() { PhysicalPlan plan = new LimitOperator(new TestScan(), 1, 1); List result = execute(plan); assertEquals(1, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 404, "referer", "www.amazon.com")) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 404, + "referer", + "www.amazon.com")))); } @Test public void offset_exceeds_row_number() { - PhysicalPlan plan = new LimitOperator(new TestScan(),1, 6); + PhysicalPlan plan = new LimitOperator(new TestScan(), 1, 6); List result = execute(plan); assertEquals(0, result.size()); } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java 
b/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java index 5b3744c401..cf98183eb3 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/NestedOperatorTest.java @@ -28,68 +28,40 @@ @ExtendWith(MockitoExtension.class) class NestedOperatorTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan inputPlan; + @Mock private PhysicalPlan inputPlan; - private final ExprValue testData = tupleValue( - Map.of( - "message", - collectionValue( - List.of( - Map.of("info", "a"), - Map.of("info", "b"), - Map.of("info", "c") - ) - ), - "comment", - collectionValue( - List.of( - Map.of("data", "1"), - Map.of("data", "2"), - Map.of("data", "3") - ) - ) - ) - ); + private final ExprValue testData = + tupleValue( + Map.of( + "message", + collectionValue( + List.of(Map.of("info", "a"), Map.of("info", "b"), Map.of("info", "c"))), + "comment", + collectionValue( + List.of(Map.of("data", "1"), Map.of("data", "2"), Map.of("data", "3"))))); + private final ExprValue testDataWithSamePath = + tupleValue( + Map.of( + "message", + collectionValue( + List.of( + Map.of("info", "a", "id", "1"), + Map.of("info", "b", "id", "2"), + Map.of("info", "c", "id", "3"))))); - private final ExprValue testDataWithSamePath = tupleValue( - Map.of( - "message", - collectionValue( - List.of( - Map.of("info", "a", "id", "1"), - Map.of("info", "b", "id", "2"), - Map.of("info", "c", "id", "3") - ) - ) - ) - ); + private final ExprValue nonNestedTestData = tupleValue(Map.of("message", "val")); - private final ExprValue nonNestedTestData = tupleValue( - Map.of( - "message", "val" - ) - ); - - private final ExprValue missingArrayData = tupleValue( - Map.of( - "missing", - collectionValue( - List.of("value") - ) - ) - ); + private final ExprValue missingArrayData = + tupleValue(Map.of("missing", collectionValue(List.of("value")))); @Test public void nested_one_nested_field() { 
when(inputPlan.hasNext()).thenReturn(true, false); - when(inputPlan.next()) - .thenReturn(testData); + when(inputPlan.next()).thenReturn(testData); Set fields = Set.of("message.info"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.info")); + Map> groupedFieldsByPath = Map.of("message", List.of("message.info")); var nested = new NestedOperator(inputPlan, fields, groupedFieldsByPath); @@ -97,77 +69,104 @@ public void nested_one_nested_field() { execute(nested), contains( tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "a"); - put("comment", collectionValue( - new ArrayList<>() {{ - add(new LinkedHashMap<>() {{ - put("data", "1"); - }} - ); - add(new LinkedHashMap<>() {{ - put("data", "2"); - }} - ); - add(new LinkedHashMap<>() {{ - put("data", "3"); - }} - ); - }} - )); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "a"); + put( + "comment", + collectionValue( + new ArrayList<>() { + { + add( + new LinkedHashMap<>() { + { + put("data", "1"); + } + }); + add( + new LinkedHashMap<>() { + { + put("data", "2"); + } + }); + add( + new LinkedHashMap<>() { + { + put("data", "3"); + } + }); + } + })); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "b"); - put("comment", collectionValue( - new ArrayList<>() {{ - add(new LinkedHashMap<>() {{ - put("data", "1"); - }} - ); - add(new LinkedHashMap<>() {{ - put("data", "2"); - }} - ); - add(new LinkedHashMap<>() {{ - put("data", "3"); - }} - ); - }} - )); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "b"); + put( + "comment", + collectionValue( + new ArrayList<>() { + { + add( + new LinkedHashMap<>() { + { + put("data", "1"); + } + }); + add( + new LinkedHashMap<>() { + { + put("data", "2"); + } + }); + add( + new LinkedHashMap<>() { + { + put("data", "3"); + } + }); + } + })); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "c"); - put("comment", collectionValue( - new ArrayList<>() {{ - add(new LinkedHashMap<>() {{ - 
put("data", "1"); - }} - ); - add(new LinkedHashMap<>() {{ - put("data", "2"); - }} - ); - add(new LinkedHashMap<>() {{ - put("data", "3"); - }} - ); - }} - )); - }} - ) - ) - ); + new LinkedHashMap<>() { + { + put("message.info", "c"); + put( + "comment", + collectionValue( + new ArrayList<>() { + { + add( + new LinkedHashMap<>() { + { + put("data", "1"); + } + }); + add( + new LinkedHashMap<>() { + { + put("data", "2"); + } + }); + add( + new LinkedHashMap<>() { + { + put("data", "3"); + } + }); + } + })); + } + }))); } @Test public void nested_two_nested_field() { when(inputPlan.hasNext()).thenReturn(true, false); - when(inputPlan.next()) - .thenReturn(testData); + when(inputPlan.next()).thenReturn(testData); List> fields = List.of( @@ -176,76 +175,81 @@ public void nested_two_nested_field() { "path", new ReferenceExpression("message", STRING)), Map.of( "field", new ReferenceExpression("comment.data", STRING), - "path", new ReferenceExpression("comment", STRING)) - ); + "path", new ReferenceExpression("comment", STRING))); var nested = new NestedOperator(inputPlan, fields); assertThat( execute(nested), contains( tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "a"); - put("comment.data", "1"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "a"); + put("comment.data", "1"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "a"); - put("comment.data", "2"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "a"); + put("comment.data", "2"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "a"); - put("comment.data", "3"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "a"); + put("comment.data", "3"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "b"); - put("comment.data", "1"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "b"); + put("comment.data", "1"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", 
"b"); - put("comment.data", "2"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "b"); + put("comment.data", "2"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "b"); - put("comment.data", "3"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "b"); + put("comment.data", "3"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "c"); - put("comment.data", "1"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "c"); + put("comment.data", "1"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "c"); - put("comment.data", "2"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "c"); + put("comment.data", "2"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "c"); - put("comment.data", "3"); - }} - ) - ) - ); + new LinkedHashMap<>() { + { + put("message.info", "c"); + put("comment.data", "3"); + } + }))); } @Test public void nested_two_nested_fields_with_same_path() { when(inputPlan.hasNext()).thenReturn(true, false); - when(inputPlan.next()) - .thenReturn(testDataWithSamePath); + when(inputPlan.next()).thenReturn(testDataWithSamePath); List> fields = List.of( @@ -254,85 +258,69 @@ public void nested_two_nested_fields_with_same_path() { "path", new ReferenceExpression("message", STRING)), Map.of( "field", new ReferenceExpression("message.id", STRING), - "path", new ReferenceExpression("message", STRING)) - ); + "path", new ReferenceExpression("message", STRING))); var nested = new NestedOperator(inputPlan, fields); assertThat( execute(nested), contains( tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "a"); - put("message.id", "1"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "a"); + put("message.id", "1"); + } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "b"); - put("message.id", "2"); - }} - ), + new LinkedHashMap<>() { + { + put("message.info", "b"); + put("message.id", "2"); 
+ } + }), tupleValue( - new LinkedHashMap<>() {{ - put("message.info", "c"); - put("message.id", "3"); - }} - ) - ) - ); + new LinkedHashMap<>() { + { + put("message.info", "c"); + put("message.id", "3"); + } + }))); } @Test public void non_nested_field_tests() { when(inputPlan.hasNext()).thenReturn(true, false); - when(inputPlan.next()) - .thenReturn(nonNestedTestData); + when(inputPlan.next()).thenReturn(nonNestedTestData); Set fields = Set.of("message"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.info")); + Map> groupedFieldsByPath = Map.of("message", List.of("message.info")); var nested = new NestedOperator(inputPlan, fields, groupedFieldsByPath); assertThat( - execute(nested), - contains( - tupleValue(new LinkedHashMap<>(Map.of("message", "val"))) - ) - ); + execute(nested), contains(tupleValue(new LinkedHashMap<>(Map.of("message", "val"))))); } @Test public void nested_missing_tuple_field() { when(inputPlan.hasNext()).thenReturn(true, false); - when(inputPlan.next()) - .thenReturn(tupleValue(Map.of())); + when(inputPlan.next()).thenReturn(tupleValue(Map.of())); Set fields = Set.of("message.val"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.val")); + Map> groupedFieldsByPath = Map.of("message", List.of("message.val")); var nested = new NestedOperator(inputPlan, fields, groupedFieldsByPath); assertThat( execute(nested), - contains( - tupleValue(new LinkedHashMap<>(Map.of("message.val", ExprNullValue.of()))) - ) - ); + contains(tupleValue(new LinkedHashMap<>(Map.of("message.val", ExprNullValue.of()))))); } @Test public void nested_missing_array_field() { when(inputPlan.hasNext()).thenReturn(true, false); - when(inputPlan.next()) - .thenReturn(missingArrayData); + when(inputPlan.next()).thenReturn(missingArrayData); Set fields = Set.of("missing.data"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.data")); + Map> groupedFieldsByPath = Map.of("message", List.of("message.data")); var nested = new 
NestedOperator(inputPlan, fields, groupedFieldsByPath); - assertEquals(0, execute(nested) - .get(0) - .tupleValue() - .size()); + assertEquals(0, execute(nested).get(0).tupleValue().size()); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java index 8ed4881d33..c91ae8787c 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static java.util.Collections.emptyList; @@ -49,16 +48,12 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.window.WindowDefinition; -/** - * Todo, testing purpose, delete later. - */ +/** Todo, testing purpose, delete later. */ @ExtendWith(MockitoExtension.class) @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class PhysicalPlanNodeVisitorTest extends PhysicalPlanTestBase { - @Mock - PhysicalPlan plan; - @Mock - ReferenceExpression ref; + @Mock PhysicalPlan plan; + @Mock ReferenceExpression ref; @Test public void print_physical_plan() { @@ -69,16 +64,13 @@ public void print_physical_plan() { agg( rareTopN( filter( - limit( - new TestScan(), - 1, 1 - ), + limit(new TestScan(), 1, 1), DSL.equal(DSL.ref("response", INTEGER), DSL.literal(10))), CommandType.TOP, ImmutableList.of(), DSL.ref("response", INTEGER)), - ImmutableList - .of(DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), + ImmutableList.of( + DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), ImmutableList.of()), ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", DOUBLE))), named("ref", ref)), @@ -104,17 +96,20 @@ public static Stream getPhysicalPlanForTest() { filter(new 
TestScan(), DSL.equal(DSL.ref("response", INTEGER), DSL.literal(10))); PhysicalPlan aggregation = - agg(filter, ImmutableList.of(DSL.named("avg(response)", - DSL.avg(DSL.ref("response", INTEGER)))), ImmutableList.of()); + agg( + filter, + ImmutableList.of(DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), + ImmutableList.of()); PhysicalPlan rename = - rename(aggregation, ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", - DOUBLE))); + rename( + aggregation, + ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", DOUBLE))); PhysicalPlan project = project(plan, named("ref", ref)); - PhysicalPlan window = window(plan, named(DSL.rowNumber()), - new WindowDefinition(emptyList(), emptyList())); + PhysicalPlan window = + window(plan, named(DSL.rowNumber()), new WindowDefinition(emptyList(), emptyList())); PhysicalPlan remove = remove(plan, ref); @@ -136,26 +131,33 @@ public static Stream getPhysicalPlanForTest() { PhysicalPlan cursorClose = new CursorCloseOperator(plan); - return Stream.of(Arguments.of(filter, "filter"), Arguments.of(aggregation, "aggregation"), - Arguments.of(rename, "rename"), Arguments.of(project, "project"), - Arguments.of(window, "window"), Arguments.of(remove, "remove"), - Arguments.of(eval, "eval"), Arguments.of(sort, "sort"), Arguments.of(dedupe, "dedupe"), - Arguments.of(values, "values"), Arguments.of(rareTopN, "rareTopN"), - Arguments.of(limit, "limit"), Arguments.of(nested, "nested"), + return Stream.of( + Arguments.of(filter, "filter"), + Arguments.of(aggregation, "aggregation"), + Arguments.of(rename, "rename"), + Arguments.of(project, "project"), + Arguments.of(window, "window"), + Arguments.of(remove, "remove"), + Arguments.of(eval, "eval"), + Arguments.of(sort, "sort"), + Arguments.of(dedupe, "dedupe"), + Arguments.of(values, "values"), + Arguments.of(rareTopN, "rareTopN"), + Arguments.of(limit, "limit"), + Arguments.of(nested, "nested"), Arguments.of(cursorClose, "cursorClose")); } 
@ParameterizedTest(name = "{1}") @MethodSource("getPhysicalPlanForTest") public void test_PhysicalPlanVisitor_should_return_null(PhysicalPlan plan, String name) { - assertNull(plan.accept(new PhysicalPlanNodeVisitor() { - }, null)); + assertNull(plan.accept(new PhysicalPlanNodeVisitor() {}, null)); } @Test public void test_visitMLCommons() { PhysicalPlanNodeVisitor physicalPlanNodeVisitor = - new PhysicalPlanNodeVisitor() {}; + new PhysicalPlanNodeVisitor() {}; assertNull(physicalPlanNodeVisitor.visitMLCommons(plan, null)); } @@ -163,7 +165,7 @@ public void test_visitMLCommons() { @Test public void test_visitAD() { PhysicalPlanNodeVisitor physicalPlanNodeVisitor = - new PhysicalPlanNodeVisitor() {}; + new PhysicalPlanNodeVisitor() {}; assertNull(physicalPlanNodeVisitor.visitAD(plan, null)); } @@ -171,7 +173,7 @@ public void test_visitAD() { @Test public void test_visitML() { PhysicalPlanNodeVisitor physicalPlanNodeVisitor = - new PhysicalPlanNodeVisitor() {}; + new PhysicalPlanNodeVisitor() {}; assertNull(physicalPlanNodeVisitor.visitML(plan, null)); } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java index 1e42857205..d63ab35773 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java @@ -20,33 +20,32 @@ @ExtendWith(MockitoExtension.class) @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class PhysicalPlanTest { - @Mock - Split split; - - @Mock - PhysicalPlan child; - - private PhysicalPlan testPlan = new PhysicalPlan() { - @Override - public R accept(PhysicalPlanNodeVisitor visitor, C context) { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasNext() { - throw new UnsupportedOperationException(); - } - - @Override - public ExprValue next() { - throw new 
UnsupportedOperationException(); - } - - @Override - public List getChild() { - return List.of(child); - } - }; + @Mock Split split; + + @Mock PhysicalPlan child; + + private PhysicalPlan testPlan = + new PhysicalPlan() { + @Override + public R accept(PhysicalPlanNodeVisitor visitor, C context) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean hasNext() { + throw new UnsupportedOperationException(); + } + + @Override + public ExprValue next() { + throw new UnsupportedOperationException(); + } + + @Override + public List getChild() { + return List.of(child); + } + }; @Test void add_split_to_child_by_default() { diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java index 60d97f159a..003e59959f 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -29,47 +28,71 @@ public class PhysicalPlanTestBase { - protected static final List countTestInputs = new ImmutableList.Builder() - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 1, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 2, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 3, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 4, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 5, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 6, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 7, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 8, "testString", 
"asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 9, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 10, "testString", "asdf"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("id", 11, "testString", "asdf"))) + protected static final List countTestInputs = + new ImmutableList.Builder() + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 1, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 2, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 3, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 4, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 5, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 6, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 7, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 8, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 9, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 10, "testString", "asdf"))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of("id", 11, "testString", "asdf"))) .build(); - protected static final List inputs = new ImmutableList.Builder() - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("ip", "209.160.24.63", "action", "GET", "response", 200, "referer", - "www.amazon.com"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("ip", "209.160.24.63", "action", "GET", "response", 404, "referer", - "www.amazon.com"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("ip", "112.111.162.4", "action", "GET", "response", 200, "referer", - "www.amazon.com"))) - .add(ExprValueUtils.tupleValue(ImmutableMap - .of("ip", "74.125.19.106", "action", "POST", "response", 200, "referer", - "www.google.com"))) - .add(ExprValueUtils - .tupleValue(ImmutableMap.of("ip", "74.125.19.106", "action", "POST", "response", 
500))) - .build(); + protected static final List inputs = + new ImmutableList.Builder() + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com"))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 404, + "referer", + "www.amazon.com"))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "ip", + "112.111.162.4", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com"))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "ip", + "74.125.19.106", + "action", + "POST", + "response", + 200, + "referer", + "www.google.com"))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of("ip", "74.125.19.106", "action", "POST", "response", 500))) + .build(); private static Map typeMapping = new ImmutableMap.Builder() @@ -79,112 +102,172 @@ public class PhysicalPlanTestBase { .put("referer", ExprCoreType.STRING) .build(); - protected static final List dateInputs = new ImmutableList.Builder() - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-03"), - "month", new ExprDateValue("2021-02-04"), - "quarter", new ExprDatetimeValue("2021-01-01 12:25:02"), - "year", new ExprTimestampValue("2013-01-01 12:25:02")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-01"), - "month", new ExprDateValue("2021-03-17"), - "quarter", new ExprDatetimeValue("2021-05-17 12:25:01"), - "year", new ExprTimestampValue("2021-01-01 12:25:02")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-04"), - "month", new ExprDateValue("2021-02-08"), - "quarter", new ExprDatetimeValue("2021-06-08 12:25:02"), - "year", new ExprTimestampValue("2016-01-01 12:25:02")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-02"), - "month", new ExprDateValue("2020-12-12"), - 
"quarter", new ExprDatetimeValue("2020-12-12 12:25:03"), - "year", new ExprTimestampValue("1999-01-01 12:25:02")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-01"), - "month", new ExprDateValue("2021-02-28"), - "quarter", new ExprDatetimeValue("2020-09-28 12:25:01"), - "year", new ExprTimestampValue("2018-01-01 12:25:02")))) - .build(); + protected static final List dateInputs = + new ImmutableList.Builder() + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", new ExprDateValue("2021-01-03"), + "month", new ExprDateValue("2021-02-04"), + "quarter", new ExprDatetimeValue("2021-01-01 12:25:02"), + "year", new ExprTimestampValue("2013-01-01 12:25:02")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", new ExprDateValue("2021-01-01"), + "month", new ExprDateValue("2021-03-17"), + "quarter", new ExprDatetimeValue("2021-05-17 12:25:01"), + "year", new ExprTimestampValue("2021-01-01 12:25:02")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", new ExprDateValue("2021-01-04"), + "month", new ExprDateValue("2021-02-08"), + "quarter", new ExprDatetimeValue("2021-06-08 12:25:02"), + "year", new ExprTimestampValue("2016-01-01 12:25:02")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", new ExprDateValue("2021-01-02"), + "month", new ExprDateValue("2020-12-12"), + "quarter", new ExprDatetimeValue("2020-12-12 12:25:03"), + "year", new ExprTimestampValue("1999-01-01 12:25:02")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", new ExprDateValue("2021-01-01"), + "month", new ExprDateValue("2021-02-28"), + "quarter", new ExprDatetimeValue("2020-09-28 12:25:01"), + "year", new ExprTimestampValue("2018-01-01 12:25:02")))) + .build(); - protected static final List datetimeInputs = new ImmutableList.Builder() - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "hour", new ExprTimeValue("17:17:00"), - "minute", new ExprDatetimeValue("2020-12-31 23:54:12"), - 
"second", new ExprTimestampValue("2021-01-01 00:00:05")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "hour", new ExprTimeValue("18:17:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:05:12"), - "second", new ExprTimestampValue("2021-01-01 00:00:12")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "hour", new ExprTimeValue("17:15:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:03:12"), - "second", new ExprTimestampValue("2021-01-01 00:00:17")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "hour", new ExprTimeValue("19:01:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:02:12"), - "second", new ExprTimestampValue("2021-01-01 00:00:03")))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "hour", new ExprTimeValue("18:50:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:00:12"), - "second", new ExprTimestampValue("2021-01-01 00:00:13")))) - .build(); + protected static final List datetimeInputs = + new ImmutableList.Builder() + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "hour", new ExprTimeValue("17:17:00"), + "minute", new ExprDatetimeValue("2020-12-31 23:54:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:05")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "hour", new ExprTimeValue("18:17:00"), + "minute", new ExprDatetimeValue("2021-01-01 00:05:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:12")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "hour", new ExprTimeValue("17:15:00"), + "minute", new ExprDatetimeValue("2021-01-01 00:03:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:17")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "hour", new ExprTimeValue("19:01:00"), + "minute", new ExprDatetimeValue("2021-01-01 00:02:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:03")))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "hour", new ExprTimeValue("18:50:00"), + "minute", new ExprDatetimeValue("2021-01-01 
00:00:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:13")))) + .build(); - protected static final List numericInputs = new ImmutableList.Builder() - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "integer", 2, - "long", 2L, - "float", 2F, - "double", 2D))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "integer", 1, - "long", 1L, - "float", 1F, - "double", 1D))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "integer", 5, - "long", 5L, - "float", 5F, - "double", 5D))) - .build(); + protected static final List numericInputs = + new ImmutableList.Builder() + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of("integer", 2, "long", 2L, "float", 2F, "double", 2D))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of("integer", 1, "long", 1L, "float", 1F, "double", 1D))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of("integer", 5, "long", 5L, "float", 5F, "double", 5D))) + .build(); - protected static final List compoundInputs = new ImmutableList.Builder() - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-03"), - "region", "iad", - "host", "h1", - "errors", 2))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-03"), - "region", "iad", - "host", "h2", - "errors", 3))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-04"), - "region", "iad", - "host", "h1", - "errors", 1))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-04"), - "region", "iad", - "host", "h2", - "errors", 10))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-06"), - "region", "iad", - "host", "h1", - "errors", 1))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-07"), - "region", "iad", - "host", "h1", - "errors", 6))) - .add(ExprValueUtils.tupleValue(ImmutableMap.of( - "day", new ExprDateValue("2021-01-07"), - "region", "iad", - "host", "h2", - 
"errors", 8))) - .build(); + protected static final List compoundInputs = + new ImmutableList.Builder() + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", + new ExprDateValue("2021-01-03"), + "region", + "iad", + "host", + "h1", + "errors", + 2))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", + new ExprDateValue("2021-01-03"), + "region", + "iad", + "host", + "h2", + "errors", + 3))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", + new ExprDateValue("2021-01-04"), + "region", + "iad", + "host", + "h1", + "errors", + 1))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", + new ExprDateValue("2021-01-04"), + "region", + "iad", + "host", + "h2", + "errors", + 10))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", + new ExprDateValue("2021-01-06"), + "region", + "iad", + "host", + "h1", + "errors", + 1))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", + new ExprDateValue("2021-01-07"), + "region", + "iad", + "host", + "h1", + "errors", + 6))) + .add( + ExprValueUtils.tupleValue( + ImmutableMap.of( + "day", + new ExprDateValue("2021-01-07"), + "region", + "iad", + "host", + "h2", + "errors", + 8))) + .build(); protected Environment typeEnv() { return var -> { @@ -244,12 +327,10 @@ public ExprValue next() { } @Override - public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { - } + public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {} @Override - public void writeExternal(ObjectOutput out) throws IOException { - } + public void writeExternal(ObjectOutput out) throws IOException {} public boolean equals(final Object o) { return o == this || o.hashCode() == hashCode(); diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java index f5ecf76bd0..ded8605cf0 100644 --- 
a/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -64,9 +63,11 @@ public void project_two_field_follow_the_project_order() { when(inputPlan.hasNext()).thenReturn(true, false); when(inputPlan.next()) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200))); - PhysicalPlan plan = project(inputPlan, - DSL.named("response", DSL.ref("response", INTEGER)), - DSL.named("action", DSL.ref("action", STRING))); + PhysicalPlan plan = + project( + inputPlan, + DSL.named("response", DSL.ref("response", INTEGER)), + DSL.named("action", DSL.ref("action", STRING))); List result = execute(plan); assertThat( @@ -83,9 +84,11 @@ public void project_keep_missing_value() { when(inputPlan.next()) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200))) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST"))); - PhysicalPlan plan = project(inputPlan, - DSL.named("response", DSL.ref("response", INTEGER)), - DSL.named("action", DSL.ref("action", STRING))); + PhysicalPlan plan = + project( + inputPlan, + DSL.named("response", DSL.ref("response", INTEGER)), + DSL.named("action", DSL.ref("action", STRING))); List result = execute(plan); assertThat( @@ -94,21 +97,23 @@ public void project_keep_missing_value() { iterableWithSize(2), hasItems( ExprValueUtils.tupleValue(ImmutableMap.of("response", 200, "action", "GET")), - ExprTupleValue.fromExprValueMap(ImmutableMap.of("response", - LITERAL_MISSING, - "action", stringValue("POST")))))); + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("response", LITERAL_MISSING, "action", stringValue("POST")))))); } @Test public void project_schema() { - PhysicalPlan project = project(inputPlan, - 
DSL.named("response", DSL.ref("response", INTEGER)), - DSL.named("action", DSL.ref("action", STRING), "act")); - - assertThat(project.schema().getColumns(), contains( - new ExecutionEngine.Schema.Column("response", null, INTEGER), - new ExecutionEngine.Schema.Column("action", "act", STRING) - )); + PhysicalPlan project = + project( + inputPlan, + DSL.named("response", DSL.ref("response", INTEGER)), + DSL.named("action", DSL.ref("action", STRING), "act")); + + assertThat( + project.schema().getColumns(), + contains( + new ExecutionEngine.Schema.Column("response", null, INTEGER), + new ExecutionEngine.Schema.Column("action", "act", STRING))); } @Test @@ -117,16 +122,24 @@ public void project_fields_with_parse_expressions() { when(inputPlan.next()) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("response", "GET 200"))); PhysicalPlan plan = - project(inputPlan, ImmutableList.of(DSL.named("action", DSL.ref("action", STRING)), + project( + inputPlan, + ImmutableList.of( + DSL.named("action", DSL.ref("action", STRING)), DSL.named("response", DSL.ref("response", STRING))), - ImmutableList.of(DSL.named("action", - DSL.regex(DSL.ref("response", STRING), - DSL.literal("(?\\w+) (?\\d+)"), - DSL.literal("action"))), DSL.named("response", - DSL.regex(DSL.ref("response", STRING), - DSL.literal("(?\\w+) (?\\d+)"), - DSL.literal("response")))) - ); + ImmutableList.of( + DSL.named( + "action", + DSL.regex( + DSL.ref("response", STRING), + DSL.literal("(?\\w+) (?\\d+)"), + DSL.literal("action"))), + DSL.named( + "response", + DSL.regex( + DSL.ref("response", STRING), + DSL.literal("(?\\w+) (?\\d+)"), + DSL.literal("response"))))); List result = execute(plan); assertThat( @@ -143,20 +156,23 @@ public void project_fields_with_unused_parse_expressions() { when(inputPlan.next()) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("response", "GET 200"))); PhysicalPlan plan = - project(inputPlan, ImmutableList.of(DSL.named("response", DSL.ref("response", STRING))), - 
ImmutableList.of(DSL.named("ignored", - DSL.regex(DSL.ref("response", STRING), - DSL.literal("(?\\w+) (?\\d+)"), - DSL.literal("ignored")))) - ); + project( + inputPlan, + ImmutableList.of(DSL.named("response", DSL.ref("response", STRING))), + ImmutableList.of( + DSL.named( + "ignored", + DSL.regex( + DSL.ref("response", STRING), + DSL.literal("(?\\w+) (?\\d+)"), + DSL.literal("ignored"))))); List result = execute(plan); assertThat( result, allOf( iterableWithSize(1), - hasItems( - ExprValueUtils.tupleValue(ImmutableMap.of("response", "GET 200"))))); + hasItems(ExprValueUtils.tupleValue(ImmutableMap.of("response", "GET 200"))))); } @Test @@ -166,14 +182,19 @@ public void project_fields_with_parse_expressions_and_runtime_fields() { .thenReturn( ExprValueUtils.tupleValue(ImmutableMap.of("response", "GET 200", "eval_field", 1))); PhysicalPlan plan = - project(inputPlan, ImmutableList.of(DSL.named("response", DSL.ref("response", STRING)), + project( + inputPlan, + ImmutableList.of( + DSL.named("response", DSL.ref("response", STRING)), DSL.named("action", DSL.ref("action", STRING)), DSL.named("eval_field", DSL.ref("eval_field", INTEGER))), - ImmutableList.of(DSL.named("action", - DSL.regex(DSL.ref("response", STRING), - DSL.literal("(?\\w+) (?\\d+)"), - DSL.literal("action")))) - ); + ImmutableList.of( + DSL.named( + "action", + DSL.regex( + DSL.ref("response", STRING), + DSL.literal("(?\\w+) (?\\d+)"), + DSL.literal("action"))))); List result = execute(plan); assertThat( @@ -193,16 +214,24 @@ public void project_parse_missing_will_fallback() { ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", "GET 200"))) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST"))); PhysicalPlan plan = - project(inputPlan, ImmutableList.of(DSL.named("action", DSL.ref("action", STRING)), + project( + inputPlan, + ImmutableList.of( + DSL.named("action", DSL.ref("action", STRING)), DSL.named("response", DSL.ref("response", STRING))), - 
ImmutableList.of(DSL.named("action", - DSL.regex(DSL.ref("response", STRING), - DSL.literal("(?\\w+) (?\\d+)"), - DSL.literal("action"))), DSL.named("response", - DSL.regex(DSL.ref("response", STRING), - DSL.literal("(?\\w+) (?\\d+)"), - DSL.literal("response")))) - ); + ImmutableList.of( + DSL.named( + "action", + DSL.regex( + DSL.ref("response", STRING), + DSL.literal("(?\\w+) (?\\d+)"), + DSL.literal("action"))), + DSL.named( + "response", + DSL.regex( + DSL.ref("response", STRING), + DSL.literal("(?\\w+) (?\\d+)"), + DSL.literal("response"))))); List result = execute(plan); assertThat( @@ -225,8 +254,8 @@ public void serializable() { objectOutput.writeObject(project); objectOutput.flush(); - ObjectInputStream objectInput = new ObjectInputStream( - new ByteArrayInputStream(output.toByteArray())); + ObjectInputStream objectInput = + new ObjectInputStream(new ByteArrayInputStream(output.toByteArray())); var roundTripPlan = (ProjectOperator) objectInput.readObject(); assertEquals(project, roundTripPlan); } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/RareTopNOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/RareTopNOperatorTest.java index 5a77bf7f66..e252c41d1f 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/RareTopNOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/RareTopNOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -24,74 +23,87 @@ public class RareTopNOperatorTest extends PhysicalPlanTestBase { @Test public void rare_without_group() { - PhysicalPlan plan = new RareTopNOperator(new TestScan(), - CommandType.RARE, - Collections.singletonList(DSL.ref("action", ExprCoreType.STRING)), - Collections.emptyList()); + PhysicalPlan plan = + new RareTopNOperator( + new TestScan(), + CommandType.RARE, + Collections.singletonList(DSL.ref("action", 
ExprCoreType.STRING)), + Collections.emptyList()); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST")), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET")) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST")), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET")))); } @Test public void rare_with_group() { - PhysicalPlan plan = new RareTopNOperator(new TestScan(), - CommandType.RARE, - Collections.singletonList(DSL.ref("response", ExprCoreType.INTEGER)), - Collections.singletonList(DSL.ref("action", ExprCoreType.STRING))); + PhysicalPlan plan = + new RareTopNOperator( + new TestScan(), + CommandType.RARE, + Collections.singletonList(DSL.ref("response", ExprCoreType.INTEGER)), + Collections.singletonList(DSL.ref("action", ExprCoreType.STRING))); List result = execute(plan); assertEquals(4, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "response", 200)), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "response", 500)), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 404)), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200)) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "response", 200)), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "response", 500)), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 404)), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200)))); } @Test public void top_without_group() { - PhysicalPlan plan = new RareTopNOperator(new TestScan(), - CommandType.TOP, - Collections.singletonList(DSL.ref("action", ExprCoreType.STRING)), - Collections.emptyList()); + PhysicalPlan plan = + new 
RareTopNOperator( + new TestScan(), + CommandType.TOP, + Collections.singletonList(DSL.ref("action", ExprCoreType.STRING)), + Collections.emptyList()); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET")), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST")) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET")), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST")))); } @Test public void top_n_without_group() { - PhysicalPlan plan = new RareTopNOperator(new TestScan(), - CommandType.TOP, - 1, - Collections.singletonList(DSL.ref("action", ExprCoreType.STRING)), - Collections.emptyList()); + PhysicalPlan plan = + new RareTopNOperator( + new TestScan(), + CommandType.TOP, + 1, + Collections.singletonList(DSL.ref("action", ExprCoreType.STRING)), + Collections.emptyList()); List result = execute(plan); assertEquals(1, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET")) - )); + assertThat( + result, containsInAnyOrder(ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET")))); } @Test public void top_n_with_group() { - PhysicalPlan plan = new RareTopNOperator(new TestScan(), - CommandType.TOP, - 1, - Collections.singletonList(DSL.ref("response", ExprCoreType.INTEGER)), - Collections.singletonList(DSL.ref("action", ExprCoreType.STRING))); + PhysicalPlan plan = + new RareTopNOperator( + new TestScan(), + CommandType.TOP, + 1, + Collections.singletonList(DSL.ref("response", ExprCoreType.INTEGER)), + Collections.singletonList(DSL.ref("action", ExprCoreType.STRING))); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "response", 200)), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 
200)) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "response", 200)), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200)))); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java index ec950e6016..6ba5d3fba8 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -29,8 +28,7 @@ @ExtendWith(MockitoExtension.class) class RemoveOperatorTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan inputPlan; + @Mock private PhysicalPlan inputPlan; @Test public void remove_one_field() { diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/RenameOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/RenameOperatorTest.java index a0da659a38..807d8b8836 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/RenameOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/RenameOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -27,34 +26,34 @@ @ExtendWith(MockitoExtension.class) public class RenameOperatorTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan inputPlan; + @Mock private PhysicalPlan inputPlan; @Test public void avg_aggregation_rename() { - PhysicalPlan plan = new RenameOperator( - new AggregationOperator(new TestScan(), - Collections - .singletonList(DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), - 
Collections.singletonList(DSL.named("action", DSL.ref("action", STRING)))), - ImmutableMap.of(DSL.ref("avg(response)", DOUBLE), DSL.ref("avg", DOUBLE)) - ); + PhysicalPlan plan = + new RenameOperator( + new AggregationOperator( + new TestScan(), + Collections.singletonList( + DSL.named("avg(response)", DSL.avg(DSL.ref("response", INTEGER)))), + Collections.singletonList(DSL.named("action", DSL.ref("action", STRING)))), + ImmutableMap.of(DSL.ref("avg(response)", DOUBLE), DSL.ref("avg", DOUBLE))); List result = execute(plan); assertEquals(2, result.size()); - assertThat(result, containsInAnyOrder( - ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "avg", 268d)), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "avg", 350d)) - )); + assertThat( + result, + containsInAnyOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "avg", 268d)), + ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "avg", 350d)))); } @Test public void rename_int_value() { when(inputPlan.hasNext()).thenReturn(true, false); when(inputPlan.next()).thenReturn(ExprValueUtils.integerValue(1)); - PhysicalPlan plan = new RenameOperator( - inputPlan, - ImmutableMap.of(DSL.ref("avg(response)", DOUBLE), DSL.ref("avg", DOUBLE)) - ); + PhysicalPlan plan = + new RenameOperator( + inputPlan, ImmutableMap.of(DSL.ref("avg(response)", DOUBLE), DSL.ref("avg", DOUBLE))); List result = execute(plan); assertEquals(1, result.size()); assertThat(result, containsInAnyOrder(ExprValueUtils.integerValue(1))); diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/SortOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/SortOperatorTest.java index ef9bdfbca4..f9a44be3ab 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/SortOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/SortOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; 
import static org.hamcrest.MatcherAssert.assertThat; @@ -27,8 +26,7 @@ @ExtendWith(MockitoExtension.class) class SortOperatorTest extends PhysicalPlanTestBase { - @Mock - private PhysicalPlan inputPlan; + @Mock private PhysicalPlan inputPlan; /** * construct the map which contain null value, because {@link ImmutableMap} doesn't support null @@ -264,7 +262,6 @@ public void sort_one_field_without_input() { assertEquals( 0, - execute(sort(inputPlan, - Pair.of(SortOption.DEFAULT_ASC, ref("response", INTEGER)))).size()); + execute(sort(inputPlan, Pair.of(SortOption.DEFAULT_ASC, ref("response", INTEGER)))).size()); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java index 9acab03d2b..ddf4014b7e 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -26,10 +25,7 @@ class ValuesOperatorTest { @Test public void shouldHaveNoChild() { ValuesOperator values = values(ImmutableList.of(literal(1))); - assertThat( - values.getChild(), - is(empty()) - ); + assertThat(values.getChild(), is(empty())); } @Test @@ -40,10 +36,6 @@ public void iterateSingleRow() { results.add(values.next()); } - assertThat( - results, - contains(collectionValue(Arrays.asList(1, "abc"))) - ); + assertThat(results, contains(collectionValue(Arrays.asList(1, "abc")))); } - } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/WindowOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/WindowOperatorTest.java index 7f727d391b..3bd145ff91 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/WindowOperatorTest.java +++ 
b/core/src/test/java/org/opensearch/sql/planner/physical/WindowOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -43,21 +42,56 @@ void test_ranking_window_function() { window(DSL.rank()) .partitionBy(ref("action", STRING)) .sortBy(DEFAULT_ASC, ref("response", INTEGER)) - .expectNext(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 200, "referer", "www.amazon.com", - "rank()", 1)) - .expectNext(ImmutableMap.of( - "ip", "112.111.162.4", "action", "GET", "response", 200, "referer", "www.amazon.com", - "rank()", 1)) - .expectNext(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 404, "referer", "www.amazon.com", - "rank()", 3)) - .expectNext(ImmutableMap.of( - "ip", "74.125.19.106", "action", "POST", "response", 200, "referer", "www.google.com", - "rank()", 1)) - .expectNext(ImmutableMap.of( - "ip", "74.125.19.106", "action", "POST", "response", 500, - "rank()", 2)) + .expectNext( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com", + "rank()", + 1)) + .expectNext( + ImmutableMap.of( + "ip", + "112.111.162.4", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com", + "rank()", + 1)) + .expectNext( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 404, + "referer", + "www.amazon.com", + "rank()", + 3)) + .expectNext( + ImmutableMap.of( + "ip", + "74.125.19.106", + "action", + "POST", + "response", + 200, + "referer", + "www.google.com", + "rank()", + 1)) + .expectNext( + ImmutableMap.of("ip", "74.125.19.106", "action", "POST", "response", 500, "rank()", 2)) .done(); } @@ -67,21 +101,57 @@ void test_aggregate_window_function() { window(new AggregateWindowFunction(DSL.sum(ref("response", INTEGER)))) .partitionBy(ref("action", STRING)) .sortBy(DEFAULT_ASC, ref("response", 
INTEGER)) - .expectNext(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 200, "referer", "www.amazon.com", - "sum(response)", 400)) - .expectNext(ImmutableMap.of( - "ip", "112.111.162.4", "action", "GET", "response", 200, "referer", "www.amazon.com", - "sum(response)", 400)) - .expectNext(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 404, "referer", "www.amazon.com", - "sum(response)", 804)) - .expectNext(ImmutableMap.of( - "ip", "74.125.19.106", "action", "POST", "response", 200, "referer", "www.google.com", - "sum(response)", 200)) - .expectNext(ImmutableMap.of( - "ip", "74.125.19.106", "action", "POST", "response", 500, - "sum(response)", 700)) + .expectNext( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com", + "sum(response)", + 400)) + .expectNext( + ImmutableMap.of( + "ip", + "112.111.162.4", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com", + "sum(response)", + 400)) + .expectNext( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 404, + "referer", + "www.amazon.com", + "sum(response)", + 804)) + .expectNext( + ImmutableMap.of( + "ip", + "74.125.19.106", + "action", + "POST", + "response", + 200, + "referer", + "www.google.com", + "sum(response)", + 200)) + .expectNext( + ImmutableMap.of( + "ip", "74.125.19.106", "action", "POST", "response", 500, "sum(response)", 700)) .done(); } @@ -89,21 +159,57 @@ void test_aggregate_window_function() { @Test void test_aggregate_window_function_without_sort_key() { window(new AggregateWindowFunction(DSL.sum(ref("response", INTEGER)))) - .expectNext(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 200, "referer", "www.amazon.com", - "sum(response)", 1504)) - .expectNext(ImmutableMap.of( - "ip", "74.125.19.106", "action", "POST", "response", 500, - "sum(response)", 1504)) - .expectNext(ImmutableMap.of( - "ip", "74.125.19.106", 
"action", "POST", "response", 200, "referer", "www.google.com", - "sum(response)", 1504)) - .expectNext(ImmutableMap.of( - "ip", "112.111.162.4", "action", "GET", "response", 200, "referer", "www.amazon.com", - "sum(response)", 1504)) - .expectNext(ImmutableMap.of( - "ip", "209.160.24.63", "action", "GET", "response", 404, "referer", "www.amazon.com", - "sum(response)", 1504)) + .expectNext( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com", + "sum(response)", + 1504)) + .expectNext( + ImmutableMap.of( + "ip", "74.125.19.106", "action", "POST", "response", 500, "sum(response)", 1504)) + .expectNext( + ImmutableMap.of( + "ip", + "74.125.19.106", + "action", + "POST", + "response", + 200, + "referer", + "www.google.com", + "sum(response)", + 1504)) + .expectNext( + ImmutableMap.of( + "ip", + "112.111.162.4", + "action", + "GET", + "response", + 200, + "referer", + "www.amazon.com", + "sum(response)", + 1504)) + .expectNext( + ImmutableMap.of( + "ip", + "209.160.24.63", + "action", + "GET", + "response", + 404, + "referer", + "www.amazon.com", + "sum(response)", + 1504)) .done(); } @@ -136,10 +242,11 @@ WindowOperatorAssertion sortBy(SortOption option, Expression expr) { WindowOperatorAssertion expectNext(Map expected) { if (windowOperator == null) { WindowDefinition definition = new WindowDefinition(partitionByList, sortList); - windowOperator = new WindowOperator( - new SortOperator(new TestScan(), definition.getAllSortItems()), - windowFunction, - definition); + windowOperator = + new WindowOperator( + new SortOperator(new TestScan(), definition.getAllSortItems()), + windowFunction, + definition); windowOperator.open(); } @@ -153,7 +260,5 @@ void done() { assertFalse(windowOperator.hasNext()); windowOperator.close(); } - } - } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/collector/RoundingTest.java 
b/core/src/test/java/org/opensearch/sql/planner/physical/collector/RoundingTest.java index f40e5c058b..3a2601a874 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/collector/RoundingTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/collector/RoundingTest.java @@ -22,8 +22,8 @@ public class RoundingTest { void time_rounding_illegal_span() { SpanExpression span = DSL.span(DSL.ref("time", TIME), DSL.literal(1), "d"); Rounding rounding = Rounding.createRounding(span); - assertThrows(ExpressionEvaluationException.class, - () -> rounding.round(new ExprTimeValue("23:30:00"))); + assertThrows( + ExpressionEvaluationException.class, () -> rounding.round(new ExprTimeValue("23:30:00"))); } @Test @@ -36,7 +36,8 @@ void round_unknown_type() { @Test void resolve() { String illegalUnit = "illegal"; - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> Rounding.DateTimeUnit.resolve(illegalUnit), "Unable to resolve unit " + illegalUnit); } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java index 93c02def86..28851f2454 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java @@ -35,11 +35,9 @@ @ExtendWith(MockitoExtension.class) public class DataSourceTableScanTest { - @Mock - private DataSourceService dataSourceService; + @Mock private DataSourceService dataSourceService; - @Mock - private StorageEngine storageEngine; + @Mock private StorageEngine storageEngine; private DataSourceTableScan dataSourceTableScan; @@ -58,10 +56,16 @@ void testIterator() { Set dataSourceSet = new HashSet<>(); dataSourceSet.add(new DataSource("prometheus", DataSourceType.PROMETHEUS, storageEngine)); 
dataSourceSet.add(new DataSource("opensearch", DataSourceType.OPENSEARCH, storageEngine)); - Set dataSourceMetadata = dataSourceSet.stream() - .map(dataSource -> new DataSourceMetadata(dataSource.getName(), - dataSource.getConnectorType(), Collections.emptyList(), ImmutableMap.of())) - .collect(Collectors.toSet()); + Set dataSourceMetadata = + dataSourceSet.stream() + .map( + dataSource -> + new DataSourceMetadata( + dataSource.getName(), + dataSource.getConnectorType(), + Collections.emptyList(), + ImmutableMap.of())) + .collect(Collectors.toSet()); when(dataSourceService.getDataSourceMetadata(true)).thenReturn(dataSourceMetadata); assertFalse(dataSourceTableScan.hasNext()); @@ -74,11 +78,14 @@ void testIterator() { Set expectedExprTupleValues = new HashSet<>(); for (DataSource dataSource : dataSourceSet) { - expectedExprTupleValues.add(new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "DATASOURCE_NAME", ExprValueUtils.stringValue(dataSource.getName()), - "CONNECTOR_TYPE", ExprValueUtils.stringValue(dataSource.getConnectorType().name()))))); + expectedExprTupleValues.add( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "DATASOURCE_NAME", ExprValueUtils.stringValue(dataSource.getName()), + "CONNECTOR_TYPE", + ExprValueUtils.stringValue(dataSource.getConnectorType().name()))))); } assertEquals(expectedExprTupleValues, exprTupleValues); } - } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableTest.java index c82a042770..75f21c3e52 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableTest.java @@ -27,13 +27,12 @@ @ExtendWith(MockitoExtension.class) public class DataSourceTableTest { - @Mock - private DataSourceService dataSourceService; + @Mock private DataSourceService 
dataSourceService; @Test void testGetFieldTypes() { DataSourceTable dataSourceTable = new DataSourceTable(dataSourceService); - Map fieldTypes = dataSourceTable.getFieldTypes(); + Map fieldTypes = dataSourceTable.getFieldTypes(); Map expectedTypes = new HashMap<>(); expectedTypes.put("DATASOURCE_NAME", ExprCoreType.STRING); expectedTypes.put("CONNECTOR_TYPE", ExprCoreType.STRING); @@ -43,9 +42,8 @@ void testGetFieldTypes() { @Test void testImplement() { DataSourceTable dataSourceTable = new DataSourceTable(dataSourceService); - PhysicalPlan physicalPlan - = dataSourceTable.implement( - LogicalPlanDSL.relation(DATASOURCES_TABLE_NAME, dataSourceTable)); + PhysicalPlan physicalPlan = + dataSourceTable.implement(LogicalPlanDSL.relation(DATASOURCES_TABLE_NAME, dataSourceTable)); assertTrue(physicalPlan instanceof DataSourceTableScan); } @@ -53,7 +51,8 @@ void testImplement() { @Test void testExist() { UnsupportedOperationException exception = - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> new DataSourceTable(dataSourceService).exists()); assertEquals("Unsupported Operation", exception.getMessage()); } @@ -62,7 +61,8 @@ void testExist() { @Test void testCreateTable() { UnsupportedOperationException exception = - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> new DataSourceTable(dataSourceService).create(new HashMap<>())); assertEquals("Unsupported Operation", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java index 886537e088..0c63f594d9 100644 --- a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java +++ 
b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssignerTest.java @@ -20,33 +20,21 @@ void testAssignWindows() { long slideSize = 500; SlidingWindowAssigner assigner = new SlidingWindowAssigner(windowSize, slideSize); - assertEquals( - List.of( - new Window(0, 1000), - new Window(500, 1500)), - assigner.assign(500)); - - assertEquals( - List.of( - new Window(0, 1000), - new Window(500, 1500)), - assigner.assign(999)); - - assertEquals( - List.of( - new Window(500, 1500), - new Window(1000, 2000)), - assigner.assign(1000)); + assertEquals(List.of(new Window(0, 1000), new Window(500, 1500)), assigner.assign(500)); + + assertEquals(List.of(new Window(0, 1000), new Window(500, 1500)), assigner.assign(999)); + + assertEquals(List.of(new Window(500, 1500), new Window(1000, 2000)), assigner.assign(1000)); } @Test void testConstructWithIllegalArguments() { - IllegalArgumentException error1 = assertThrows(IllegalArgumentException.class, - () -> new SlidingWindowAssigner(-1, 100)); + IllegalArgumentException error1 = + assertThrows(IllegalArgumentException.class, () -> new SlidingWindowAssigner(-1, 100)); assertEquals("Window size [-1] must be positive number", error1.getMessage()); - IllegalArgumentException error2 = assertThrows(IllegalArgumentException.class, - () -> new SlidingWindowAssigner(1000, 0)); + IllegalArgumentException error2 = + assertThrows(IllegalArgumentException.class, () -> new SlidingWindowAssigner(1000, 0)); assertEquals("Slide size [0] must be positive number", error2.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java index 55a8750c11..a8ab048701 100644 --- a/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java +++ 
b/core/src/test/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssignerTest.java @@ -19,21 +19,15 @@ void testAssignWindow() { long windowSize = 1000; TumblingWindowAssigner assigner = new TumblingWindowAssigner(windowSize); - assertEquals( - Collections.singletonList(new Window(0, 1000)), - assigner.assign(500)); - assertEquals( - Collections.singletonList(new Window(1000, 2000)), - assigner.assign(1999)); - assertEquals( - Collections.singletonList(new Window(2000, 3000)), - assigner.assign(2000)); + assertEquals(Collections.singletonList(new Window(0, 1000)), assigner.assign(500)); + assertEquals(Collections.singletonList(new Window(1000, 2000)), assigner.assign(1999)); + assertEquals(Collections.singletonList(new Window(2000, 3000)), assigner.assign(2000)); } @Test void testConstructWithIllegalWindowSize() { - IllegalArgumentException error = assertThrows(IllegalArgumentException.class, - () -> new TumblingWindowAssigner(-1)); + IllegalArgumentException error = + assertThrows(IllegalArgumentException.class, () -> new TumblingWindowAssigner(-1)); assertEquals("Window size [-1] must be positive number", error.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/storage/TableScanOperatorTest.java b/core/src/test/java/org/opensearch/sql/storage/TableScanOperatorTest.java index f2f556a957..81680455cc 100644 --- a/core/src/test/java/org/opensearch/sql/storage/TableScanOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/storage/TableScanOperatorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -15,36 +14,40 @@ class TableScanOperatorTest { - private final TableScanOperator tableScan = new TableScanOperator() { - @Override - public String explain() { - return "explain"; - } + private final TableScanOperator tableScan = + new TableScanOperator() { + @Override + public String explain() { + return 
"explain"; + } - @Override - public boolean hasNext() { - return false; - } + @Override + public boolean hasNext() { + return false; + } - @Override - public ExprValue next() { - return null; - } - }; + @Override + public ExprValue next() { + return null; + } + }; @Test public void accept() { - Boolean isVisited = tableScan.accept(new PhysicalPlanNodeVisitor() { - @Override - protected Boolean visitNode(PhysicalPlan node, Object context) { - return (node instanceof TableScanOperator); - } - - @Override - public Boolean visitTableScan(TableScanOperator node, Object context) { - return super.visitTableScan(node, context); - } - }, null); + Boolean isVisited = + tableScan.accept( + new PhysicalPlanNodeVisitor() { + @Override + protected Boolean visitNode(PhysicalPlan node, Object context) { + return (node instanceof TableScanOperator); + } + + @Override + public Boolean visitTableScan(TableScanOperator node, Object context) { + return super.visitTableScan(node, context); + } + }, + null); assertTrue(isVisited); } @@ -53,5 +56,4 @@ public Boolean visitTableScan(TableScanOperator node, Object context) { public void getChild() { assertTrue(tableScan.getChild().isEmpty()); } - } diff --git a/core/src/test/java/org/opensearch/sql/storage/bindingtuple/BindingTupleTest.java b/core/src/test/java/org/opensearch/sql/storage/bindingtuple/BindingTupleTest.java index 780d1cd2dd..89d2372707 100644 --- a/core/src/test/java/org/opensearch/sql/storage/bindingtuple/BindingTupleTest.java +++ b/core/src/test/java/org/opensearch/sql/storage/bindingtuple/BindingTupleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage.bindingtuple; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,30 +20,31 @@ class BindingTupleTest { public void resolve_ref_expression() { BindingTuple bindingTuple = ExprValueUtils.tupleValue(ImmutableMap.of("ip", "209.160.24.63")).bindingTuples(); - 
assertEquals(ExprValueUtils.stringValue("209.160.24.63"), - bindingTuple.resolve(DSL.ref("ip", STRING))); + assertEquals( + ExprValueUtils.stringValue("209.160.24.63"), bindingTuple.resolve(DSL.ref("ip", STRING))); } @Test public void resolve_missing_expression() { BindingTuple bindingTuple = ExprValueUtils.tupleValue(ImmutableMap.of("ip", "209.160.24.63")).bindingTuples(); - assertEquals(ExprValueUtils.LITERAL_MISSING, - bindingTuple.resolve(DSL.ref("ip_missing", STRING))); + assertEquals( + ExprValueUtils.LITERAL_MISSING, bindingTuple.resolve(DSL.ref("ip_missing", STRING))); } @Test public void resolve_from_empty_tuple() { - assertEquals(ExprValueUtils.LITERAL_MISSING, - BindingTuple.EMPTY.resolve(DSL.ref("ip_missing", STRING))); + assertEquals( + ExprValueUtils.LITERAL_MISSING, BindingTuple.EMPTY.resolve(DSL.ref("ip_missing", STRING))); } @Test public void resolve_literal_expression_throw_exception() { BindingTuple bindingTuple = ExprValueUtils.tupleValue(ImmutableMap.of("ip", "209.160.24.63")).bindingTuples(); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> bindingTuple.resolve(DSL.literal(1))); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> bindingTuple.resolve(DSL.literal(1))); assertEquals("can resolve expression: 1", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java b/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java index 112192bde0..e5b2c9f61a 100644 --- a/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/storage/write/TableWriteOperatorTest.java @@ -21,44 +21,47 @@ @ExtendWith(MockitoExtension.class) class TableWriteOperatorTest { - @Mock - private PhysicalPlan child; + @Mock private PhysicalPlan child; private TableWriteOperator tableWrite; @BeforeEach void setUp() { - 
tableWrite = new TableWriteOperator(child) { - @Override - public String explain() { - return "explain"; - } + tableWrite = + new TableWriteOperator(child) { + @Override + public String explain() { + return "explain"; + } - @Override - public boolean hasNext() { - return false; - } + @Override + public boolean hasNext() { + return false; + } - @Override - public ExprValue next() { - return null; - } - }; + @Override + public ExprValue next() { + return null; + } + }; } @Test void testAccept() { - Boolean isVisited = tableWrite.accept(new PhysicalPlanNodeVisitor<>() { - @Override - protected Boolean visitNode(PhysicalPlan node, Object context) { - return (node instanceof TableWriteOperator); - } + Boolean isVisited = + tableWrite.accept( + new PhysicalPlanNodeVisitor<>() { + @Override + protected Boolean visitNode(PhysicalPlan node, Object context) { + return (node instanceof TableWriteOperator); + } - @Override - public Boolean visitTableWrite(TableWriteOperator node, Object context) { - return super.visitTableWrite(node, context); - } - }, null); + @Override + public Boolean visitTableWrite(TableWriteOperator node, Object context) { + return super.visitTableWrite(node, context); + } + }, + null); assertTrue(isVisited); } diff --git a/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java b/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java index d6f0ef02cd..b25f4d1053 100644 --- a/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java +++ b/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import static org.opensearch.sql.data.model.ExprValueUtils.getDoubleValue; @@ -21,8 +20,8 @@ public class ComparisonUtil { /** - * Util to compare the object (integer, long, float, double, string) values. - * Allows comparing different datetime types and requires `FunctionProperties` object for that. 
+ * Util to compare the object (integer, long, float, double, string) values. Allows comparing + * different datetime types and requires `FunctionProperties` object for that. */ public static int compare(FunctionProperties functionProperties, ExprValue v1, ExprValue v2) { if (v1.isMissing() || v2.isMissing()) { @@ -36,9 +35,7 @@ public static int compare(FunctionProperties functionProperties, ExprValue v1, E return compare(v1, v2); } - /** - * Util to compare the object (integer, long, float, double, string) values. - */ + /** Util to compare the object (integer, long, float, double, string) values. */ public static int compare(ExprValue v1, ExprValue v2) { if (v1.isMissing() || v2.isMissing()) { throw new ExpressionEvaluationException("invalid to call compare operation on missing value"); @@ -49,21 +46,34 @@ public static int compare(ExprValue v1, ExprValue v2) { "invalid to call compare operation on values of different types"); } - switch ((ExprCoreType)v1.type()) { - case BYTE: return v1.byteValue().compareTo(v2.byteValue()); - case SHORT: return v1.shortValue().compareTo(v2.shortValue()); - case INTEGER: return getIntegerValue(v1).compareTo(getIntegerValue(v2)); - case LONG: return getLongValue(v1).compareTo(getLongValue(v2)); - case FLOAT: return getFloatValue(v1).compareTo(getFloatValue(v2)); - case DOUBLE: return getDoubleValue(v1).compareTo(getDoubleValue(v2)); - case STRING: return getStringValue(v1).compareTo(getStringValue(v2)); - case BOOLEAN: return v1.booleanValue().compareTo(v2.booleanValue()); - case TIME: return v1.timeValue().compareTo(v2.timeValue()); - case DATE: return v1.dateValue().compareTo(v2.dateValue()); - case DATETIME: return v1.datetimeValue().compareTo(v2.datetimeValue()); - case TIMESTAMP: return v1.timestampValue().compareTo(v2.timestampValue()); - default: throw new ExpressionEvaluationException( - String.format("%s instances are not comparable", v1.getClass().getSimpleName())); + switch ((ExprCoreType) v1.type()) { + case BYTE: + 
return v1.byteValue().compareTo(v2.byteValue()); + case SHORT: + return v1.shortValue().compareTo(v2.shortValue()); + case INTEGER: + return getIntegerValue(v1).compareTo(getIntegerValue(v2)); + case LONG: + return getLongValue(v1).compareTo(getLongValue(v2)); + case FLOAT: + return getFloatValue(v1).compareTo(getFloatValue(v2)); + case DOUBLE: + return getDoubleValue(v1).compareTo(getDoubleValue(v2)); + case STRING: + return getStringValue(v1).compareTo(getStringValue(v2)); + case BOOLEAN: + return v1.booleanValue().compareTo(v2.booleanValue()); + case TIME: + return v1.timeValue().compareTo(v2.timeValue()); + case DATE: + return v1.dateValue().compareTo(v2.dateValue()); + case DATETIME: + return v1.datetimeValue().compareTo(v2.datetimeValue()); + case TIMESTAMP: + return v1.timestampValue().compareTo(v2.timestampValue()); + default: + throw new ExpressionEvaluationException( + String.format("%s instances are not comparable", v1.getClass().getSimpleName())); } } } diff --git a/core/src/test/java/org/opensearch/sql/utils/DateTimeUtilsTest.java b/core/src/test/java/org/opensearch/sql/utils/DateTimeUtilsTest.java index 9bf0a53ff6..989759681f 100644 --- a/core/src/test/java/org/opensearch/sql/utils/DateTimeUtilsTest.java +++ b/core/src/test/java/org/opensearch/sql/utils/DateTimeUtilsTest.java @@ -16,11 +16,16 @@ public class DateTimeUtilsTest { @Test void round() { - long actual = LocalDateTime.parse("2021-09-28T23:40:00").atZone(ZoneId.systemDefault()) - .toInstant().toEpochMilli(); + long actual = + LocalDateTime.parse("2021-09-28T23:40:00") + .atZone(ZoneId.systemDefault()) + .toInstant() + .toEpochMilli(); long rounded = DateTimeUtils.roundFloor(actual, TimeUnit.HOURS.toMillis(1)); assertEquals( - LocalDateTime.parse("2021-09-28T23:00:00").atZone(ZoneId.systemDefault()).toInstant() + LocalDateTime.parse("2021-09-28T23:00:00") + .atZone(ZoneId.systemDefault()) + .toInstant() .toEpochMilli(), Instant.ofEpochMilli(rounded).toEpochMilli()); } diff --git 
a/core/src/test/java/org/opensearch/sql/utils/MatcherUtils.java b/core/src/test/java/org/opensearch/sql/utils/MatcherUtils.java index 8b1a3dda2e..206f05a38a 100644 --- a/core/src/test/java/org/opensearch/sql/utils/MatcherUtils.java +++ b/core/src/test/java/org/opensearch/sql/utils/MatcherUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import org.hamcrest.Description; @@ -11,13 +10,9 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; -/** - * Matcher Utils. - */ +/** Matcher Utils. */ public class MatcherUtils { - /** - * Check {@link ExprValue} type equal to {@link ExprCoreType}. - */ + /** Check {@link ExprValue} type equal to {@link ExprCoreType}. */ public static TypeSafeMatcher hasType(ExprCoreType type) { return new TypeSafeMatcher() { @Override @@ -32,9 +27,7 @@ protected boolean matchesSafely(ExprValue value) { }; } - /** - * Check {@link ExprValue} value equal to {@link Object}. - */ + /** Check {@link ExprValue} value equal to {@link Object}. 
*/ public static TypeSafeMatcher hasValue(Object object) { return new TypeSafeMatcher() { @Override diff --git a/core/src/test/java/org/opensearch/sql/utils/SystemIndexUtilsTest.java b/core/src/test/java/org/opensearch/sql/utils/SystemIndexUtilsTest.java index 81d28f40db..903077ca29 100644 --- a/core/src/test/java/org/opensearch/sql/utils/SystemIndexUtilsTest.java +++ b/core/src/test/java/org/opensearch/sql/utils/SystemIndexUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -49,8 +48,8 @@ void test_mapping_info_table() { @Test void test_mapping_info_table_with_special_index_name() { - final SystemIndexUtils.SystemTable table - = systemTable("logs-2021.01.11.MAPPINGS_ODFE_SYS_TABLE"); + final SystemIndexUtils.SystemTable table = + systemTable("logs-2021.01.11.MAPPINGS_ODFE_SYS_TABLE"); assertTrue(table.isMetaInfoTable()); assertFalse(table.isSystemInfoTable()); assertEquals("logs-2021.01.11", table.getTableName()); diff --git a/core/src/test/java/org/opensearch/sql/utils/TestOperator.java b/core/src/test/java/org/opensearch/sql/utils/TestOperator.java index 584cf6f3fd..d356f2f866 100644 --- a/core/src/test/java/org/opensearch/sql/utils/TestOperator.java +++ b/core/src/test/java/org/opensearch/sql/utils/TestOperator.java @@ -18,13 +18,10 @@ public class TestOperator extends PhysicalPlan implements SerializablePlan { private int field; - @Setter - private boolean throwNoCursorOnWrite = false; - @Setter - private boolean throwIoOnWrite = false; + @Setter private boolean throwNoCursorOnWrite = false; + @Setter private boolean throwIoOnWrite = false; - public TestOperator() { - } + public TestOperator() {} public TestOperator(int value) { field = value; diff --git a/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java b/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java index 
db72498a1d..61219d4637 100644 --- a/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java +++ b/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java @@ -12,9 +12,7 @@ import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * Used for testing purpose. - */ +/** Used for testing purpose. */ public class DefaultExecutionEngine implements ExecutionEngine { @Override public void execute(PhysicalPlan plan, ResponseListener listener) { @@ -33,8 +31,8 @@ public void execute( while (plan.hasNext()) { result.add(plan.next()); } - QueryResponse response = new QueryResponse(new Schema(new ArrayList<>()), new ArrayList<>(), - Cursor.None); + QueryResponse response = + new QueryResponse(new Schema(new ArrayList<>()), new ArrayList<>(), Cursor.None); listener.onResponse(response); } catch (Exception e) { listener.onFailure(e); diff --git a/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultQueryManager.java b/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultQueryManager.java index 41888a0581..3167d4843a 100644 --- a/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultQueryManager.java +++ b/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultQueryManager.java @@ -13,9 +13,7 @@ import java.util.concurrent.TimeUnit; import org.opensearch.sql.executor.execution.AbstractPlan; -/** - * Default QueryManager implementation which execute {@link AbstractPlan} on caller thread. - */ +/** Default QueryManager implementation which execute {@link AbstractPlan} on caller thread. 
*/ public class DefaultQueryManager implements QueryManager { private final ExecutorService executorService; From 6d39aaf8d9df89eaff9857fe83f862214d628bd7 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Thu, 10 Aug 2023 12:23:14 -0700 Subject: [PATCH 14/42] [Spotless] Applying Google Code Format for datasource files #6 (#1939) * Spotless apply for datasources. Signed-off-by: Mitchell Gale * ignore checkstyle for datasources Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- build.gradle | 3 +- datasources/build.gradle | 3 + .../datasources/auth/AuthenticationType.java | 4 +- .../DataSourceUserAuthorizationHelper.java | 5 +- ...DataSourceUserAuthorizationHelperImpl.java | 23 +- .../sql/datasources/encryptor/Encryptor.java | 1 - .../datasources/encryptor/EncryptorImpl.java | 42 +-- .../DataSourceNotFoundException.java | 5 +- .../datasources/exceptions/ErrorMessage.java | 18 +- .../CreateDataSourceActionRequest.java | 16 +- .../CreateDataSourceActionResponse.java | 6 +- .../DeleteDataSourceActionRequest.java | 12 +- .../DeleteDataSourceActionResponse.java | 4 +- .../transport/GetDataSourceActionRequest.java | 13 +- .../GetDataSourceActionResponse.java | 4 +- .../UpdateDataSourceActionRequest.java | 12 +- .../UpdateDataSourceActionResponse.java | 6 +- .../rest/RestDataSourceQueryAction.java | 206 ++++++++------ .../service/DataSourceLoaderCache.java | 5 +- .../service/DataSourceLoaderCacheImpl.java | 25 +- .../service/DataSourceMetadataStorage.java | 19 +- .../service/DataSourceServiceImpl.java | 41 ++- .../OpenSearchDataSourceMetadataStorage.java | 142 +++++----- .../TransportCreateDataSourceAction.java | 32 ++- .../TransportDeleteDataSourceAction.java | 32 ++- .../TransportGetDataSourceAction.java | 61 ++-- .../TransportUpdateDataSourceAction.java | 28 +- .../utils/XContentParserUtils.java | 16 +- .../auth/AuthenticationTypeTest.java | 1 - ...SourceUserAuthorizationHelperImplTest.java | 60 ++-- 
.../encryptor/EncryptorImplTest.java | 66 +++-- .../DataSourceLoaderCacheImplTest.java | 11 +- .../service/DataSourceServiceImplTest.java | 235 +++++++-------- ...enSearchDataSourceMetadataStorageTest.java | 268 ++++++++---------- .../TransportCreateDataSourceActionTest.java | 38 ++- .../TransportDeleteDataSourceActionTest.java | 34 +-- .../TransportGetDataSourceActionTest.java | 49 ++-- .../TransportUpdateDataSourceActionTest.java | 37 ++- .../sql/datasources/utils/SchedulerTest.java | 17 +- .../utils/XContentParserUtilsTest.java | 32 ++- 40 files changed, 813 insertions(+), 819 deletions(-) diff --git a/build.gradle b/build.gradle index 0c58fccfeb..f623b5da4e 100644 --- a/build.gradle +++ b/build.gradle @@ -84,7 +84,8 @@ repositories { spotless { java { target fileTree('.') { - include 'core/**/*.java' + include 'datasources/**/*.java', + 'core/**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/datasources/build.gradle b/datasources/build.gradle index ef52db2305..830fadbc35 100644 --- a/datasources/build.gradle +++ b/datasources/build.gradle @@ -31,6 +31,9 @@ dependencies { testImplementation 'org.junit.jupiter:junit-jupiter:5.6.2' } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + test { useJUnitPlatform() testLogging { diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java b/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java index 715e72c0c3..b6581608bf 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java @@ -12,8 +12,8 @@ import java.util.Map; public enum AuthenticationType { - - BASICAUTH("basicauth"), AWSSIGV4AUTH("awssigv4"); + BASICAUTH("basicauth"), + AWSSIGV4AUTH("awssigv4"); private String name; diff --git 
a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java index adcfb0bdfd..75d0ec8539 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java @@ -8,9 +8,8 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; /** - * Interface for datasource authorization helper. - * The implementation of this class helps in determining - * if authorization is required and the roles associated with the user. + * Interface for datasource authorization helper. The implementation of this class helps in + * determining if authorization is required and the roles associated with the user. */ public interface DataSourceUserAuthorizationHelper { diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java index cd55991d00..67d747f0bf 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java @@ -19,36 +19,39 @@ public class DataSourceUserAuthorizationHelperImpl implements DataSourceUserAuth private final Client client; private Boolean isAuthorizationRequired() { - String userString = client.threadPool() - .getThreadContext().getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + String userString = + client + .threadPool() + .getThreadContext() + .getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); return userString != null; } private List getUserRoles() { - String userString = 
client.threadPool() - .getThreadContext().getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + String userString = + client + .threadPool() + .getThreadContext() + .getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); User user = User.parse(userString); return user.getRoles(); } - @Override public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) { if (isAuthorizationRequired() && !dataSourceMetadata.getName().equals(DEFAULT_DATASOURCE_NAME)) { boolean isAuthorized = false; for (String role : getUserRoles()) { - if (dataSourceMetadata.getAllowedRoles().contains(role) - || role.equals("all_access")) { + if (dataSourceMetadata.getAllowedRoles().contains(role) || role.equals("all_access")) { isAuthorized = true; break; } } if (!isAuthorized) { throw new SecurityException( - String.format("User is not authorized to access datasource %s. " + String.format( + "User is not authorized to access datasource %s. " + "User should be mapped to any of the roles in %s for access.", dataSourceMetadata.getName(), dataSourceMetadata.getAllowedRoles().toString())); } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java index 578b66d0ba..4572b45f53 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java @@ -24,5 +24,4 @@ public interface Encryptor { * @return String plainText. 
*/ String decrypt(String encryptedText); - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java index 98f693eca1..c6abe78394 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java @@ -25,32 +25,40 @@ public class EncryptorImpl implements Encryptor { @Override public String encrypt(String plainText) { validate(masterKey); - final AwsCrypto crypto = AwsCrypto.builder() - .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) - .build(); + final AwsCrypto crypto = + AwsCrypto.builder() + .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) + .build(); - JceMasterKey jceMasterKey - = JceMasterKey.getInstance(new SecretKeySpec(masterKey.getBytes(), "AES"), "Custom", - "opensearch.config.master.key", "AES/GCM/NoPadding"); + JceMasterKey jceMasterKey = + JceMasterKey.getInstance( + new SecretKeySpec(masterKey.getBytes(), "AES"), + "Custom", + "opensearch.config.master.key", + "AES/GCM/NoPadding"); - final CryptoResult encryptResult = crypto.encryptData(jceMasterKey, - plainText.getBytes(StandardCharsets.UTF_8)); + final CryptoResult encryptResult = + crypto.encryptData(jceMasterKey, plainText.getBytes(StandardCharsets.UTF_8)); return Base64.getEncoder().encodeToString(encryptResult.getResult()); } @Override public String decrypt(String encryptedText) { validate(masterKey); - final AwsCrypto crypto = AwsCrypto.builder() - .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) - .build(); + final AwsCrypto crypto = + AwsCrypto.builder() + .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) + .build(); - JceMasterKey jceMasterKey - = JceMasterKey.getInstance(new SecretKeySpec(masterKey.getBytes(), "AES"), "Custom", - "opensearch.config.master.key", 
"AES/GCM/NoPadding"); + JceMasterKey jceMasterKey = + JceMasterKey.getInstance( + new SecretKeySpec(masterKey.getBytes(), "AES"), + "Custom", + "opensearch.config.master.key", + "AES/GCM/NoPadding"); - final CryptoResult decryptedResult - = crypto.decryptData(jceMasterKey, Base64.getDecoder().decode(encryptedText)); + final CryptoResult decryptedResult = + crypto.decryptData(jceMasterKey, Base64.getDecoder().decode(encryptedText)); return new String(decryptedResult.getResult()); } @@ -65,6 +73,4 @@ private void validate(String masterKey) { + "admin/datasources.rst#master-key-config-for-encrypting-credential-information"); } } - - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java index 484b0b92b2..40b601000c 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java @@ -7,12 +7,9 @@ package org.opensearch.sql.datasources.exceptions; -/** - * DataSourceNotFoundException. - */ +/** DataSourceNotFoundException. 
*/ public class DataSourceNotFoundException extends RuntimeException { public DataSourceNotFoundException(String message) { super(message); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java index 265b3ddf31..386eb780cd 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.datasources.exceptions; import com.google.gson.Gson; @@ -12,27 +11,20 @@ import lombok.Getter; import org.opensearch.core.rest.RestStatus; -/** - * Error Message. - */ +/** Error Message. */ public class ErrorMessage { protected Throwable exception; private final int status; - @Getter - private final String type; + @Getter private final String type; - @Getter - private final String reason; + @Getter private final String reason; - @Getter - private final String details; + @Getter private final String details; - /** - * Error Message Constructor. - */ + /** Error Message Constructor. 
*/ public ErrorMessage(Throwable exception, int status) { this.exception = exception; this.status = status; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java index 0cbb2355ca..b01d5b40dd 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.datasources.model.transport; - import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; import java.io.IOException; @@ -17,15 +16,11 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.sql.datasource.model.DataSourceMetadata; -public class CreateDataSourceActionRequest - extends ActionRequest { +public class CreateDataSourceActionRequest extends ActionRequest { - @Getter - private DataSourceMetadata dataSourceMetadata; + @Getter private DataSourceMetadata dataSourceMetadata; - /** - * Constructor of CreateDataSourceActionRequest from StreamInput. - */ + /** Constructor of CreateDataSourceActionRequest from StreamInput. 
*/ public CreateDataSourceActionRequest(StreamInput in) throws IOException { super(in); } @@ -38,9 +33,8 @@ public CreateDataSourceActionRequest(DataSourceMetadata dataSourceMetadata) { public ActionRequestValidationException validate() { if (this.dataSourceMetadata.getName().equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to create datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to create datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java index 377a249a44..a3cf6001ab 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java @@ -15,11 +15,9 @@ import org.opensearch.core.common.io.stream.StreamOutput; @RequiredArgsConstructor -public class CreateDataSourceActionResponse - extends ActionResponse { +public class CreateDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public CreateDataSourceActionResponse(StreamInput in) throws IOException { super(in); diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java index 1eb2d17bff..d6e3bcb3f9 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java +++ 
b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java @@ -18,8 +18,7 @@ public class DeleteDataSourceActionRequest extends ActionRequest { - @Getter - private String dataSourceName; + @Getter private String dataSourceName; /** Constructor of DeleteDataSourceActionRequest from StreamInput. */ public DeleteDataSourceActionRequest(StreamInput in) throws IOException { @@ -34,18 +33,15 @@ public DeleteDataSourceActionRequest(String dataSourceName) { public ActionRequestValidationException validate() { if (StringUtils.isEmpty(this.dataSourceName)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError("Datasource Name cannot be empty or null"); + exception.addValidationError("Datasource Name cannot be empty or null"); return exception; } else if (this.dataSourceName.equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to delete datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to delete datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java index 4bb6e290c5..0f0c2e0fc7 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java @@ -17,8 +17,7 @@ @RequiredArgsConstructor public class DeleteDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public 
DeleteDataSourceActionResponse(StreamInput in) throws IOException { super(in); @@ -29,5 +28,4 @@ public DeleteDataSourceActionResponse(StreamInput in) throws IOException { public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java index 23f4898543..2d9a4de35a 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java @@ -19,12 +19,9 @@ @NoArgsConstructor public class GetDataSourceActionRequest extends ActionRequest { - @Getter - private String dataSourceName; + @Getter private String dataSourceName; - /** - * Constructor of GetDataSourceActionRequest from StreamInput. - */ + /** Constructor of GetDataSourceActionRequest from StreamInput. 
*/ public GetDataSourceActionRequest(StreamInput in) throws IOException { super(in); } @@ -37,13 +34,11 @@ public GetDataSourceActionRequest(String dataSourceName) { public ActionRequestValidationException validate() { if (this.dataSourceName != null && this.dataSourceName.equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to fetch datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to fetch datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java index 964e5989f8..2712f515c0 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java @@ -17,8 +17,7 @@ @RequiredArgsConstructor public class GetDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public GetDataSourceActionResponse(StreamInput in) throws IOException { super(in); @@ -29,5 +28,4 @@ public GetDataSourceActionResponse(StreamInput in) throws IOException { public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java index 11bc2d1e20..b502f348e2 100644 --- 
a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.datasources.model.transport; - import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; import java.io.IOException; @@ -17,11 +16,9 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.sql.datasource.model.DataSourceMetadata; -public class UpdateDataSourceActionRequest - extends ActionRequest { +public class UpdateDataSourceActionRequest extends ActionRequest { - @Getter - private DataSourceMetadata dataSourceMetadata; + @Getter private DataSourceMetadata dataSourceMetadata; /** Constructor of UpdateDataSourceActionRequest from StreamInput. */ public UpdateDataSourceActionRequest(StreamInput in) throws IOException { @@ -36,9 +33,8 @@ public UpdateDataSourceActionRequest(DataSourceMetadata dataSourceMetadata) { public ActionRequestValidationException validate() { if (this.dataSourceMetadata.getName().equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to update datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to update datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java index 5f5f6f496a..c5c2eb58af 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java +++ 
b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java @@ -15,11 +15,9 @@ import org.opensearch.core.common.io.stream.StreamOutput; @RequiredArgsConstructor -public class UpdateDataSourceActionResponse - extends ActionResponse { +public class UpdateDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public UpdateDataSourceActionResponse(StreamInput in) throws IOException { super(in); diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java index a56512f838..e93245ed7b 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java @@ -47,7 +47,6 @@ import org.opensearch.sql.datasources.utils.Scheduler; import org.opensearch.sql.datasources.utils.XContentParserUtils; - public class RestDataSourceQueryAction extends BaseRestHandler { public static final String DATASOURCE_ACTIONS = "datasource_actions"; @@ -83,8 +82,9 @@ public List routes() { * Response body: * Ref [org.opensearch.sql.plugin.transport.datasource.model.GetDataSourceActionResponse] */ - new Route(GET, String.format(Locale.ROOT, "%s/{%s}", - BASE_DATASOURCE_ACTION_URL, "dataSourceName")), + new Route( + GET, + String.format(Locale.ROOT, "%s/{%s}", BASE_DATASOURCE_ACTION_URL, "dataSourceName")), new Route(GET, BASE_DATASOURCE_ACTION_URL), /* @@ -107,9 +107,9 @@ public List routes() { * Response body: Ref * [org.opensearch.sql.plugin.transport.datasource.model.DeleteDataSourceActionResponse] */ - new Route(DELETE, String.format(Locale.ROOT, "%s/{%s}", - BASE_DATASOURCE_ACTION_URL, "dataSourceName")) - ); + new Route( + DELETE, + String.format(Locale.ROOT, "%s/{%s}", BASE_DATASOURCE_ACTION_URL, 
"dataSourceName"))); } @Override @@ -125,101 +125,125 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient case DELETE: return executeDeleteRequest(restRequest, nodeClient); default: - return restChannel - -> restChannel.sendResponse(new BytesRestResponse(RestStatus.METHOD_NOT_ALLOWED, - String.valueOf(restRequest.method()))); + return restChannel -> + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.METHOD_NOT_ALLOWED, String.valueOf(restRequest.method()))); } } - private RestChannelConsumer executePostRequest(RestRequest restRequest, - NodeClient nodeClient) throws IOException { - - DataSourceMetadata dataSourceMetadata - = XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportCreateDataSourceAction.ACTION_TYPE, - new CreateDataSourceActionRequest(dataSourceMetadata), - new ActionListener<>() { - @Override - public void onResponse( - CreateDataSourceActionResponse createDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.CREATED, "application/json; charset=UTF-8", - createDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + private RestChannelConsumer executePostRequest(RestRequest restRequest, NodeClient nodeClient) + throws IOException { + + DataSourceMetadata dataSourceMetadata = + XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); + return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportCreateDataSourceAction.ACTION_TYPE, + new CreateDataSourceActionRequest(dataSourceMetadata), + new ActionListener<>() { + @Override + public void onResponse( + CreateDataSourceActionResponse createDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.CREATED, + "application/json; charset=UTF-8", 
+ createDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } - private RestChannelConsumer executeGetRequest(RestRequest restRequest, - NodeClient nodeClient) { + private RestChannelConsumer executeGetRequest(RestRequest restRequest, NodeClient nodeClient) { String dataSourceName = restRequest.param("dataSourceName"); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportGetDataSourceAction.ACTION_TYPE, - new GetDataSourceActionRequest(dataSourceName), - new ActionListener<>() { - @Override - public void onResponse(GetDataSourceActionResponse getDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.OK, "application/json; charset=UTF-8", - getDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportGetDataSourceAction.ACTION_TYPE, + new GetDataSourceActionRequest(dataSourceName), + new ActionListener<>() { + @Override + public void onResponse( + GetDataSourceActionResponse getDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.OK, + "application/json; charset=UTF-8", + getDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } - private RestChannelConsumer executeUpdateRequest(RestRequest restRequest, - NodeClient nodeClient) throws IOException { - DataSourceMetadata dataSourceMetadata - = XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportUpdateDataSourceAction.ACTION_TYPE, - new UpdateDataSourceActionRequest(dataSourceMetadata), - new ActionListener<>() { - @Override - public void 
onResponse( - UpdateDataSourceActionResponse updateDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.OK, "application/json; charset=UTF-8", - updateDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + private RestChannelConsumer executeUpdateRequest(RestRequest restRequest, NodeClient nodeClient) + throws IOException { + DataSourceMetadata dataSourceMetadata = + XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); + return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportUpdateDataSourceAction.ACTION_TYPE, + new UpdateDataSourceActionRequest(dataSourceMetadata), + new ActionListener<>() { + @Override + public void onResponse( + UpdateDataSourceActionResponse updateDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.OK, + "application/json; charset=UTF-8", + updateDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } - private RestChannelConsumer executeDeleteRequest(RestRequest restRequest, - NodeClient nodeClient) { + private RestChannelConsumer executeDeleteRequest(RestRequest restRequest, NodeClient nodeClient) { String dataSourceName = restRequest.param("dataSourceName"); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportDeleteDataSourceAction.ACTION_TYPE, - new DeleteDataSourceActionRequest(dataSourceName), - new ActionListener<>() { - @Override - public void onResponse( - DeleteDataSourceActionResponse deleteDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.NO_CONTENT, "application/json; charset=UTF-8", - deleteDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + 
return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportDeleteDataSourceAction.ACTION_TYPE, + new DeleteDataSourceActionRequest(dataSourceName), + new ActionListener<>() { + @Override + public void onResponse( + DeleteDataSourceActionResponse deleteDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.NO_CONTENT, + "application/json; charset=UTF-8", + deleteDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } private void handleException(Exception e, RestChannel restChannel) { @@ -240,8 +264,7 @@ private void handleException(Exception e, RestChannel restChannel) { private void reportError(final RestChannel channel, final Exception e, final RestStatus status) { channel.sendResponse( - new BytesRestResponse( - status, new ErrorMessage(e, status.getStatus()).toString())); + new BytesRestResponse(status, new ErrorMessage(e, status.getStatus()).toString())); } private static boolean isClientError(Exception e) { @@ -250,5 +273,4 @@ private static boolean isClientError(Exception e) { || e instanceof IllegalArgumentException || e instanceof IllegalStateException; } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java index 3fe2954c12..dbcc321b3f 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java @@ -4,8 +4,8 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; /** - * Interface for DataSourceLoaderCache which provides methods for - * fetch, loading and invalidating DataSource cache. 
+ * Interface for DataSourceLoaderCache which provides methods for fetch, loading and invalidating + * DataSource cache. */ public interface DataSourceLoaderCache { @@ -16,5 +16,4 @@ public interface DataSourceLoaderCache { * @return {@link DataSource} */ DataSource getOrLoadDataSource(DataSourceMetadata dataSourceMetadata); - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java index ba9520fc0c..44454dbd38 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java @@ -12,10 +12,9 @@ import org.opensearch.sql.storage.DataSourceFactory; /** - * Default implementation of DataSourceLoaderCache. This implementation - * utilizes Google Guava Cache {@link Cache} for caching DataSource objects - * against {@link DataSourceMetadata}. Expires the cache objects every 24 hrs after - * the last access. + * Default implementation of DataSourceLoaderCache. This implementation utilizes Google Guava Cache + * {@link Cache} for caching DataSource objects against {@link DataSourceMetadata}. Expires the + * cache objects every 24 hrs after the last access. */ public class DataSourceLoaderCacheImpl implements DataSourceLoaderCache { private final Map dataSourceFactoryMap; @@ -27,24 +26,24 @@ public class DataSourceLoaderCacheImpl implements DataSourceLoaderCache { * @param dataSourceFactorySet set of {@link DataSourceFactory}. 
*/ public DataSourceLoaderCacheImpl(Set dataSourceFactorySet) { - this.dataSourceFactoryMap = dataSourceFactorySet.stream() - .collect(Collectors.toMap(DataSourceFactory::getDataSourceType, f -> f)); - this.dataSourceCache = CacheBuilder.newBuilder() - .maximumSize(1000) - .expireAfterAccess(24, TimeUnit.HOURS) - .build(); + this.dataSourceFactoryMap = + dataSourceFactorySet.stream() + .collect(Collectors.toMap(DataSourceFactory::getDataSourceType, f -> f)); + this.dataSourceCache = + CacheBuilder.newBuilder().maximumSize(1000).expireAfterAccess(24, TimeUnit.HOURS).build(); } @Override public DataSource getOrLoadDataSource(DataSourceMetadata dataSourceMetadata) { DataSource dataSource = this.dataSourceCache.getIfPresent(dataSourceMetadata); if (dataSource == null) { - dataSource = this.dataSourceFactoryMap.get(dataSourceMetadata.getConnector()) - .createDataSource(dataSourceMetadata); + dataSource = + this.dataSourceFactoryMap + .get(dataSourceMetadata.getConnector()) + .createDataSource(dataSourceMetadata); this.dataSourceCache.put(dataSourceMetadata, dataSource); return dataSource; } return dataSource; } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java index e6483900c6..4d59c68fa0 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java @@ -13,29 +13,26 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; /** - * Interface for DataSourceMetadata Storage - * which will be only used by DataSourceService for Storage. + * Interface for DataSourceMetadata Storage which will be only used by DataSourceService for + * Storage. */ public interface DataSourceMetadataStorage { /** - * Returns all dataSource Metadata objects. 
The returned objects won't contain - * any of the credential info. + * Returns all dataSource Metadata objects. The returned objects won't contain any of the + * credential info. * * @return list of {@link DataSourceMetadata}. */ List getDataSourceMetadata(); - /** - * Gets {@link DataSourceMetadata} corresponding to the - * datasourceName from underlying storage. + * Gets {@link DataSourceMetadata} corresponding to the datasourceName from underlying storage. * * @param datasourceName name of the {@link DataSource}. */ Optional getDataSourceMetadata(String datasourceName); - /** * Stores {@link DataSourceMetadata} in underlying storage. * @@ -43,7 +40,6 @@ public interface DataSourceMetadataStorage { */ void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata); - /** * Updates {@link DataSourceMetadata} in underlying storage. * @@ -51,13 +47,10 @@ public interface DataSourceMetadataStorage { */ void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata); - /** - * Deletes {@link DataSourceMetadata} corresponding to the - * datasourceName from underlying storage. + * Deletes {@link DataSourceMetadata} corresponding to the datasourceName from underlying storage. * * @param datasourceName name of the {@link DataSource}. */ void deleteDataSourceMetadata(String datasourceName); - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java index 86afa90c2b..2ac480bbf2 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java @@ -41,13 +41,11 @@ public class DataSourceServiceImpl implements DataSourceService { private final DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper; - /** - * Construct from the set of {@link DataSourceFactory} at bootstrap time. 
- */ - public DataSourceServiceImpl(Set dataSourceFactories, - DataSourceMetadataStorage dataSourceMetadataStorage, - DataSourceUserAuthorizationHelper - dataSourceUserAuthorizationHelper) { + /** Construct from the set of {@link DataSourceFactory} at bootstrap time. */ + public DataSourceServiceImpl( + Set dataSourceFactories, + DataSourceMetadataStorage dataSourceMetadataStorage, + DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper) { this.dataSourceMetadataStorage = dataSourceMetadataStorage; this.dataSourceUserAuthorizationHelper = dataSourceUserAuthorizationHelper; this.dataSourceLoaderCache = new DataSourceLoaderCacheImpl(dataSourceFactories); @@ -55,8 +53,8 @@ public DataSourceServiceImpl(Set dataSourceFactories, @Override public Set getDataSourceMetadata(boolean isDefaultDataSourceRequired) { - List dataSourceMetadataList - = this.dataSourceMetadataStorage.getDataSourceMetadata(); + List dataSourceMetadataList = + this.dataSourceMetadataStorage.getDataSourceMetadata(); Set dataSourceMetadataSet = new HashSet<>(dataSourceMetadataList); if (isDefaultDataSourceRequired) { dataSourceMetadataSet.add(DataSourceMetadata.defaultOpenSearchDataSourceMetadata()); @@ -67,28 +65,26 @@ public Set getDataSourceMetadata(boolean isDefaultDataSource @Override public DataSourceMetadata getDataSourceMetadata(String datasourceName) { - Optional dataSourceMetadataOptional - = getDataSourceMetadataFromName(datasourceName); + Optional dataSourceMetadataOptional = + getDataSourceMetadataFromName(datasourceName); if (dataSourceMetadataOptional.isEmpty()) { - throw new IllegalArgumentException("DataSource with name: " + datasourceName - + " doesn't exist."); + throw new IllegalArgumentException( + "DataSource with name: " + datasourceName + " doesn't exist."); } removeAuthInfo(dataSourceMetadataOptional.get()); return dataSourceMetadataOptional.get(); } - @Override public DataSource getDataSource(String dataSourceName) { - Optional - dataSourceMetadataOptional = 
getDataSourceMetadataFromName(dataSourceName); + Optional dataSourceMetadataOptional = + getDataSourceMetadataFromName(dataSourceName); if (dataSourceMetadataOptional.isEmpty()) { throw new DataSourceNotFoundException( String.format("DataSource with name %s doesn't exist.", dataSourceName)); } else { DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); return dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata); } } @@ -130,7 +126,6 @@ public Boolean dataSourceExists(String dataSourceName) { || this.dataSourceMetadataStorage.getDataSourceMetadata(dataSourceName).isPresent(); } - /** * This can be moved to a different validator class when we introduce more connectors. * @@ -159,7 +154,6 @@ private Optional getDataSourceMetadataFromName(String dataSo } } - // It is advised to avoid sending any kind credential // info in api response from security point of view. 
private void removeAuthInfo(Set dataSourceMetadataSet) { @@ -167,11 +161,8 @@ private void removeAuthInfo(Set dataSourceMetadataSet) { } private void removeAuthInfo(DataSourceMetadata dataSourceMetadata) { - HashMap safeProperties - = new HashMap<>(dataSourceMetadata.getProperties()); - safeProperties - .entrySet() - .removeIf(entry -> entry.getKey().contains("auth")); + HashMap safeProperties = new HashMap<>(dataSourceMetadata.getProperties()); + safeProperties.entrySet().removeIf(entry -> entry.getKey().contains("auth")); dataSourceMetadata.setProperties(safeProperties); } } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java index d756f2e029..fef684bf1d 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java @@ -55,8 +55,8 @@ public class OpenSearchDataSourceMetadataStorage implements DataSourceMetadataSt private static final String DATASOURCE_INDEX_MAPPING_FILE_NAME = "datasources-index-mapping.yml"; private static final Integer DATASOURCE_QUERY_RESULT_SIZE = 10000; - private static final String DATASOURCE_INDEX_SETTINGS_FILE_NAME - = "datasources-index-settings.yml"; + private static final String DATASOURCE_INDEX_SETTINGS_FILE_NAME = + "datasources-index-settings.yml"; private static final Logger LOG = LogManager.getLogger(); private final Client client; private final ClusterService clusterService; @@ -64,15 +64,15 @@ public class OpenSearchDataSourceMetadataStorage implements DataSourceMetadataSt private final Encryptor encryptor; /** - * This class implements DataSourceMetadataStorage interface - * using OpenSearch as underlying storage. 
+ * This class implements DataSourceMetadataStorage interface using OpenSearch as underlying + * storage. * - * @param client opensearch NodeClient. + * @param client opensearch NodeClient. * @param clusterService ClusterService. - * @param encryptor Encryptor. + * @param encryptor Encryptor. */ - public OpenSearchDataSourceMetadataStorage(Client client, ClusterService clusterService, - Encryptor encryptor) { + public OpenSearchDataSourceMetadataStorage( + Client client, ClusterService clusterService, Encryptor encryptor) { this.client = client; this.clusterService = clusterService; this.encryptor = encryptor; @@ -93,8 +93,7 @@ public Optional getDataSourceMetadata(String datasourceName) createDataSourcesIndex(); return Optional.empty(); } - return searchInDataSourcesIndex(QueryBuilders.termQuery("name", datasourceName)) - .stream() + return searchInDataSourcesIndex(QueryBuilders.termQuery("name", datasourceName)).stream() .findFirst() .map(x -> this.encryptDecryptAuthenticationData(x, false)); } @@ -111,14 +110,14 @@ public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); ActionFuture indexResponseActionFuture; IndexResponse indexResponse; - try (ThreadContext.StoredContext storedContext = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext storedContext = + client.threadPool().getThreadContext().stashContext()) { indexRequest.source(XContentParserUtils.convertToXContent(dataSourceMetadata)); indexResponseActionFuture = client.index(indexRequest); indexResponse = indexResponseActionFuture.actionGet(); } catch (VersionConflictEngineException exception) { - throw new IllegalArgumentException("A datasource already exists with name: " - + dataSourceMetadata.getName()); + throw new IllegalArgumentException( + "A datasource already exists with name: " + dataSourceMetadata.getName()); } catch (Exception e) { throw new RuntimeException(e); 
} @@ -126,27 +125,27 @@ public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { if (indexResponse.getResult().equals(DocWriteResponse.Result.CREATED)) { LOG.debug("DatasourceMetadata : {} successfully created", dataSourceMetadata.getName()); } else { - throw new RuntimeException("Saving dataSource metadata information failed with result : " - + indexResponse.getResult().getLowercase()); + throw new RuntimeException( + "Saving dataSource metadata information failed with result : " + + indexResponse.getResult().getLowercase()); } } @Override public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { encryptDecryptAuthenticationData(dataSourceMetadata, true); - UpdateRequest updateRequest - = new UpdateRequest(DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); + UpdateRequest updateRequest = + new UpdateRequest(DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); UpdateResponse updateResponse; - try (ThreadContext.StoredContext storedContext = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext storedContext = + client.threadPool().getThreadContext().stashContext()) { updateRequest.doc(XContentParserUtils.convertToXContent(dataSourceMetadata)); updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - ActionFuture updateResponseActionFuture - = client.update(updateRequest); + ActionFuture updateResponseActionFuture = client.update(updateRequest); updateResponse = updateResponseActionFuture.actionGet(); } catch (DocumentMissingException exception) { - throw new DataSourceNotFoundException("Datasource with name: " - + dataSourceMetadata.getName() + " doesn't exist"); + throw new DataSourceNotFoundException( + "Datasource with name: " + dataSourceMetadata.getName() + " doesn't exist"); } catch (Exception e) { throw new RuntimeException(e); } @@ -154,8 +153,9 @@ public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { if 
(updateResponse.getResult().equals(DocWriteResponse.Result.UPDATED)) { LOG.debug("DatasourceMetadata : {} successfully updated", dataSourceMetadata.getName()); } else { - throw new RuntimeException("Saving dataSource metadata information failed with result : " - + updateResponse.getResult().getLowercase()); + throw new RuntimeException( + "Saving dataSource metadata information failed with result : " + + updateResponse.getResult().getLowercase()); } } @@ -164,48 +164,54 @@ public void deleteDataSourceMetadata(String datasourceName) { DeleteRequest deleteRequest = new DeleteRequest(DATASOURCE_INDEX_NAME); deleteRequest.id(datasourceName); ActionFuture deleteResponseActionFuture; - try (ThreadContext.StoredContext storedContext = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext storedContext = + client.threadPool().getThreadContext().stashContext()) { deleteResponseActionFuture = client.delete(deleteRequest); } DeleteResponse deleteResponse = deleteResponseActionFuture.actionGet(); if (deleteResponse.getResult().equals(DocWriteResponse.Result.DELETED)) { LOG.debug("DatasourceMetadata : {} successfully deleted", datasourceName); } else if (deleteResponse.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) { - throw new DataSourceNotFoundException("Datasource with name: " - + datasourceName + " doesn't exist"); + throw new DataSourceNotFoundException( + "Datasource with name: " + datasourceName + " doesn't exist"); } else { - throw new RuntimeException("Deleting dataSource metadata information failed with result : " - + deleteResponse.getResult().getLowercase()); + throw new RuntimeException( + "Deleting dataSource metadata information failed with result : " + + deleteResponse.getResult().getLowercase()); } } private void createDataSourcesIndex() { try { - InputStream mappingFileStream = OpenSearchDataSourceMetadataStorage.class.getClassLoader() - .getResourceAsStream(DATASOURCE_INDEX_MAPPING_FILE_NAME); - InputStream 
settingsFileStream = OpenSearchDataSourceMetadataStorage.class.getClassLoader() - .getResourceAsStream(DATASOURCE_INDEX_SETTINGS_FILE_NAME); + InputStream mappingFileStream = + OpenSearchDataSourceMetadataStorage.class + .getClassLoader() + .getResourceAsStream(DATASOURCE_INDEX_MAPPING_FILE_NAME); + InputStream settingsFileStream = + OpenSearchDataSourceMetadataStorage.class + .getClassLoader() + .getResourceAsStream(DATASOURCE_INDEX_SETTINGS_FILE_NAME); CreateIndexRequest createIndexRequest = new CreateIndexRequest(DATASOURCE_INDEX_NAME); - createIndexRequest.mapping(IOUtils.toString(mappingFileStream, StandardCharsets.UTF_8), - XContentType.YAML) - .settings(IOUtils.toString(settingsFileStream, StandardCharsets.UTF_8), - XContentType.YAML); + createIndexRequest + .mapping(IOUtils.toString(mappingFileStream, StandardCharsets.UTF_8), XContentType.YAML) + .settings( + IOUtils.toString(settingsFileStream, StandardCharsets.UTF_8), XContentType.YAML); ActionFuture createIndexResponseActionFuture; - try (ThreadContext.StoredContext ignored = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext ignored = + client.threadPool().getThreadContext().stashContext()) { createIndexResponseActionFuture = client.admin().indices().create(createIndexRequest); } CreateIndexResponse createIndexResponse = createIndexResponseActionFuture.actionGet(); if (createIndexResponse.isAcknowledged()) { LOG.info("Index: {} creation Acknowledged", DATASOURCE_INDEX_NAME); } else { - throw new RuntimeException( - "Index creation is not acknowledged."); + throw new RuntimeException("Index creation is not acknowledged."); } } catch (Throwable e) { throw new RuntimeException( - "Internal server error while creating" + DATASOURCE_INDEX_NAME + " index:: " + "Internal server error while creating" + + DATASOURCE_INDEX_NAME + + " index:: " + e.getMessage()); } } @@ -217,17 +223,19 @@ private List searchInDataSourcesIndex(QueryBuilder query) { 
searchSourceBuilder.query(query); searchSourceBuilder.size(DATASOURCE_QUERY_RESULT_SIZE); searchRequest.source(searchSourceBuilder); - // strongly consistent reads is requred. more info https://github.com/opensearch-project/sql/issues/1801. + // strongly consistent reads is requred. more info + // https://github.com/opensearch-project/sql/issues/1801. searchRequest.preference("_primary"); ActionFuture searchResponseActionFuture; - try (ThreadContext.StoredContext ignored = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext ignored = + client.threadPool().getThreadContext().stashContext()) { searchResponseActionFuture = client.search(searchRequest); } SearchResponse searchResponse = searchResponseActionFuture.actionGet(); if (searchResponse.status().getStatus() != 200) { - throw new RuntimeException("Fetching dataSource metadata information failed with status : " - + searchResponse.status()); + throw new RuntimeException( + "Fetching dataSource metadata information failed with status : " + + searchResponse.status()); } else { List list = new ArrayList<>(); for (SearchHit searchHit : searchResponse.getHits().getHits()) { @@ -245,14 +253,15 @@ private List searchInDataSourcesIndex(QueryBuilder query) { } @SuppressWarnings("missingswitchdefault") - private DataSourceMetadata encryptDecryptAuthenticationData(DataSourceMetadata dataSourceMetadata, - Boolean isEncryption) { + private DataSourceMetadata encryptDecryptAuthenticationData( + DataSourceMetadata dataSourceMetadata, Boolean isEncryption) { Map propertiesMap = dataSourceMetadata.getProperties(); - Optional authTypeOptional - = propertiesMap.keySet().stream().filter(s -> s.endsWith("auth.type")) - .findFirst() - .map(propertiesMap::get) - .map(AuthenticationType::get); + Optional authTypeOptional = + propertiesMap.keySet().stream() + .filter(s -> s.endsWith("auth.type")) + .findFirst() + .map(propertiesMap::get) + .map(AuthenticationType::get); if 
(authTypeOptional.isPresent()) { switch (authTypeOptional.get()) { case BASICAUTH: @@ -266,8 +275,8 @@ private DataSourceMetadata encryptDecryptAuthenticationData(DataSourceMetadata d return dataSourceMetadata; } - private void handleBasicAuthPropertiesEncryptionDecryption(Map propertiesMap, - Boolean isEncryption) { + private void handleBasicAuthPropertiesEncryptionDecryption( + Map propertiesMap, Boolean isEncryption) { ArrayList list = new ArrayList<>(); propertiesMap.keySet().stream() .filter(s -> s.endsWith("auth.username")) @@ -280,21 +289,19 @@ private void handleBasicAuthPropertiesEncryptionDecryption(Map p encryptOrDecrypt(propertiesMap, isEncryption, list); } - private void encryptOrDecrypt(Map propertiesMap, Boolean isEncryption, - List keyIdentifiers) { + private void encryptOrDecrypt( + Map propertiesMap, Boolean isEncryption, List keyIdentifiers) { for (String key : keyIdentifiers) { if (isEncryption) { - propertiesMap.put(key, - this.encryptor.encrypt(propertiesMap.get(key))); + propertiesMap.put(key, this.encryptor.encrypt(propertiesMap.get(key))); } else { - propertiesMap.put(key, - this.encryptor.decrypt(propertiesMap.get(key))); + propertiesMap.put(key, this.encryptor.decrypt(propertiesMap.get(key))); } } } - private void handleSigV4PropertiesEncryptionDecryption(Map propertiesMap, - Boolean isEncryption) { + private void handleSigV4PropertiesEncryptionDecryption( + Map propertiesMap, Boolean isEncryption) { ArrayList list = new ArrayList<>(); propertiesMap.keySet().stream() .filter(s -> s.endsWith("auth.access_key")) @@ -306,5 +313,4 @@ private void handleSigV4PropertiesEncryptionDecryption(Map prope .ifPresent(list::add); encryptOrDecrypt(propertiesMap, isEncryption, list); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java index ce1c1bb157..e49ff7d796 100644 --- 
a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java @@ -23,38 +23,44 @@ public class TransportCreateDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/create"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, CreateDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = + new ActionType<>(NAME, CreateDataSourceActionResponse::new); private DataSourceService dataSourceService; /** * TransportCreateDataSourceAction action for creating datasource. * - * @param transportService transportService. - * @param actionFilters actionFilters. + * @param transportService transportService. + * @param actionFilters actionFilters. * @param dataSourceService dataSourceService. */ @Inject - public TransportCreateDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportCreateDataSourceAction.NAME, transportService, actionFilters, + public TransportCreateDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportCreateDataSourceAction.NAME, + transportService, + actionFilters, CreateDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, CreateDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + CreateDataSourceActionRequest request, + ActionListener actionListener) { try { DataSourceMetadata dataSourceMetadata = request.getDataSourceMetadata(); dataSourceService.createDataSource(dataSourceMetadata); - actionListener.onResponse(new CreateDataSourceActionResponse("Created DataSource with name " - + 
dataSourceMetadata.getName())); + actionListener.onResponse( + new CreateDataSourceActionResponse( + "Created DataSource with name " + dataSourceMetadata.getName())); } catch (Exception e) { actionListener.onFailure(e); } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java index fe2df1ee51..7b814b516a 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java @@ -23,37 +23,43 @@ public class TransportDeleteDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/delete"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, DeleteDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = + new ActionType<>(NAME, DeleteDataSourceActionResponse::new); private DataSourceService dataSourceService; /** * TransportDeleteDataSourceAction action for deleting datasource. * - * @param transportService transportService. - * @param actionFilters actionFilters. + * @param transportService transportService. + * @param actionFilters actionFilters. * @param dataSourceService dataSourceService. 
*/ @Inject - public TransportDeleteDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportDeleteDataSourceAction.NAME, transportService, actionFilters, + public TransportDeleteDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportDeleteDataSourceAction.NAME, + transportService, + actionFilters, DeleteDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, DeleteDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + DeleteDataSourceActionRequest request, + ActionListener actionListener) { try { dataSourceService.deleteDataSource(request.getDataSourceName()); - actionListener.onResponse(new DeleteDataSourceActionResponse("Deleted DataSource with name " - + request.getDataSourceName())); + actionListener.onResponse( + new DeleteDataSourceActionResponse( + "Deleted DataSource with name " + request.getDataSourceName())); } catch (Exception e) { actionListener.onFailure(e); } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java index 7a36114755..b8147d47ca 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java @@ -26,30 +26,36 @@ public class TransportGetDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/read"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, GetDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = 
+ new ActionType<>(NAME, GetDataSourceActionResponse::new); private DataSourceService dataSourceService; /** * TransportGetDataSourceAction action for getting datasource. * - * @param transportService transportService. - * @param actionFilters actionFilters. + * @param transportService transportService. + * @param actionFilters actionFilters. * @param dataSourceService dataSourceService. */ @Inject - public TransportGetDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportGetDataSourceAction.NAME, transportService, actionFilters, + public TransportGetDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportGetDataSourceAction.NAME, + transportService, + actionFilters, GetDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, GetDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + GetDataSourceActionRequest request, + ActionListener actionListener) { try { String responseContent; if (request.getDataSourceName() == null) { @@ -66,30 +72,27 @@ protected void doExecute(Task task, GetDataSourceActionRequest request, private String handleGetAllDataSourcesRequest() { String responseContent; - Set dataSourceMetadataSet = - dataSourceService.getDataSourceMetadata(false); - responseContent = new JsonResponseFormatter>( - JsonResponseFormatter.Style.PRETTY) { - @Override - protected Object buildJsonObject(Set response) { - return response; - } - }.format(dataSourceMetadataSet); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(false); + responseContent = + new JsonResponseFormatter>(JsonResponseFormatter.Style.PRETTY) { + @Override + protected Object buildJsonObject(Set response) { + return response; + } + }.format(dataSourceMetadataSet); return 
responseContent; } private String handleSingleDataSourceRequest(String datasourceName) { String responseContent; - DataSourceMetadata dataSourceMetadata - = dataSourceService - .getDataSourceMetadata(datasourceName); - responseContent = new JsonResponseFormatter( - JsonResponseFormatter.Style.PRETTY) { - @Override - protected Object buildJsonObject(DataSourceMetadata response) { - return response; - } - }.format(dataSourceMetadata); + DataSourceMetadata dataSourceMetadata = dataSourceService.getDataSourceMetadata(datasourceName); + responseContent = + new JsonResponseFormatter(JsonResponseFormatter.Style.PRETTY) { + @Override + protected Object buildJsonObject(DataSourceMetadata response) { + return response; + } + }.format(dataSourceMetadata); return responseContent; } } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java index 13f80733e6..6964d574dd 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java @@ -23,8 +23,8 @@ public class TransportUpdateDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/update"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, UpdateDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = + new ActionType<>(NAME, UpdateDataSourceActionResponse::new); private DataSourceService dataSourceService; @@ -36,24 +36,30 @@ public class TransportUpdateDataSourceAction * @param dataSourceService dataSourceService. 
*/ @Inject - public TransportUpdateDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportUpdateDataSourceAction.NAME, transportService, actionFilters, + public TransportUpdateDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportUpdateDataSourceAction.NAME, + transportService, + actionFilters, UpdateDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, UpdateDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + UpdateDataSourceActionRequest request, + ActionListener actionListener) { try { dataSourceService.updateDataSource(request.getDataSourceMetadata()); - actionListener.onResponse(new UpdateDataSourceActionResponse("Updated DataSource with name " - + request.getDataSourceMetadata().getName())); + actionListener.onResponse( + new UpdateDataSourceActionResponse( + "Updated DataSource with name " + request.getDataSourceMetadata().getName())); } catch (Exception e) { actionListener.onFailure(e); } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java b/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java index 38a500afae..1ad79addac 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java @@ -22,9 +22,7 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasource.model.DataSourceType; -/** - * Utitlity class to serialize and deserialize objects in XContent. - */ +/** Utitlity class to serialize and deserialize objects in XContent. 
*/ @UtilityClass public class XContentParserUtils { public static final String NAME_FIELD = "name"; @@ -87,9 +85,13 @@ public static DataSourceMetadata toDataSourceMetadata(XContentParser parser) thr * @throws IOException IOException. */ public static DataSourceMetadata toDataSourceMetadata(String json) throws IOException { - try (XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - json)) { + try (XContentParser parser = + XContentType.JSON + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + json)) { return toDataSourceMetadata(parser); } } @@ -116,6 +118,4 @@ public static XContentBuilder convertToXContent(DataSourceMetadata metadata) thr builder.endObject(); return builder; } - - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java index 23bb4688e1..4bc4800093 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.datasources.auth; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java index 552bd0edf9..6ee3c12edd 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java @@ -27,65 +27,76 @@ public class 
DataSourceUserAuthorizationHelperImplTest { @Mock(answer = Answers.RETURNS_DEEP_STUBS) private Client client; - @InjectMocks - private DataSourceUserAuthorizationHelperImpl dataSourceUserAuthorizationHelper; - + @InjectMocks private DataSourceUserAuthorizationHelperImpl dataSourceUserAuthorizationHelper; @Test public void testAuthorizeDataSourceWithAllowedRoles() { String userString = "myuser|bckrole1,bckrol2|prometheus_access|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithAdminRole() { String userString = "myuser|bckrole1,bckrol2|all_access|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithNullUserString() { - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(null); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - 
.authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithDefaultDataSource() { String userString = "myuser|bckrole1,bckrol2|role1|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = DataSourceMetadata.defaultOpenSearchDataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithException() { String userString = "myuser|bckrole1,bckrol2|role1|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - SecurityException securityException - = Assert.assertThrows(SecurityException.class, + SecurityException securityException = + Assert.assertThrows( + SecurityException.class, () -> this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata)); - Assert.assertEquals("User is not authorized to access datasource test. " + Assert.assertEquals( + "User is not authorized to access datasource test. 
" + "User should be mapped to any of the roles in [prometheus_access] for access.", securityException.getMessage()); } @@ -98,5 +109,4 @@ private DataSourceMetadata dataSourceMetadata() { dataSourceMetadata.setProperties(new HashMap<>()); return dataSourceMetadata; } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java index d62a5a957a..26432b139b 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java @@ -17,7 +17,6 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; - @ExtendWith(MockitoExtension.class) public class EncryptorImplTest { @@ -38,9 +37,11 @@ public void testMasterKeySize() { String input = "This is a test input"; String masterKey8 = "12345678"; Encryptor encryptor8 = new EncryptorImpl(masterKey8); - assertThrows(AwsCryptoException.class, () -> { - encryptor8.encrypt(input); - }); + assertThrows( + AwsCryptoException.class, + () -> { + encryptor8.encrypt(input); + }); String masterKey16 = "1234567812345678"; Encryptor encryptor16 = new EncryptorImpl(masterKey16); @@ -54,9 +55,11 @@ public void testMasterKeySize() { String masterKey17 = "12345678123456781"; Encryptor encryptor17 = new EncryptorImpl(masterKey17); - assertThrows(AwsCryptoException.class, () -> { - encryptor17.encrypt(input); - }); + assertThrows( + AwsCryptoException.class, + () -> { + encryptor17.encrypt(input); + }); } @Test @@ -64,9 +67,11 @@ public void testInvalidBase64String() { String encrypted = "invalidBase64String"; Encryptor encryptor = new EncryptorImpl("randomMasterKey"); - assertThrows(BadCiphertextException.class, () -> { - encryptor.decrypt(encrypted); - }); + assertThrows( + BadCiphertextException.class, + () -> { + 
encryptor.decrypt(encrypted); + }); } @Test @@ -80,19 +85,21 @@ public void testDecryptWithDifferentKey() { String encrypted = encryptor1.encrypt(input); - assertThrows(Exception.class, () -> { - encryptor2.decrypt(encrypted); - }); + assertThrows( + Exception.class, + () -> { + encryptor2.decrypt(encrypted); + }); } @Test public void testEncryptionAndDecryptionWithNullMasterKey() { String input = "This is a test input"; Encryptor encryptor = new EncryptorImpl(null); - IllegalStateException illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.encrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + IllegalStateException illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.encrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. " @@ -100,10 +107,10 @@ public void testEncryptionAndDecryptionWithNullMasterKey() { + "https://github.com/opensearch-project/sql/blob/main/docs/user/ppl/" + "admin/datasources.rst#master-key-config-for-encrypting-credential-information", illegalStateException.getMessage()); - illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.decrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.decrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. 
" @@ -118,10 +125,10 @@ public void testEncryptionAndDecryptionWithEmptyMasterKey() { String masterKey = ""; String input = "This is a test input"; Encryptor encryptor = new EncryptorImpl(masterKey); - IllegalStateException illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.encrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + IllegalStateException illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.encrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. " @@ -129,10 +136,10 @@ public void testEncryptionAndDecryptionWithEmptyMasterKey() { + "https://github.com/opensearch-project/sql/blob/main/docs/user/ppl/" + "admin/datasources.rst#master-key-config-for-encrypting-credential-information", illegalStateException.getMessage()); - illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.decrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.decrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. 
" @@ -141,5 +148,4 @@ public void testEncryptionAndDecryptionWithEmptyMasterKey() { + "admin/datasources.rst#master-key-config-for-encrypting-credential-information", illegalStateException.getMessage()); } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java index bf656857b0..b2ea221eb7 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java @@ -25,11 +25,9 @@ @ExtendWith(MockitoExtension.class) class DataSourceLoaderCacheImplTest { - @Mock - private DataSourceFactory dataSourceFactory; + @Mock private DataSourceFactory dataSourceFactory; - @Mock - private StorageEngine storageEngine; + @Mock private StorageEngine storageEngine; @BeforeEach public void setup() { @@ -55,8 +53,8 @@ void testGetOrLoadDataSource() { dataSourceMetadata.setProperties(ImmutableMap.of()); DataSource dataSource = dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata); verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); - Assertions.assertEquals(dataSource, - dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata)); + Assertions.assertEquals( + dataSource, dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata)); verifyNoMoreInteractions(dataSourceFactory); } @@ -81,5 +79,4 @@ private DataSourceMetadata getMetadata() { dataSourceMetadata.setProperties(ImmutableMap.of()); return dataSourceMetadata; } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java index e1312ec582..56d3586c6e 100644 --- 
a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java @@ -46,15 +46,11 @@ @ExtendWith(MockitoExtension.class) class DataSourceServiceImplTest { - @Mock - private DataSourceFactory dataSourceFactory; - @Mock - private StorageEngine storageEngine; - @Mock - private DataSourceMetadataStorage dataSourceMetadataStorage; + @Mock private DataSourceFactory dataSourceFactory; + @Mock private StorageEngine storageEngine; + @Mock private DataSourceMetadataStorage dataSourceMetadataStorage; - @Mock - private DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper; + @Mock private DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper; private DataSourceService dataSourceService; @@ -75,7 +71,8 @@ public void setup() { { add(dataSourceFactory); } - }, dataSourceMetadataStorage, + }, + dataSourceMetadataStorage, dataSourceUserAuthorizationHelper); } @@ -91,22 +88,18 @@ void testGetDataSourceForDefaultOpenSearchDataSource() { @Test void testGetDataSourceForNonExistingDataSource() { - when(dataSourceMetadataStorage.getDataSourceMetadata("test")) - .thenReturn(Optional.empty()); + when(dataSourceMetadataStorage.getDataSourceMetadata("test")).thenReturn(Optional.empty()); DataSourceNotFoundException exception = assertThrows( - DataSourceNotFoundException.class, - () -> - dataSourceService.getDataSource("test")); + DataSourceNotFoundException.class, () -> dataSourceService.getDataSource("test")); assertEquals("DataSource with name test doesn't exist.", exception.getMessage()); - verify(dataSourceMetadataStorage, times(1)) - .getDataSourceMetadata("test"); + verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata("test"); } @Test void testGetDataSourceSuccessCase() { - DataSourceMetadata dataSourceMetadata = metadata("test", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + 
DataSourceMetadata dataSourceMetadata = + metadata("test", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); doNothing().when(dataSourceUserAuthorizationHelper).authorizeDataSource(dataSourceMetadata); when(dataSourceMetadataStorage.getDataSourceMetadata("test")) .thenReturn(Optional.of(dataSourceMetadata)); @@ -114,26 +107,31 @@ void testGetDataSourceSuccessCase() { assertEquals("test", dataSource.getName()); assertEquals(DataSourceType.OPENSEARCH, dataSource.getConnectorType()); verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata("test"); - verify(dataSourceFactory, times(1)) - .createDataSource(dataSourceMetadata); + verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); } @Test void testGetDataSourceWithAuthorizationFailure() { - DataSourceMetadata dataSourceMetadata = metadata("test", DataSourceType.OPENSEARCH, - Collections.singletonList("prometheus_access"), ImmutableMap.of()); - doThrow(new SecurityException("User is not authorized to access datasource test. " - + "User should be mapped to any of the roles in [prometheus_access] for access.")) + DataSourceMetadata dataSourceMetadata = + metadata( + "test", + DataSourceType.OPENSEARCH, + Collections.singletonList("prometheus_access"), + ImmutableMap.of()); + doThrow( + new SecurityException( + "User is not authorized to access datasource test. User should be mapped to any of" + + " the roles in [prometheus_access] for access.")) .when(dataSourceUserAuthorizationHelper) .authorizeDataSource(dataSourceMetadata); when(dataSourceMetadataStorage.getDataSourceMetadata("test")) .thenReturn(Optional.of(dataSourceMetadata)); - - SecurityException securityException - = Assertions.assertThrows(SecurityException.class, - () -> dataSourceService.getDataSource("test")); - Assertions.assertEquals("User is not authorized to access datasource test. 
" + SecurityException securityException = + Assertions.assertThrows( + SecurityException.class, () -> dataSourceService.getDataSource("test")); + Assertions.assertEquals( + "User is not authorized to access datasource test. " + "User should be mapped to any of the roles in [prometheus_access] for access.", securityException.getMessage()); @@ -141,21 +139,23 @@ void testGetDataSourceWithAuthorizationFailure() { verify(dataSourceFactory, times(0)).createDataSource(dataSourceMetadata); } - @Test void testCreateDataSourceSuccessCase() { - DataSourceMetadata dataSourceMetadata = metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + DataSourceMetadata dataSourceMetadata = + metadata("testDS", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); dataSourceService.createDataSource(dataSourceMetadata); - verify(dataSourceMetadataStorage, times(1)) - .createDataSourceMetadata(dataSourceMetadata); - verify(dataSourceFactory, times(1)) - .createDataSource(dataSourceMetadata); + verify(dataSourceMetadataStorage, times(1)).createDataSourceMetadata(dataSourceMetadata); + verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) - .thenReturn(Optional.ofNullable(metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()))); + .thenReturn( + Optional.ofNullable( + metadata( + "testDS", + DataSourceType.OPENSEARCH, + Collections.emptyList(), + ImmutableMap.of()))); DataSource dataSource = dataSourceService.getDataSource("testDS"); assertEquals("testDS", dataSource.getName()); assertEquals(storageEngine, dataSource.getStorageEngine()); @@ -164,14 +164,15 @@ void testCreateDataSourceSuccessCase() { @Test void testCreateDataSourceWithDisallowedDatasourceName() { - DataSourceMetadata dataSourceMetadata = metadata("testDS$$$", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + 
DataSourceMetadata dataSourceMetadata = + metadata( + "testDS$$$", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> - dataSourceService.createDataSource(dataSourceMetadata)); - assertEquals("DataSource Name: testDS$$$ contains illegal characters." + () -> dataSourceService.createDataSource(dataSourceMetadata)); + assertEquals( + "DataSource Name: testDS$$$ contains illegal characters." + " Allowed characters: a-zA-Z0-9_-*@.", exception.getMessage()); verify(dataSourceFactory, times(1)).getDataSourceType(); @@ -181,14 +182,14 @@ void testCreateDataSourceWithDisallowedDatasourceName() { @Test void testCreateDataSourceWithEmptyDatasourceName() { - DataSourceMetadata dataSourceMetadata = metadata("", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + DataSourceMetadata dataSourceMetadata = + metadata("", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> - dataSourceService.createDataSource(dataSourceMetadata)); - assertEquals("Missing Name Field from a DataSource. Name is a required parameter.", + () -> dataSourceService.createDataSource(dataSourceMetadata)); + assertEquals( + "Missing Name Field from a DataSource. 
Name is a required parameter.", exception.getMessage()); verify(dataSourceFactory, times(1)).getDataSourceType(); verify(dataSourceFactory, times(0)).createDataSource(dataSourceMetadata); @@ -197,14 +198,14 @@ void testCreateDataSourceWithEmptyDatasourceName() { @Test void testCreateDataSourceWithNullParameters() { - DataSourceMetadata dataSourceMetadata = metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), null); + DataSourceMetadata dataSourceMetadata = + metadata("testDS", DataSourceType.OPENSEARCH, Collections.emptyList(), null); IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> - dataSourceService.createDataSource(dataSourceMetadata)); - assertEquals("Missing properties field in datasource configuration. " + () -> dataSourceService.createDataSource(dataSourceMetadata)); + assertEquals( + "Missing properties field in datasource configuration. " + "Properties are required parameters.", exception.getMessage()); verify(dataSourceFactory, times(1)).getDataSourceType(); @@ -219,88 +220,99 @@ void testGetDataSourceMetadataSet() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "username"); properties.put("prometheus.auth.password", "password"); - when(dataSourceMetadataStorage.getDataSourceMetadata()).thenReturn(new ArrayList<>() { - { - add(metadata("testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), - properties)); - } - }); - Set dataSourceMetadataSet - = dataSourceService.getDataSourceMetadata(false); + when(dataSourceMetadataStorage.getDataSourceMetadata()) + .thenReturn( + new ArrayList<>() { + { + add( + metadata( + "testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), properties)); + } + }); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(false); assertEquals(1, dataSourceMetadataSet.size()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataSet.iterator().next(); 
assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.uri")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.username")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.password")); - assertFalse(dataSourceMetadataSet - .contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); + assertFalse( + dataSourceMetadataSet.contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata(); } @Test void testGetDataSourceMetadataSetWithDefaultDatasource() { - when(dataSourceMetadataStorage.getDataSourceMetadata()).thenReturn(new ArrayList<>() { - { - add(metadata("testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), - ImmutableMap.of())); - } - }); - Set dataSourceMetadataSet - = dataSourceService.getDataSourceMetadata(true); + when(dataSourceMetadataStorage.getDataSourceMetadata()) + .thenReturn( + new ArrayList<>() { + { + add( + metadata( + "testDS", + DataSourceType.PROMETHEUS, + Collections.emptyList(), + ImmutableMap.of())); + } + }); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(true); assertEquals(2, dataSourceMetadataSet.size()); - assertTrue(dataSourceMetadataSet - .contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); + assertTrue( + dataSourceMetadataSet.contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata(); } @Test void testUpdateDataSourceSuccessCase() { - DataSourceMetadata dataSourceMetadata = metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + DataSourceMetadata dataSourceMetadata = + metadata("testDS", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); dataSourceService.updateDataSource(dataSourceMetadata); - 
verify(dataSourceMetadataStorage, times(1)) - .updateDataSourceMetadata(dataSourceMetadata); - verify(dataSourceFactory, times(1)) - .createDataSource(dataSourceMetadata); + verify(dataSourceMetadataStorage, times(1)).updateDataSourceMetadata(dataSourceMetadata); + verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); } @Test void testUpdateDefaultDataSource() { - DataSourceMetadata dataSourceMetadata = metadata(DEFAULT_DATASOURCE_NAME, - DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); - UnsupportedOperationException unsupportedOperationException - = assertThrows(UnsupportedOperationException.class, + DataSourceMetadata dataSourceMetadata = + metadata( + DEFAULT_DATASOURCE_NAME, + DataSourceType.OPENSEARCH, + Collections.emptyList(), + ImmutableMap.of()); + UnsupportedOperationException unsupportedOperationException = + assertThrows( + UnsupportedOperationException.class, () -> dataSourceService.updateDataSource(dataSourceMetadata)); - assertEquals("Not allowed to update default datasource :" + DEFAULT_DATASOURCE_NAME, + assertEquals( + "Not allowed to update default datasource :" + DEFAULT_DATASOURCE_NAME, unsupportedOperationException.getMessage()); } @Test void testDeleteDatasource() { dataSourceService.deleteDataSource("testDS"); - verify(dataSourceMetadataStorage, times(1)) - .deleteDataSourceMetadata("testDS"); + verify(dataSourceMetadataStorage, times(1)).deleteDataSourceMetadata("testDS"); } @Test void testDeleteDefaultDatasource() { - UnsupportedOperationException unsupportedOperationException - = assertThrows(UnsupportedOperationException.class, - () -> dataSourceService.deleteDataSource(DEFAULT_DATASOURCE_NAME)); - assertEquals("Not allowed to delete default datasource :" + DEFAULT_DATASOURCE_NAME, + UnsupportedOperationException unsupportedOperationException = + assertThrows( + UnsupportedOperationException.class, + () -> dataSourceService.deleteDataSource(DEFAULT_DATASOURCE_NAME)); + assertEquals( + "Not 
allowed to delete default datasource :" + DEFAULT_DATASOURCE_NAME, unsupportedOperationException.getMessage()); } @Test void testDataSourceExists() { - when(dataSourceMetadataStorage.getDataSourceMetadata("test")) - .thenReturn(Optional.empty()); + when(dataSourceMetadataStorage.getDataSourceMetadata("test")).thenReturn(Optional.empty()); Assertions.assertFalse(dataSourceService.dataSourceExists("test")); when(dataSourceMetadataStorage.getDataSourceMetadata("test")) - .thenReturn(Optional.of(metadata("test", DataSourceType.PROMETHEUS, - List.of(), ImmutableMap.of()))); + .thenReturn( + Optional.of(metadata("test", DataSourceType.PROMETHEUS, List.of(), ImmutableMap.of()))); Assertions.assertTrue(dataSourceService.dataSourceExists("test")); } @@ -310,9 +322,8 @@ void testDataSourceExistsForDefaultDataSource() { verifyNoInteractions(dataSourceMetadataStorage); } - DataSourceMetadata metadata(String name, DataSourceType type, - List allowedRoles, - Map properties) { + DataSourceMetadata metadata( + String name, DataSourceType type, List allowedRoles, Map properties) { DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); dataSourceMetadata.setName(name); dataSourceMetadata.setConnector(type); @@ -329,13 +340,15 @@ void testRemovalOfAuthorizationInfo() { properties.put("prometheus.auth.username", "username"); properties.put("prometheus.auth.password", "password"); DataSourceMetadata dataSourceMetadata = - new DataSourceMetadata("testDS", DataSourceType.PROMETHEUS, - Collections.singletonList("prometheus_access"), properties); + new DataSourceMetadata( + "testDS", + DataSourceType.PROMETHEUS, + Collections.singletonList("prometheus_access"), + properties); when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) .thenReturn(Optional.of(dataSourceMetadata)); - DataSourceMetadata dataSourceMetadata1 - = dataSourceService.getDataSourceMetadata("testDS"); + DataSourceMetadata dataSourceMetadata1 = dataSourceService.getDataSourceMetadata("testDS"); 
assertEquals("testDS", dataSourceMetadata1.getName()); assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata1.getConnector()); assertFalse(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.type")); @@ -345,10 +358,11 @@ void testRemovalOfAuthorizationInfo() { @Test void testGetDataSourceMetadataForNonExistingDataSource() { - when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) - .thenReturn(Optional.empty()); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, - () -> dataSourceService.getDataSourceMetadata("testDS")); + when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")).thenReturn(Optional.empty()); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> dataSourceService.getDataSourceMetadata("testDS")); assertEquals("DataSource with name: testDS doesn't exist.", exception.getMessage()); } @@ -360,16 +374,15 @@ void testGetDataSourceMetadataForSpecificDataSourceName() { properties.put("prometheus.auth.username", "username"); properties.put("prometheus.auth.password", "password"); when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) - .thenReturn(Optional.ofNullable( - metadata("testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), - properties))); - DataSourceMetadata dataSourceMetadata - = this.dataSourceService.getDataSourceMetadata("testDS"); + .thenReturn( + Optional.ofNullable( + metadata( + "testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), properties))); + DataSourceMetadata dataSourceMetadata = this.dataSourceService.getDataSourceMetadata("testDS"); assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.uri")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.username")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.password")); 
verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata("testDS"); } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java index b1554ed585..6e412e8333 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java @@ -52,33 +52,25 @@ public class OpenSearchDataSourceMetadataStorageTest { @Mock(answer = Answers.RETURNS_DEEP_STUBS) private Client client; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private ClusterService clusterService; - @Mock - private Encryptor encryptor; + + @Mock private Encryptor encryptor; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private SearchResponse searchResponse; - @Mock - private ActionFuture searchResponseActionFuture; - @Mock - private ActionFuture createIndexResponseActionFuture; - @Mock - private ActionFuture indexResponseActionFuture; - @Mock - private IndexResponse indexResponse; - @Mock - private ActionFuture updateResponseActionFuture; - @Mock - private UpdateResponse updateResponse; - @Mock - private ActionFuture deleteResponseActionFuture; - @Mock - private DeleteResponse deleteResponse; - @Mock - private SearchHit searchHit; - @InjectMocks - private OpenSearchDataSourceMetadataStorage openSearchDataSourceMetadataStorage; + @Mock private ActionFuture searchResponseActionFuture; + @Mock private ActionFuture createIndexResponseActionFuture; + @Mock private ActionFuture indexResponseActionFuture; + @Mock private IndexResponse indexResponse; + @Mock private ActionFuture updateResponseActionFuture; + @Mock private UpdateResponse updateResponse; + @Mock private ActionFuture deleteResponseActionFuture; + @Mock private DeleteResponse deleteResponse; + @Mock private 
SearchHit searchHit; + @InjectMocks private OpenSearchDataSourceMetadataStorage openSearchDataSourceMetadataStorage; @SneakyThrows @Test @@ -91,28 +83,24 @@ public void testGetDataSourceMetadata() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsString()) - .thenReturn(getBasicDataSourceMetadataString()); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); + Mockito.when(searchHit.getSourceAsString()).thenReturn(getBasicDataSourceMetadataString()); Mockito.when(encryptor.decrypt("password")).thenReturn("password"); Mockito.when(encryptor.decrypt("username")).thenReturn("username"); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); - + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isEmpty()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); Assertions.assertEquals(TEST_DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); - Assertions.assertEquals("password", - dataSourceMetadata.getProperties().get("prometheus.auth.password")); - Assertions.assertEquals("username", - dataSourceMetadata.getProperties().get("prometheus.auth.username")); - Assertions.assertEquals("basicauth", - dataSourceMetadata.getProperties().get("prometheus.auth.type")); + Assertions.assertEquals( + "password", dataSourceMetadata.getProperties().get("prometheus.auth.password")); + Assertions.assertEquals( + "username", dataSourceMetadata.getProperties().get("prometheus.auth.username")); + Assertions.assertEquals( + "basicauth", dataSourceMetadata.getProperties().get("prometheus.auth.type")); } 
@SneakyThrows @@ -124,9 +112,12 @@ public void testGetDataSourceMetadataWith404SearchResponse() { Mockito.when(searchResponseActionFuture.actionGet()).thenReturn(searchResponse); Mockito.when(searchResponse.status()).thenReturn(RestStatus.NOT_FOUND); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> openSearchDataSourceMetadataStorage.getDataSourceMetadata( - TEST_DATASOURCE_INDEX_NAME)); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + openSearchDataSourceMetadataStorage.getDataSourceMetadata( + TEST_DATASOURCE_INDEX_NAME)); Assertions.assertEquals( "Fetching dataSource metadata information failed with status : NOT_FOUND", runtimeException.getMessage()); @@ -143,15 +134,13 @@ public void testGetDataSourceMetadataWithParsingFailed() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsString()) - .thenReturn("..testDs"); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); + Mockito.when(searchHit.getSourceAsString()).thenReturn("..testDs"); - Assertions.assertThrows(RuntimeException.class, - () -> openSearchDataSourceMetadataStorage.getDataSourceMetadata( - TEST_DATASOURCE_INDEX_NAME)); + Assertions.assertThrows( + RuntimeException.class, + () -> + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME)); } @SneakyThrows @@ -165,28 +154,24 @@ public void testGetDataSourceMetadataWithAWSSigV4() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsString()) - .thenReturn(getAWSSigv4DataSourceMetadataString()); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); + 
Mockito.when(searchHit.getSourceAsString()).thenReturn(getAWSSigv4DataSourceMetadataString()); Mockito.when(encryptor.decrypt("secret_key")).thenReturn("secret_key"); Mockito.when(encryptor.decrypt("access_key")).thenReturn("access_key"); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); - + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isEmpty()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); Assertions.assertEquals(TEST_DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); - Assertions.assertEquals("secret_key", - dataSourceMetadata.getProperties().get("prometheus.auth.secret_key")); - Assertions.assertEquals("access_key", - dataSourceMetadata.getProperties().get("prometheus.auth.access_key")); - Assertions.assertEquals("awssigv4", - dataSourceMetadata.getProperties().get("prometheus.auth.type")); + Assertions.assertEquals( + "secret_key", dataSourceMetadata.getProperties().get("prometheus.auth.secret_key")); + Assertions.assertEquals( + "access_key", dataSourceMetadata.getProperties().get("prometheus.auth.access_key")); + Assertions.assertEquals( + "awssigv4", dataSourceMetadata.getProperties().get("prometheus.auth.type")); } @SneakyThrows @@ -200,31 +185,27 @@ public void testGetDataSourceMetadataWithBasicAuth() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); Mockito.when(searchHit.getSourceAsString()) .thenReturn(getDataSourceMetadataStringWithBasicAuthentication()); Mockito.when(encryptor.decrypt("username")).thenReturn("username"); 
Mockito.when(encryptor.decrypt("password")).thenReturn("password"); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); - + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isEmpty()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); Assertions.assertEquals(TEST_DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); - Assertions.assertEquals("username", - dataSourceMetadata.getProperties().get("prometheus.auth.username")); - Assertions.assertEquals("password", - dataSourceMetadata.getProperties().get("prometheus.auth.password")); - Assertions.assertEquals("basicauth", - dataSourceMetadata.getProperties().get("prometheus.auth.type")); + Assertions.assertEquals( + "username", dataSourceMetadata.getProperties().get("prometheus.auth.username")); + Assertions.assertEquals( + "password", dataSourceMetadata.getProperties().get("prometheus.auth.password")); + Assertions.assertEquals( + "basicauth", dataSourceMetadata.getProperties().get("prometheus.auth.type")); } - @SneakyThrows @Test public void testGetDataSourceMetadataList() { @@ -236,15 +217,12 @@ public void testGetDataSourceMetadataList() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); Mockito.when(searchHit.getSourceAsString()) .thenReturn(getDataSourceMetadataStringWithNoAuthentication()); - List dataSourceMetadataList - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(); - + List dataSourceMetadataList = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(); Assertions.assertEquals(1, 
dataSourceMetadataList.size()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataList.get(0); @@ -252,7 +230,6 @@ public void testGetDataSourceMetadataList() { Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); } - @SneakyThrows @Test public void testGetDataSourceMetadataListWithNoIndex() { @@ -264,8 +241,8 @@ public void testGetDataSourceMetadataListWithNoIndex() { .thenReturn(new CreateIndexResponse(true, true, DATASOURCE_INDEX_NAME)); Mockito.when(client.index(ArgumentMatchers.any())).thenReturn(indexResponseActionFuture); - List dataSourceMetadataList - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(); + List dataSourceMetadataList = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(); Assertions.assertEquals(0, dataSourceMetadataList.size()); } @@ -281,8 +258,8 @@ public void testGetDataSourceMetadataWithNoIndex() { .thenReturn(new CreateIndexResponse(true, true, DATASOURCE_INDEX_NAME)); Mockito.when(client.index(ArgumentMatchers.any())).thenReturn(indexResponseActionFuture); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isPresent()); } @@ -310,8 +287,6 @@ public void testCreateDataSourceMetadata() { Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -334,7 +309,6 @@ public void testCreateDataSourceMetadataWithOutCreatingIndex() { Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); } - @Test public void testCreateDataSourceMetadataFailedWithNotFoundResponse() { @@ -351,10 +325,14 @@ 
public void testCreateDataSourceMetadataFailedWithNotFoundResponse() { Mockito.when(indexResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("Saving dataSource metadata information failed with result : not_found", + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "Saving dataSource metadata information failed with result : not_found", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); @@ -362,8 +340,6 @@ public void testCreateDataSourceMetadataFailedWithNotFoundResponse() { Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -381,20 +357,19 @@ public void testCreateDataSourceMetadataWithVersionConflict() { .thenThrow(VersionConflictEngineException.class); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); IllegalArgumentException illegalArgumentException = - Assertions.assertThrows(IllegalArgumentException.class, - () -> this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("A datasource already exists with name: testDS", - illegalArgumentException.getMessage()); - + Assertions.assertThrows( + IllegalArgumentException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "A datasource already 
exists with name: testDS", illegalArgumentException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -412,19 +387,20 @@ public void testCreateDataSourceMetadataWithException() { .thenThrow(new RuntimeException("error while indexing")); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("java.lang.RuntimeException: error while indexing", - runtimeException.getMessage()); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "java.lang.RuntimeException: error while indexing", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -440,9 +416,12 @@ public void testCreateDataSourceMetadataWithIndexCreationFailed() { .thenReturn(new CreateIndexResponse(false, false, DATASOURCE_INDEX_NAME)); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> 
this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); Assertions.assertEquals( "Internal server error while creating.ql-datasources index:: " + "Index creation is not acknowledged.", @@ -470,7 +449,6 @@ public void testUpdateDataSourceMetadata() { Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } @Test @@ -482,10 +460,14 @@ public void testUpdateDataSourceMetadataWithNotFoundResult() { Mockito.when(updateResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("Saving dataSource metadata information failed with result : not_found", + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "Saving dataSource metadata information failed with result : not_found", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); @@ -493,32 +475,31 @@ public void testUpdateDataSourceMetadataWithNotFoundResult() { Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } 
@Test public void testUpdateDataSourceMetadataWithDocumentMissingException() { Mockito.when(encryptor.encrypt("secret_key")).thenReturn("secret_key"); Mockito.when(encryptor.encrypt("access_key")).thenReturn("access_key"); - Mockito.when(client.update(ArgumentMatchers.any())).thenThrow(new DocumentMissingException( - ShardId.fromString("[2][2]"), "testDS")); + Mockito.when(client.update(ArgumentMatchers.any())) + .thenThrow(new DocumentMissingException(ShardId.fromString("[2][2]"), "testDS")); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); dataSourceMetadata.setName("testDS"); - DataSourceNotFoundException dataSourceNotFoundException = - Assertions.assertThrows(DataSourceNotFoundException.class, - () -> this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("Datasource with name: testDS doesn't exist", - dataSourceNotFoundException.getMessage()); + Assertions.assertThrows( + DataSourceNotFoundException.class, + () -> + this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "Datasource with name: testDS doesn't exist", dataSourceNotFoundException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } @Test @@ -530,19 +511,20 @@ public void testUpdateDataSourceMetadataWithRuntimeException() { DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); dataSourceMetadata.setName("testDS"); - - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( - dataSourceMetadata)); - 
Assertions.assertEquals("java.lang.RuntimeException: error message", - runtimeException.getMessage()); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "java.lang.RuntimeException: error message", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } @Test @@ -566,11 +548,11 @@ public void testDeleteDataSourceMetadataWhichisAlreadyDeleted() { Mockito.when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); DataSourceNotFoundException dataSourceNotFoundException = - Assertions.assertThrows(DataSourceNotFoundException.class, + Assertions.assertThrows( + DataSourceNotFoundException.class, () -> this.openSearchDataSourceMetadataStorage.deleteDataSourceMetadata("testDS")); - Assertions.assertEquals("Datasource with name: testDS doesn't exist", - dataSourceNotFoundException.getMessage()); - + Assertions.assertEquals( + "Datasource with name: testDS doesn't exist", dataSourceNotFoundException.getMessage()); Mockito.verifyNoInteractions(encryptor); Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); @@ -584,9 +566,12 @@ public void testDeleteDataSourceMetadataWithUnexpectedResult() { Mockito.when(deleteResponseActionFuture.actionGet()).thenReturn(deleteResponse); Mockito.when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> 
this.openSearchDataSourceMetadataStorage.deleteDataSourceMetadata("testDS")); - Assertions.assertEquals("Deleting dataSource metadata information failed with result : noop", + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> this.openSearchDataSourceMetadataStorage.deleteDataSourceMetadata("testDS")); + Assertions.assertEquals( + "Deleting dataSource metadata information failed with result : noop", runtimeException.getMessage()); Mockito.verifyNoInteractions(encryptor); @@ -666,5 +651,4 @@ private DataSourceMetadata getDataSourceMetadata() { dataSourceMetadata.setProperties(properties); return dataSourceMetadata; } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java index 2ece0eb5cd..a2482602ff 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java @@ -27,27 +27,23 @@ @ExtendWith(MockitoExtension.class) public class TransportCreateDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportCreateDataSourceAction action; - @Mock - private DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportCreateDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; + @Captor private ArgumentCaptor createDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - 
action = new TransportCreateDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportCreateDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -61,10 +57,10 @@ public void testDoExecute() { verify(dataSourceService, times(1)).createDataSource(dataSourceMetadata); Mockito.verify(actionListener) .onResponse(createDataSourceActionResponseArgumentCaptor.capture()); - CreateDataSourceActionResponse createDataSourceActionResponse - = createDataSourceActionResponseArgumentCaptor.getValue(); - Assertions.assertEquals("Created DataSource with name test_datasource", - createDataSourceActionResponse.getResult()); + CreateDataSourceActionResponse createDataSourceActionResponse = + createDataSourceActionResponseArgumentCaptor.getValue(); + Assertions.assertEquals( + "Created DataSource with name test_datasource", createDataSourceActionResponse.getResult()); } @Test @@ -72,7 +68,8 @@ public void testDoExecuteWithException() { DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); dataSourceMetadata.setName("test_datasource"); dataSourceMetadata.setConnector(DataSourceType.PROMETHEUS); - doThrow(new RuntimeException("Error")).when(dataSourceService) + doThrow(new RuntimeException("Error")) + .when(dataSourceService) .createDataSource(dataSourceMetadata); CreateDataSourceActionRequest request = new CreateDataSourceActionRequest(dataSourceMetadata); action.doExecute(task, request, actionListener); @@ -80,7 +77,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git 
a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java index 61b197a805..d836eecabe 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java @@ -25,28 +25,23 @@ @ExtendWith(MockitoExtension.class) public class TransportDeleteDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportDeleteDataSourceAction action; - @Mock - private DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportDeleteDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; @Captor private ArgumentCaptor deleteDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - action = new TransportDeleteDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportDeleteDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -57,10 +52,10 @@ public void testDoExecute() { verify(dataSourceService, times(1)).deleteDataSource("test_datasource"); Mockito.verify(actionListener) .onResponse(deleteDataSourceActionResponseArgumentCaptor.capture()); - DeleteDataSourceActionResponse deleteDataSourceActionResponse - = deleteDataSourceActionResponseArgumentCaptor.getValue(); - Assertions.assertEquals("Deleted DataSource with name test_datasource", - 
deleteDataSourceActionResponse.getResult()); + DeleteDataSourceActionResponse deleteDataSourceActionResponse = + deleteDataSourceActionResponseArgumentCaptor.getValue(); + Assertions.assertEquals( + "Deleted DataSource with name test_datasource", deleteDataSourceActionResponse.getResult()); } @Test @@ -72,7 +67,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java index 0546df643d..b6899421ef 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java @@ -34,27 +34,22 @@ @ExtendWith(MockitoExtension.class) public class TransportGetDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportGetDataSourceAction action; - @Mock - private DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportGetDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; @Captor private ArgumentCaptor getDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - action = new 
TransportGetDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportGetDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -63,23 +58,22 @@ public void testDoExecute() { dataSourceMetadata.setName("test_datasource"); dataSourceMetadata.setConnector(DataSourceType.PROMETHEUS); GetDataSourceActionRequest request = new GetDataSourceActionRequest("test_datasource"); - when(dataSourceService.getDataSourceMetadata("test_datasource")) - .thenReturn(dataSourceMetadata); + when(dataSourceService.getDataSourceMetadata("test_datasource")).thenReturn(dataSourceMetadata); action.doExecute(task, request, actionListener); verify(dataSourceService, times(1)).getDataSourceMetadata("test_datasource"); Mockito.verify(actionListener).onResponse(getDataSourceActionResponseArgumentCaptor.capture()); - GetDataSourceActionResponse getDataSourceActionResponse - = getDataSourceActionResponseArgumentCaptor.getValue(); + GetDataSourceActionResponse getDataSourceActionResponse = + getDataSourceActionResponseArgumentCaptor.getValue(); JsonResponseFormatter dataSourceMetadataJsonResponseFormatter = - new JsonResponseFormatter<>( - JsonResponseFormatter.Style.PRETTY) { + new JsonResponseFormatter<>(JsonResponseFormatter.Style.PRETTY) { @Override protected Object buildJsonObject(DataSourceMetadata response) { return response; } }; - Assertions.assertEquals(dataSourceMetadataJsonResponseFormatter.format(dataSourceMetadata), + Assertions.assertEquals( + dataSourceMetadataJsonResponseFormatter.format(dataSourceMetadata), getDataSourceActionResponse.getResult()); DataSourceMetadata result = new Gson().fromJson(getDataSourceActionResponse.getResult(), DataSourceMetadata.class); @@ -100,18 +94,16 @@ public void testDoExecuteForGetAllDataSources() { action.doExecute(task, request, actionListener); verify(dataSourceService, times(1)).getDataSourceMetadata(false); 
Mockito.verify(actionListener).onResponse(getDataSourceActionResponseArgumentCaptor.capture()); - GetDataSourceActionResponse getDataSourceActionResponse - = getDataSourceActionResponseArgumentCaptor.getValue(); + GetDataSourceActionResponse getDataSourceActionResponse = + getDataSourceActionResponseArgumentCaptor.getValue(); JsonResponseFormatter> dataSourceMetadataJsonResponseFormatter = - new JsonResponseFormatter<>( - JsonResponseFormatter.Style.PRETTY) { + new JsonResponseFormatter<>(JsonResponseFormatter.Style.PRETTY) { @Override protected Object buildJsonObject(Set response) { return response; } }; - Type setType = new TypeToken>() { - }.getType(); + Type setType = new TypeToken>() {}.getType(); Assertions.assertEquals( dataSourceMetadataJsonResponseFormatter.format(Collections.singleton(dataSourceMetadata)), getDataSourceActionResponse.getResult()); @@ -131,7 +123,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java index 2b9305c459..6e141918ca 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java @@ -27,28 +27,23 @@ @ExtendWith(MockitoExtension.class) public class TransportUpdateDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportUpdateDataSourceAction action; - @Mock - private 
DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportUpdateDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; @Captor private ArgumentCaptor updateDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - action = new TransportUpdateDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportUpdateDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -62,10 +57,10 @@ public void testDoExecute() { verify(dataSourceService, times(1)).updateDataSource(dataSourceMetadata); Mockito.verify(actionListener) .onResponse(updateDataSourceActionResponseArgumentCaptor.capture()); - UpdateDataSourceActionResponse updateDataSourceActionResponse - = updateDataSourceActionResponseArgumentCaptor.getValue(); - Assertions.assertEquals("Updated DataSource with name test_datasource", - updateDataSourceActionResponse.getResult()); + UpdateDataSourceActionResponse updateDataSourceActionResponse = + updateDataSourceActionResponseArgumentCaptor.getValue(); + Assertions.assertEquals( + "Updated DataSource with name test_datasource", updateDataSourceActionResponse.getResult()); } @Test @@ -73,7 +68,8 @@ public void testDoExecuteWithException() { DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); dataSourceMetadata.setName("test_datasource"); dataSourceMetadata.setConnector(DataSourceType.PROMETHEUS); - doThrow(new RuntimeException("Error")).when(dataSourceService) + doThrow(new RuntimeException("Error")) + .when(dataSourceService) .updateDataSource(dataSourceMetadata); 
UpdateDataSourceActionRequest request = new UpdateDataSourceActionRequest(dataSourceMetadata); action.doExecute(task, request, actionListener); @@ -81,7 +77,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java index e3dac306cd..ff23cdcabb 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java @@ -19,27 +19,24 @@ @ExtendWith(MockitoExtension.class) public class SchedulerTest { - @Mock - private NodeClient nodeClient; + @Mock private NodeClient nodeClient; - @Mock - private ThreadPool threadPool; + @Mock private ThreadPool threadPool; @Test public void testSchedule() { Mockito.when(nodeClient.threadPool()).thenReturn(threadPool); Mockito.doAnswer( - invocation -> { - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(threadPool) .schedule(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()); AtomicBoolean isRun = new AtomicBoolean(false); Scheduler.schedule(nodeClient, () -> isRun.set(true)); Assert.assertTrue(isRun.get()); } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java index f47d0503e7..c0c05c0282 100644 --- 
a/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java @@ -30,7 +30,8 @@ public void testConvertToXContent() { XContentBuilder contentBuilder = XContentParserUtils.convertToXContent(dataSourceMetadata); String contentString = BytesReference.bytes(contentBuilder).utf8ToString(); - Assertions.assertEquals("{\"name\":\"testDS\",\"connector\":\"PROMETHEUS\",\"allowedRoles\":[\"prometheus_access\"],\"properties\":{\"prometheus.uri\":\"https://localhost:9090\"}}", + Assertions.assertEquals( + "{\"name\":\"testDS\",\"connector\":\"PROMETHEUS\",\"allowedRoles\":[\"prometheus_access\"],\"properties\":{\"prometheus.uri\":\"https://localhost:9090\"}}", contentString); } @@ -49,7 +50,6 @@ public void testToDataSourceMetadataFromJson() { Assertions.assertEquals(retrievedMetadata, dataSourceMetadata); Assertions.assertEquals("prometheus_access", retrievedMetadata.getAllowedRoles().get(0)); - } @SneakyThrows @@ -62,9 +62,12 @@ public void testToDataSourceMetadataFromJsonWithoutName() { Gson gson = new Gson(); String json = gson.toJson(dataSourceMetadata); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> { - XContentParserUtils.toDataSourceMetadata(json); - }); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> { + XContentParserUtils.toDataSourceMetadata(json); + }); Assertions.assertEquals("name and connector are required fields.", exception.getMessage()); } @@ -78,9 +81,12 @@ public void testToDataSourceMetadataFromJsonWithoutConnector() { Gson gson = new Gson(); String json = gson.toJson(dataSourceMetadata); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> { - XContentParserUtils.toDataSourceMetadata(json); - }); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> { + 
XContentParserUtils.toDataSourceMetadata(json); + }); Assertions.assertEquals("name and connector are required fields.", exception.getMessage()); } @@ -92,10 +98,12 @@ public void testToDataSourceMetadataFromJsonUsingUnknownObject() { Gson gson = new Gson(); String json = gson.toJson(hashMap); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> { - XContentParserUtils.toDataSourceMetadata(json); - }); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> { + XContentParserUtils.toDataSourceMetadata(json); + }); Assertions.assertEquals("Unknown field: test", exception.getMessage()); } - } From a3d2fae47f091298cbd37e30ce149b26ebd74aa6 Mon Sep 17 00:00:00 2001 From: Shenoy Pratik Date: Thu, 10 Aug 2023 13:49:23 -0700 Subject: [PATCH 15/42] Update backport CI, add PR merged condition (#1952) Signed-off-by: Shenoy Pratik --- .github/workflows/backport.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 9537ddebda..6472a968d8 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -7,6 +7,7 @@ on: jobs: backport: + if: github.event.pull_request.merged == true runs-on: ubuntu-latest permissions: contents: write @@ -26,5 +27,4 @@ jobs: with: github_token: ${{ steps.github_app_token.outputs.token }} head_template: backport/backport-<%= number %>-to-<%= base %> - labels_template: "<%= JSON.stringify([...labels, 'autocut']) %>" - failure_labels: "failed backport" + failure_labels: backport-failed From 245c4f86bbc5b301051f0dcfc239458752267786 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Mon, 14 Aug 2023 07:50:29 -0700 Subject: [PATCH 16/42] Breaking change OpenSearch main project - Action movement (#1958) * fixing breaking change before ML fix. Signed-off-by: Mitchell Gale * More fixes for core breaking change. Signed-off-by: Mitchell Gale * spotless apply. 
Signed-off-by: Mitchell Gale * Remove d L at end of actionGet int Signed-off-by: Mitchell Gale * Adding missed import updates for ElasticDefaultRestExecutor and OpenSearchDataSourceMetadataStorageTest.java Signed-off-by: Mitchell Gale * Adding missed import updates for SparkResponse.java Signed-off-by: Mitchell Gale * Adding missed import updates for BindingTupleQueryPlannerExecuteTest CheckScriptContents QueryPlannerTest.java SparkResponseTest.java Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale --- .../model/transport/CreateDataSourceActionResponse.java | 2 +- .../model/transport/DeleteDataSourceActionResponse.java | 2 +- .../model/transport/GetDataSourceActionResponse.java | 2 +- .../model/transport/UpdateDataSourceActionResponse.java | 2 +- .../sql/datasources/rest/RestDataSourceQueryAction.java | 2 +- .../storage/OpenSearchDataSourceMetadataStorage.java | 2 +- .../datasources/transport/TransportCreateDataSourceAction.java | 2 +- .../datasources/transport/TransportDeleteDataSourceAction.java | 2 +- .../sql/datasources/transport/TransportGetDataSourceAction.java | 2 +- .../datasources/transport/TransportUpdateDataSourceAction.java | 2 +- .../storage/OpenSearchDataSourceMetadataStorageTest.java | 2 +- .../transport/TransportCreateDataSourceActionTest.java | 2 +- .../transport/TransportDeleteDataSourceActionTest.java | 2 +- .../datasources/transport/TransportGetDataSourceActionTest.java | 2 +- .../transport/TransportUpdateDataSourceActionTest.java | 2 +- .../sql/legacy/executor/ElasticDefaultRestExecutor.java | 2 +- .../sql/legacy/executor/QueryActionElasticExecutor.java | 2 +- .../sql/legacy/executor/adapter/QueryPlanRequestBuilder.java | 2 +- .../sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java | 2 +- .../opensearch/sql/legacy/query/SqlElasticRequestBuilder.java | 2 +- .../sql/legacy/query/SqlOpenSearchRequestBuilder.java | 2 +- .../opensearch/sql/legacy/query/join/JoinRequestBuilder.java | 2 +- 
.../sql/legacy/query/multi/MultiQueryRequestBuilder.java | 2 +- .../query/planner/physical/node/scroll/PhysicalScroll.java | 2 +- .../opensearch/sql/legacy/unittest/OpenSearchClientTest.java | 2 +- .../unittest/planner/BindingTupleQueryPlannerExecuteTest.java | 2 +- .../sql/legacy/unittest/planner/QueryPlannerTest.java | 2 +- .../org/opensearch/sql/legacy/util/CheckScriptContents.java | 2 +- plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java | 2 +- .../java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java | 2 +- .../sql/plugin/transport/TransportPPLQueryAction.java | 2 +- .../sql/plugin/transport/TransportPPLQueryResponse.java | 2 +- .../java/org/opensearch/sql/spark/response/SparkResponse.java | 2 +- .../org/opensearch/sql/spark/response/SparkResponseTest.java | 2 +- 34 files changed, 34 insertions(+), 34 deletions(-) diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java index a3cf6001ab..aeb1e2d3d9 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java @@ -10,7 +10,7 @@ import java.io.IOException; import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.opensearch.action.ActionResponse; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java index 0f0c2e0fc7..d8c29c2a67 100644 --- 
a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java @@ -10,7 +10,7 @@ import java.io.IOException; import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.opensearch.action.ActionResponse; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java index 2712f515c0..ac8d5d4c62 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java @@ -10,7 +10,7 @@ import java.io.IOException; import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.opensearch.action.ActionResponse; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java index c5c2eb58af..0be992d067 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java @@ -10,7 +10,7 @@ import java.io.IOException; import lombok.Getter; import lombok.RequiredArgsConstructor; -import 
org.opensearch.action.ActionResponse; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java index e93245ed7b..b5929d0f20 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java @@ -22,8 +22,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; -import org.opensearch.action.ActionListener; import org.opensearch.client.node.NodeClient; +import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java index fef684bf1d..73eb297fea 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java @@ -18,7 +18,6 @@ import org.apache.commons.io.IOUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionFuture; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.admin.indices.create.CreateIndexRequest; @@ -34,6 +33,7 @@ import org.opensearch.action.update.UpdateResponse; import 
org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.action.ActionFuture; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.engine.DocumentMissingException; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java index e49ff7d796..54ca92b695 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java @@ -7,11 +7,11 @@ package org.opensearch.sql.datasources.transport; -import org.opensearch.action.ActionListener; import org.opensearch.action.ActionType; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasources.model.transport.CreateDataSourceActionRequest; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java index 7b814b516a..5578d40651 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java @@ -7,11 +7,11 @@ package org.opensearch.sql.datasources.transport; -import org.opensearch.action.ActionListener; import org.opensearch.action.ActionType; import 
org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasources.model.transport.DeleteDataSourceActionRequest; import org.opensearch.sql.datasources.model.transport.DeleteDataSourceActionResponse; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java index b8147d47ca..34ad59c80f 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java @@ -8,11 +8,11 @@ package org.opensearch.sql.datasources.transport; import java.util.Set; -import org.opensearch.action.ActionListener; import org.opensearch.action.ActionType; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasources.model.transport.GetDataSourceActionRequest; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java index 6964d574dd..4325282f83 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java @@ -7,11 +7,11 @@ package 
org.opensearch.sql.datasources.transport; -import org.opensearch.action.ActionListener; import org.opensearch.action.ActionType; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasources.model.transport.UpdateDataSourceActionRequest; import org.opensearch.sql.datasources.model.transport.UpdateDataSourceActionResponse; diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java index 6e412e8333..cc663d56e6 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java @@ -25,7 +25,6 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.action.ActionFuture; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.admin.indices.create.CreateIndexResponse; import org.opensearch.action.delete.DeleteResponse; @@ -34,6 +33,7 @@ import org.opensearch.action.update.UpdateResponse; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.action.ActionFuture; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.rest.RestStatus; import org.opensearch.index.engine.DocumentMissingException; diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java 
index a2482602ff..f1a3a2875e 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java @@ -14,8 +14,8 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasource.model.DataSourceType; import org.opensearch.sql.datasources.model.transport.CreateDataSourceActionRequest; diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java index d836eecabe..ea581de20c 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java @@ -14,8 +14,8 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasources.model.transport.DeleteDataSourceActionRequest; import org.opensearch.sql.datasources.model.transport.DeleteDataSourceActionResponse; import org.opensearch.sql.datasources.service.DataSourceServiceImpl; diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java index 
b6899421ef..4f04afd667 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java @@ -20,8 +20,8 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasource.model.DataSourceType; import org.opensearch.sql.datasources.model.transport.GetDataSourceActionRequest; diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java index 6e141918ca..998a1aa7b2 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java @@ -14,8 +14,8 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasource.model.DataSourceType; import org.opensearch.sql.datasources.model.transport.UpdateDataSourceActionRequest; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java index 220903d49d..a5dd066536 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java @@ -11,13 +11,13 @@ import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionFuture; import org.opensearch.action.ActionRequest; import org.opensearch.action.admin.indices.get.GetIndexRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchScrollRequest; import org.opensearch.client.Client; +import org.opensearch.common.action.ActionFuture; import org.opensearch.core.rest.RestStatus; import org.opensearch.index.reindex.BulkIndexByScrollResponseContentListener; import org.opensearch.index.reindex.DeleteByQueryRequest; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java index 185820c0a4..bcb25fd39a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java @@ -8,9 +8,9 @@ import java.io.IOException; import java.util.List; -import org.opensearch.action.ActionResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; +import org.opensearch.core.action.ActionResponse; import org.opensearch.search.SearchHits; import org.opensearch.search.aggregations.Aggregations; import org.opensearch.sql.legacy.exception.SqlParseException; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java index a7b0f96b1b..ef0bc85bc1 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java @@ -10,7 +10,7 @@ import lombok.RequiredArgsConstructor; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; -import org.opensearch.action.ActionResponse; +import org.opensearch.core.action.ActionResponse; import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; import org.opensearch.sql.legacy.query.planner.core.BindingTupleQueryPlanner; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java index 69036a6d10..6963996b22 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java @@ -8,8 +8,8 @@ import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; -import org.opensearch.action.ActionResponse; import org.opensearch.action.search.SearchRequestBuilder; +import org.opensearch.core.action.ActionResponse; import org.opensearch.index.reindex.DeleteByQueryRequestBuilder; /** diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java index eb04be928c..e1f3db3fa7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java @@ -8,7 +8,7 @@ import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; -import org.opensearch.action.ActionResponse; +import 
org.opensearch.core.action.ActionResponse; /** * Created by Eliran on 19/8/2015. diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java index 3edd8d3fbd..6bba1048c4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java @@ -8,7 +8,7 @@ import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; -import org.opensearch.action.ActionResponse; +import org.opensearch.core.action.ActionResponse; /** * Created by Eliran on 19/8/2015. diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java index 195abcadec..316d17a275 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java @@ -10,9 +10,9 @@ import java.io.IOException; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; -import org.opensearch.action.ActionResponse; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java index 2a5e356073..5340a701ed 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java @@ -13,9 +13,9 @@ import java.util.Map; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; -import org.opensearch.action.ActionResponse; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java index ec81913807..8866420218 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java @@ -9,8 +9,8 @@ import java.util.Iterator; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; -import org.opensearch.action.ActionResponse; import org.opensearch.action.search.SearchResponse; +import org.opensearch.core.action.ActionResponse; import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.query.AggregationQueryAction; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java index d0e740de51..2a654774d4 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java @@ -17,12 +17,12 @@ import org.mockito.MockitoAnnotations; import 
org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.opensearch.action.ActionFuture; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; +import org.opensearch.common.action.ActionFuture; import org.opensearch.sql.legacy.esdomain.OpenSearchClient; public class OpenSearchClientTest { diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java index db474e5136..9f6fcbcc6d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java @@ -23,9 +23,9 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.runners.MockitoJUnitRunner; import org.mockito.stubbing.Answer; -import org.opensearch.action.ActionFuture; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; +import org.opensearch.common.action.ActionFuture; import org.opensearch.search.aggregations.Aggregations; import org.opensearch.sql.legacy.domain.ColumnTypeProvider; import org.opensearch.sql.legacy.expression.domain.BindingTuple; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java index 456889fe53..66380c108d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java @@ -31,13 +31,13 @@ import org.mockito.MockitoAnnotations; import 
org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import org.opensearch.action.ActionFuture; import org.opensearch.action.search.ClearScrollRequestBuilder; import org.opensearch.action.search.ClearScrollResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchScrollRequestBuilder; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterName; +import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.bytes.BytesArray; diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java index 7b03140ea4..d627cebb27 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java @@ -26,7 +26,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.mockito.stubbing.Answer; -import org.opensearch.action.ActionFuture; import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.opensearch.action.search.SearchRequestBuilder; @@ -38,6 +37,7 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.action.ActionFuture; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java index 12d6358c7a..5e156c2f5d 100644 --- 
a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java @@ -19,7 +19,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionResponse; import org.opensearch.action.ActionType; import org.opensearch.client.Client; import org.opensearch.client.node.NodeClient; @@ -34,6 +33,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsFilter; import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java index 8b5074a5aa..55f8dfdfef 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java @@ -18,8 +18,8 @@ import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionListener; import org.opensearch.client.node.NodeClient; +import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; import org.opensearch.index.IndexNotFoundException; import org.opensearch.rest.BaseRestHandler; diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java index dbe5230abf..8a9d276673 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java +++ 
b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java @@ -9,7 +9,6 @@ import java.util.Locale; import java.util.Optional; -import org.opensearch.action.ActionListener; import org.opensearch.action.ActionRequest; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; @@ -18,6 +17,7 @@ import org.opensearch.common.inject.Inject; import org.opensearch.common.inject.Injector; import org.opensearch.common.inject.ModulesBuilder; +import org.opensearch.core.action.ActionListener; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.datasource.DataSourceService; diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryResponse.java b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryResponse.java index 4f26686f30..169777e8dc 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryResponse.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryResponse.java @@ -8,7 +8,7 @@ import java.io.IOException; import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.opensearch.action.ActionResponse; +import org.opensearch.core.action.ActionResponse; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java index 504d4c96ca..f30072eb3f 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java +++ b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java @@ -13,13 +13,13 @@ import org.apache.logging.log4j.Logger; import org.json.JSONObject; import org.opensearch.ResourceNotFoundException; -import 
org.opensearch.action.ActionFuture; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.delete.DeleteRequest; import org.opensearch.action.delete.DeleteResponse; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; +import org.opensearch.common.action.ActionFuture; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; diff --git a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java index f6bc23b827..abc4c81626 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java @@ -21,11 +21,11 @@ import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.ResourceNotFoundException; -import org.opensearch.action.ActionFuture; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.delete.DeleteResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; +import org.opensearch.common.action.ActionFuture; import org.opensearch.core.rest.RestStatus; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; From e377cf29830f90cd920629e4d33dbf9337313104 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Mon, 14 Aug 2023 14:21:27 -0700 Subject: [PATCH 17/42] [Spotless] Applying Google Code Format for common #7 (#1940) * Spotless apply for common directory. 
Signed-off-by: Mitchell Gale * Igoring checkstyle for common Signed-off-by: Mitchell Gale * Spotless apply on string utils, Signed-off-by: Mitchell Gale * Typo fix Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Mitchell Gale Signed-off-by: Yury-Fridlyand Co-authored-by: Yury-Fridlyand --- build.gradle | 3 +- common/build.gradle | 7 +- .../antlr/CaseInsensitiveCharStream.java | 1 - .../antlr/SyntaxAnalysisErrorListener.java | 1 - .../common/antlr/SyntaxCheckException.java | 1 - .../AwsSigningInterceptor.java | 32 ++-- .../BasicAuthenticationInterceptor.java | 6 +- .../opensearch/sql/common/grok/Converter.java | 57 +++--- .../org/opensearch/sql/common/grok/Grok.java | 66 +++---- .../sql/common/grok/GrokCompiler.java | 62 +++---- .../opensearch/sql/common/grok/GrokUtils.java | 47 ++--- .../org/opensearch/sql/common/grok/Match.java | 143 +++++++------- .../common/grok/exception/GrokException.java | 12 +- .../sql/common/response/ResponseListener.java | 1 - .../sql/common/setting/LegacySettings.java | 24 +-- .../sql/common/setting/Settings.java | 24 +-- .../sql/common/utils/QueryContext.java | 26 ++- .../sql/common/utils/StringUtils.java | 41 ++-- .../AwsSigningInterceptorTest.java | 62 +++---- .../BasicAuthenticationInterceptorTest.java | 25 +-- .../sql/common/grok/ApacheDataTypeTest.java | 29 ++- .../sql/common/grok/ApacheTest.java | 2 - .../opensearch/sql/common/grok/BasicTest.java | 8 +- .../sql/common/grok/CaptureTest.java | 6 +- .../common/grok/GrokDocumentationTest.java | 44 +++-- .../opensearch/sql/common/grok/GrokTest.java | 175 ++++++++++-------- .../sql/common/grok/MessagesTest.java | 7 +- .../sql/common/grok/ResourceManager.java | 4 +- 28 files changed, 421 insertions(+), 495 deletions(-) diff --git a/build.gradle b/build.gradle index f623b5da4e..4f5813fb22 100644 --- a/build.gradle +++ b/build.gradle @@ -84,7 +84,8 @@ repositories { spotless { java { target fileTree('.') { - include 'datasources/**/*.java', + include 'common/**/*.java', + 
'datasources/**/*.java', 'core/**/*.java' exclude '**/build/**', '**/build-*/**' } diff --git a/common/build.gradle b/common/build.gradle index 0367035ce2..25cdcd6566 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -31,6 +31,11 @@ repositories { mavenCentral() } +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + dependencies { api "org.antlr:antlr4-runtime:4.7.1" api group: 'com.google.guava', name: 'guava', version: '32.0.1-jre' @@ -62,4 +67,4 @@ configurations.all { resolutionStrategy.force "org.apache.httpcomponents:httpcore:4.4.13" resolutionStrategy.force "joda-time:joda-time:2.10.12" resolutionStrategy.force "org.slf4j:slf4j-api:1.7.36" -} \ No newline at end of file +} diff --git a/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java b/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java index 0036da32a1..89381872ce 100644 --- a/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java +++ b/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.antlr; import org.antlr.v4.runtime.CharStream; diff --git a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java index b499a52967..76cbf52d58 100644 --- a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java +++ b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.antlr; import java.util.Locale; diff --git 
a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java index 806cb7208b..d3c9c111ef 100644 --- a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java +++ b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.antlr; public class SyntaxCheckException extends RuntimeException { diff --git a/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java b/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java index 6c65c69c31..e2d33dca8b 100644 --- a/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java +++ b/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java @@ -31,15 +31,17 @@ public class AwsSigningInterceptor implements Interceptor { private static final Logger LOG = LogManager.getLogger(); /** - * AwsSigningInterceptor which intercepts http requests - * and adds required headers for sigv4 authentication. + * AwsSigningInterceptor which intercepts http requests and adds required headers for sigv4 + * authentication. * * @param awsCredentialsProvider awsCredentialsProvider. * @param region region. * @param serviceName serviceName. 
*/ - public AwsSigningInterceptor(@NonNull AWSCredentialsProvider awsCredentialsProvider, - @NonNull String region, @NonNull String serviceName) { + public AwsSigningInterceptor( + @NonNull AWSCredentialsProvider awsCredentialsProvider, + @NonNull String region, + @NonNull String serviceName) { this.okHttpAwsV4Signer = new OkHttpAwsV4Signer(region, serviceName); this.awsCredentialsProvider = awsCredentialsProvider; } @@ -48,25 +50,27 @@ public AwsSigningInterceptor(@NonNull AWSCredentialsProvider awsCredentialsProvi public Response intercept(Interceptor.Chain chain) throws IOException { Request request = chain.request(); - DateTimeFormatter timestampFormat = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'") - .withZone(ZoneId.of("GMT")); + DateTimeFormatter timestampFormat = + DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'").withZone(ZoneId.of("GMT")); - - Request.Builder newRequestBuilder = request.newBuilder() - .addHeader("x-amz-date", timestampFormat.format(ZonedDateTime.now())) - .addHeader("host", request.url().host()); + Request.Builder newRequestBuilder = + request + .newBuilder() + .addHeader("x-amz-date", timestampFormat.format(ZonedDateTime.now())) + .addHeader("host", request.url().host()); AWSCredentials awsCredentials = awsCredentialsProvider.getCredentials(); if (awsCredentialsProvider instanceof STSAssumeRoleSessionCredentialsProvider) { - newRequestBuilder.addHeader("x-amz-security-token", + newRequestBuilder.addHeader( + "x-amz-security-token", ((STSAssumeRoleSessionCredentialsProvider) awsCredentialsProvider) .getCredentials() .getSessionToken()); } Request newRequest = newRequestBuilder.build(); - Request signed = okHttpAwsV4Signer.sign(newRequest, - awsCredentials.getAWSAccessKeyId(), awsCredentials.getAWSSecretKey()); + Request signed = + okHttpAwsV4Signer.sign( + newRequest, awsCredentials.getAWSAccessKeyId(), awsCredentials.getAWSSecretKey()); return chain.proceed(signed); } - } diff --git 
a/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java b/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java index 34634d1580..2275482e30 100644 --- a/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java +++ b/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java @@ -22,13 +22,11 @@ public BasicAuthenticationInterceptor(@NonNull String username, @NonNull String this.credentials = Credentials.basic(username, password); } - @Override public Response intercept(Interceptor.Chain chain) throws IOException { Request request = chain.request(); - Request authenticatedRequest = request.newBuilder() - .header("Authorization", credentials).build(); + Request authenticatedRequest = + request.newBuilder().header("Authorization", credentials).build(); return chain.proceed(authenticatedRequest); } - } diff --git a/common/src/main/java/org/opensearch/sql/common/grok/Converter.java b/common/src/main/java/org/opensearch/sql/common/grok/Converter.java index ebbe13f761..ddd3a2bbb4 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/Converter.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/Converter.java @@ -23,9 +23,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -/** - * Convert String argument to the right type. - */ +/** Convert String argument to the right type. 
*/ public class Converter { public enum Type { @@ -51,13 +49,13 @@ public enum Type { private static final Pattern SPLITTER = Pattern.compile("[:;]"); private static final Map TYPES = - Arrays.stream(Type.values()) - .collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t)); + Arrays.stream(Type.values()).collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t)); private static final Map TYPE_ALIASES = Arrays.stream(Type.values()) - .flatMap(type -> type.aliases.stream() - .map(alias -> new AbstractMap.SimpleEntry<>(alias, type))) + .flatMap( + type -> + type.aliases.stream().map(alias -> new AbstractMap.SimpleEntry<>(alias, type))) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); private static Type getType(String key) { @@ -69,34 +67,30 @@ private static Type getType(String key) { return type; } - /** - * getConverters. - */ - public static Map> - getConverters(Collection groupNames, Object... params) { + /** getConverters. */ + public static Map> getConverters( + Collection groupNames, Object... params) { return groupNames.stream() .filter(Converter::containsDelimiter) - .collect(Collectors.toMap(Function.identity(), key -> { - String[] list = splitGrokPattern(key); - IConverter converter = getType(list[1]).converter; - if (list.length == 3) { - converter = converter.newConverter(list[2], params); - } - return converter; - })); + .collect( + Collectors.toMap( + Function.identity(), + key -> { + String[] list = splitGrokPattern(key); + IConverter converter = getType(list[1]).converter; + if (list.length == 3) { + converter = converter.newConverter(list[2], params); + } + return converter; + })); } - /** - * getGroupTypes. - */ + /** getGroupTypes. 
*/ public static Map getGroupTypes(Collection groupNames) { return groupNames.stream() .filter(Converter::containsDelimiter) .map(Converter::splitGrokPattern) - .collect(Collectors.toMap( - l -> l[0], - l -> getType(l[1]) - )); + .collect(Collectors.toMap(l -> l[0], l -> getType(l[1]))); } public static String extractKey(String key) { @@ -120,7 +114,6 @@ default IConverter newConverter(String param, Object... params) { } } - static class DateConverter implements IConverter { private final DateTimeFormatter formatter; @@ -138,8 +131,12 @@ private DateConverter(DateTimeFormatter formatter, ZoneId timeZone) { @Override public Instant convert(String value) { - TemporalAccessor dt = formatter - .parseBest(value.trim(), ZonedDateTime::from, LocalDateTime::from, OffsetDateTime::from, + TemporalAccessor dt = + formatter.parseBest( + value.trim(), + ZonedDateTime::from, + LocalDateTime::from, + OffsetDateTime::from, Instant::from, LocalDate::from); if (dt instanceof ZonedDateTime) { diff --git a/common/src/main/java/org/opensearch/sql/common/grok/Grok.java b/common/src/main/java/org/opensearch/sql/common/grok/Grok.java index f20f99cbc3..6dfab3f791 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/Grok.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/Grok.java @@ -16,36 +16,29 @@ import org.opensearch.sql.common.grok.Converter.IConverter; /** - * {@code Grok} parse arbitrary text and structure it. - *
- * {@code Grok} is simple API that allows you to easily parse logs - * and other files (single line). With {@code Grok}, - * you can turn unstructured log and event data into structured data. + * {@code Grok} parse arbitrary text and structure it.
+ * {@code Grok} is simple API that allows you to easily parse logs and other files (single line). + * With {@code Grok}, you can turn unstructured log and event data into structured data. * * @since 0.0.1 */ public class Grok implements Serializable { - /** - * Named regex of the originalGrokPattern. - */ + /** Named regex of the originalGrokPattern. */ private final String namedRegex; + /** - * Map of the named regex of the originalGrokPattern - * with id = namedregexid and value = namedregex. + * Map of the named regex of the originalGrokPattern with id = namedregexid and value = + * namedregex. */ private final Map namedRegexCollection; - /** - * Original {@code Grok} pattern (expl: %{IP}). - */ + + /** Original {@code Grok} pattern (expl: %{IP}). */ private final String originalGrokPattern; - /** - * Pattern of the namedRegex. - */ + + /** Pattern of the namedRegex. */ private final Pattern compiledNamedRegex; - /** - * {@code Grok} patterns definition. - */ + /** {@code Grok} patterns definition. */ private final Map grokPatternDefinition; public final Set namedGroups; @@ -54,19 +47,16 @@ public class Grok implements Serializable { public final Map> converters; - /** - * only use in grok discovery. - */ + /** only use in grok discovery. */ private String savedPattern = ""; - /** - * Grok. - */ - public Grok(String pattern, - String namedRegex, - Map namedRegexCollection, - Map patternDefinitions, - ZoneId defaultTimeZone) { + /** Grok. */ + public Grok( + String pattern, + String namedRegex, + Map namedRegexCollection, + Map patternDefinitions, + ZoneId defaultTimeZone) { this.originalGrokPattern = pattern; this.namedRegex = namedRegex; this.compiledNamedRegex = Pattern.compile(namedRegex); @@ -132,8 +122,8 @@ public Map getNamedRegexCollection() { } /** - * Match the given log with the named regex. - * And return the json representation of the matched element + * Match the given log with the named regex. 
And return the json representation of the + * matched element * * @param log : log to match * @return map containing matches @@ -144,8 +134,8 @@ public Map capture(String log) { } /** - * Match the given list of log with the named regex - * and return the list of json representation of the matched elements. + * Match the given list of log with the named regex and return the list of json + * representation of the matched elements. * * @param logs : list of log * @return list of maps containing matches @@ -159,8 +149,8 @@ public ArrayList> capture(List logs) { } /** - * Match the given text with the named regex - * {@code Grok} will extract data from the string and get an extence of {@link Match}. + * Match the given text with the named regex {@code Grok} will extract data from the + * string and get an extence of {@link Match}. * * @param text : Single line of log * @return Grok Match @@ -172,9 +162,7 @@ public Match match(CharSequence text) { Matcher matcher = compiledNamedRegex.matcher(text); if (matcher.find()) { - return new Match( - text, this, matcher, matcher.start(0), matcher.end(0) - ); + return new Match(text, this, matcher, matcher.start(0), matcher.end(0)); } return Match.EMPTY; diff --git a/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java b/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java index 18894fc7a1..aba96ad4cb 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java @@ -31,13 +31,10 @@ public class GrokCompiler implements Serializable { // We don't want \n and commented line private static final Pattern patternLinePattern = Pattern.compile("^([A-z0-9_]+)\\s+(.*)$"); - /** - * {@code Grok} patterns definitions. - */ + /** {@code Grok} patterns definitions. 
*/ private final Map grokPatternDefinitions = new HashMap<>(); - private GrokCompiler() { - } + private GrokCompiler() {} public static GrokCompiler newInstance() { return new GrokCompiler(); @@ -50,10 +47,10 @@ public Map getPatternDefinitions() { /** * Registers a new pattern definition. * - * @param name : Pattern Name + * @param name : Pattern Name * @param pattern : Regular expression Or {@code Grok} pattern * @throws GrokException runtime expt - **/ + */ public void register(String name, String pattern) { name = Objects.requireNonNull(name).trim(); pattern = Objects.requireNonNull(pattern).trim(); @@ -63,9 +60,7 @@ public void register(String name, String pattern) { } } - /** - * Registers multiple pattern definitions. - */ + /** Registers multiple pattern definitions. */ public void register(Map patternDefinitions) { Objects.requireNonNull(patternDefinitions); patternDefinitions.forEach(this::register); @@ -78,12 +73,9 @@ public void register(InputStream input) throws IOException { register(input, StandardCharsets.UTF_8); } - /** - * Registers multiple pattern definitions from a given inputStream. - */ + /** Registers multiple pattern definitions from a given inputStream. */ public void register(InputStream input, Charset charset) throws IOException { - try ( - BufferedReader in = new BufferedReader(new InputStreamReader(input, charset))) { + try (BufferedReader in = new BufferedReader(new InputStreamReader(input, charset))) { in.lines() .map(patternLinePattern::matcher) .filter(Matcher::matches) @@ -91,11 +83,10 @@ public void register(InputStream input, Charset charset) throws IOException { } } - /** - * Registers multiple pattern definitions from a given Reader. - */ + /** Registers multiple pattern definitions from a given Reader. 
*/ public void register(Reader input) throws IOException { - new BufferedReader(input).lines() + new BufferedReader(input) + .lines() .map(patternLinePattern::matcher) .filter(Matcher::matches) .forEach(m -> register(m.group(1), m.group(2))); @@ -109,9 +100,7 @@ public void registerPatternFromClasspath(String path) throws GrokException { registerPatternFromClasspath(path, StandardCharsets.UTF_8); } - /** - * registerPatternFromClasspath. - */ + /** registerPatternFromClasspath. */ public void registerPatternFromClasspath(String path, Charset charset) throws GrokException { final InputStream inputStream = this.getClass().getResourceAsStream(path); try (Reader reader = new InputStreamReader(inputStream, charset)) { @@ -121,9 +110,7 @@ public void registerPatternFromClasspath(String path, Charset charset) throws Gr } } - /** - * Compiles a given Grok pattern and returns a Grok object which can parse the pattern. - */ + /** Compiles a given Grok pattern and returns a Grok object which can parse the pattern. */ public Grok compile(String pattern) throws IllegalArgumentException { return compile(pattern, false); } @@ -135,11 +122,11 @@ public Grok compile(final String pattern, boolean namedOnly) throws IllegalArgum /** * Compiles a given Grok pattern and returns a Grok object which can parse the pattern. * - * @param pattern : Grok pattern (ex: %{IP}) - * @param defaultTimeZone : time zone used to parse a timestamp when it doesn't contain - * the time zone - * @param namedOnly : Whether to capture named expressions only or not (i.e. %{IP:ip} - * but not ${IP}) + * @param pattern : Grok pattern (ex: %{IP}) + * @param defaultTimeZone : time zone used to parse a timestamp when it doesn't contain the time + * zone + * @param namedOnly : Whether to capture named expressions only or not (i.e. 
%{IP:ip} but not + * ${IP}) * @return a compiled pattern * @throws IllegalArgumentException when pattern definition is invalid */ @@ -184,14 +171,15 @@ public Grok compile(final String pattern, ZoneId defaultTimeZone, boolean namedO for (int i = 0; i < count; i++) { String definitionOfPattern = patternDefinitions.get(group.get("pattern")); if (definitionOfPattern == null) { - throw new IllegalArgumentException(format("No definition for key '%s' found, aborting", - group.get("pattern"))); + throw new IllegalArgumentException( + format("No definition for key '%s' found, aborting", group.get("pattern"))); } String replacement = String.format("(?%s)", index, definitionOfPattern); if (namedOnly && group.get("subname") == null) { replacement = String.format("(?:%s)", definitionOfPattern); } - namedRegexCollection.put("name" + index, + namedRegexCollection.put( + "name" + index, (group.get("subname") != null ? group.get("subname") : group.get("name"))); namedRegex = StringUtils.replace(namedRegex, "%{" + group.get("name") + "}", replacement, 1); @@ -205,12 +193,6 @@ public Grok compile(final String pattern, ZoneId defaultTimeZone, boolean namedO throw new IllegalArgumentException("Pattern not found"); } - return new Grok( - pattern, - namedRegex, - namedRegexCollection, - patternDefinitions, - defaultTimeZone - ); + return new Grok(pattern, namedRegex, namedRegexCollection, patternDefinitions, defaultTimeZone); } } diff --git a/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java b/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java index 9ff65acde2..4b145bbbe8 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java @@ -12,7 +12,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; - /** * {@code GrokUtils} contain set of useful tools or methods. 
* @@ -20,29 +19,25 @@ */ public class GrokUtils { - /** - * Extract Grok patter like %{FOO} to FOO, Also Grok pattern with semantic. - */ - public static final Pattern GROK_PATTERN = Pattern.compile( - "%\\{" - + "(?" - + "(?[A-z0-9]+)" - + "(?::(?[A-z0-9_:;,\\-\\/\\s\\.']+))?" - + ")" - + "(?:=(?" - + "(?:" - + "(?:[^{}]+|\\.+)+" - + ")+" - + ")" - + ")?" - + "\\}"); - - public static final Pattern NAMED_REGEX = Pattern - .compile("\\(\\?<([a-zA-Z][a-zA-Z0-9]*)>"); - - /** - * getNameGroups. - */ + /** Extract Grok patter like %{FOO} to FOO, Also Grok pattern with semantic. */ + public static final Pattern GROK_PATTERN = + Pattern.compile( + "%\\{" + + "(?" + + "(?[A-z0-9]+)" + + "(?::(?[A-z0-9_:;,\\-\\/\\s\\.']+))?" + + ")" + + "(?:=(?" + + "(?:" + + "(?:[^{}]+|\\.+)+" + + ")+" + + ")" + + ")?" + + "\\}"); + + public static final Pattern NAMED_REGEX = Pattern.compile("\\(\\?<([a-zA-Z][a-zA-Z0-9]*)>"); + + /** getNameGroups. */ public static Set getNameGroups(String regex) { Set namedGroups = new LinkedHashSet<>(); Matcher matcher = NAMED_REGEX.matcher(regex); @@ -52,9 +47,7 @@ public static Set getNameGroups(String regex) { return namedGroups; } - /** - * namedGroups. - */ + /** namedGroups. */ public static Map namedGroups(Matcher matcher, Set groupNames) { Map namedGroups = new LinkedHashMap<>(); for (String groupName : groupNames) { diff --git a/common/src/main/java/org/opensearch/sql/common/grok/Match.java b/common/src/main/java/org/opensearch/sql/common/grok/Match.java index 6831f35cee..3771817bba 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/Match.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/Match.java @@ -5,7 +5,6 @@ package org.opensearch.sql.common.grok; - import static java.lang.String.format; import java.util.ArrayList; @@ -31,9 +30,7 @@ public class Match { private boolean keepEmptyCaptures = true; private Map capture = Collections.emptyMap(); - /** - * Create a new {@code Match} object. 
- */ + /** Create a new {@code Match} object. */ public Match(CharSequence subject, Grok grok, Matcher match, int start, int end) { this.subject = subject; this.grok = grok; @@ -42,9 +39,7 @@ public Match(CharSequence subject, Grok grok, Matcher match, int start, int end) this.end = end; } - /** - * Create Empty grok matcher. - */ + /** Create Empty grok matcher. */ public static final Match EMPTY = new Match("", null, null, 0, 0); public Matcher getMatch() { @@ -59,9 +54,7 @@ public int getEnd() { return end; } - /** - * Ignore empty captures. - */ + /** Ignore empty captures. */ public void setKeepEmptyCaptures(boolean ignore) { // clear any cached captures if (capture.size() > 0) { @@ -97,8 +90,8 @@ public Map capture() { * * @param flattened will it flatten values. * @return the matched elements. - * @throws GrokException if a keys has multiple non-null values, but only if flattened is set - * to true. + * @throws GrokException if a keys has multiple non-null values, but only if flattened is set to + * true. 
*/ private Map capture(boolean flattened) throws GrokException { if (match == null) { @@ -116,70 +109,69 @@ private Map capture(boolean flattened) throws GrokException { Map mappedw = GrokUtils.namedGroups(this.match, this.grok.namedGroups); - mappedw.forEach((key, valueString) -> { - String id = this.grok.getNamedRegexCollectionById(key); - if (id != null && !id.isEmpty()) { - key = id; - } - - if ("UNWANTED".equals(key)) { - return; - } - - Object value = valueString; - if (valueString != null) { - IConverter converter = grok.converters.get(key); - - if (converter != null) { - key = Converter.extractKey(key); - try { - value = converter.convert(valueString); - } catch (Exception e) { - capture.put(key + "_grokfailure", e.toString()); + mappedw.forEach( + (key, valueString) -> { + String id = this.grok.getNamedRegexCollectionById(key); + if (id != null && !id.isEmpty()) { + key = id; } - if (value instanceof String) { - value = cleanString((String) value); + if ("UNWANTED".equals(key)) { + return; } - } else { - value = cleanString(valueString); - } - } else if (!isKeepEmptyCaptures()) { - return; - } - - if (capture.containsKey(key)) { - Object currentValue = capture.get(key); - if (flattened) { - if (currentValue == null && value != null) { - capture.put(key, value); - } - if (currentValue != null && value != null) { - throw new GrokException( - format( - "key '%s' has multiple non-null values, this is not allowed in flattened mode," - + " values:'%s', '%s'", - key, - currentValue, - value)); + Object value = valueString; + if (valueString != null) { + IConverter converter = grok.converters.get(key); + + if (converter != null) { + key = Converter.extractKey(key); + try { + value = converter.convert(valueString); + } catch (Exception e) { + capture.put(key + "_grokfailure", e.toString()); + } + + if (value instanceof String) { + value = cleanString((String) value); + } + } else { + value = cleanString(valueString); + } + } else if (!isKeepEmptyCaptures()) { + 
return; } - } else { - if (currentValue instanceof List) { - @SuppressWarnings("unchecked") - List cvl = (List) currentValue; - cvl.add(value); + + if (capture.containsKey(key)) { + Object currentValue = capture.get(key); + + if (flattened) { + if (currentValue == null && value != null) { + capture.put(key, value); + } + if (currentValue != null && value != null) { + throw new GrokException( + format( + "key '%s' has multiple non-null values, this is not allowed in flattened" + + " mode, values:'%s', '%s'", + key, currentValue, value)); + } + } else { + if (currentValue instanceof List) { + @SuppressWarnings("unchecked") + List cvl = (List) currentValue; + cvl.add(value); + } else { + List list = new ArrayList(); + list.add(currentValue); + list.add(value); + capture.put(key, list); + } + } } else { - List list = new ArrayList(); - list.add(currentValue); - list.add(value); - capture.put(key, list); + capture.put(key, value); } - } - } else { - capture.put(key, value); - } - }); + }); capture = Collections.unmodifiableMap(capture); @@ -189,13 +181,11 @@ private Map capture(boolean flattened) throws GrokException { /** * Match to the subject the regex and save the matched element into a map * - *

Multiple values to the same key are flattened to one value: the sole non-null value will - * be captured. - * Should there be multiple non-null values a RuntimeException is being thrown. + *

Multiple values to the same key are flattened to one value: the sole non-null value will be + * captured. Should there be multiple non-null values a RuntimeException is being thrown. * *

This can be used in cases like: (foo (.*:message) bar|bar (.*:message) foo) where the regexp - * guarantees that only - * one value will be captured. + * guarantees that only one value will be captured. * *

See also {@link #capture} which returns multiple values of the same key as list. * @@ -220,9 +210,7 @@ private String cleanString(String value) { char firstChar = value.charAt(0); char lastChar = value.charAt(value.length() - 1); - if (firstChar == lastChar - && (firstChar == '"' || firstChar == '\'') - ) { + if (firstChar == lastChar && (firstChar == '"' || firstChar == '\'')) { if (value.length() <= 2) { return ""; } else { @@ -249,5 +237,4 @@ private String cleanString(String value) { public Boolean isNull() { return this.match == null; } - } diff --git a/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java b/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java index 54ca8aada3..0e9d6d2ddf 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java @@ -6,9 +6,8 @@ package org.opensearch.sql.common.grok.exception; /** - * Signals that an {@code Grok} exception of some sort has occurred. - * This class is the general class of - * exceptions produced by failed or interrupted Grok operations. + * Signals that an {@code Grok} exception of some sort has occurred. This class is the general class + * of exceptions produced by failed or interrupted Grok operations. * * @since 0.0.4 */ @@ -16,9 +15,7 @@ public class GrokException extends RuntimeException { private static final long serialVersionUID = 1L; - /** - * Creates a new GrokException. - */ + /** Creates a new GrokException. */ public GrokException() { super(); } @@ -27,7 +24,7 @@ public GrokException() { * Constructs a new GrokException. * * @param message the reason for the exception - * @param cause the underlying Throwable that caused this exception to be thrown. + * @param cause the underlying Throwable that caused this exception to be thrown. 
*/ public GrokException(String message, Throwable cause) { super(message, cause); @@ -50,5 +47,4 @@ public GrokException(String message) { public GrokException(Throwable cause) { super(cause); } - } diff --git a/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java b/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java index 3d5eadc692..bac79ddbbd 100644 --- a/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java +++ b/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.response; /** diff --git a/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java b/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java index 172a0d8023..e8dc76645a 100644 --- a/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java +++ b/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java @@ -3,42 +3,31 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.setting; import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Legacy Open Distro Settings. - */ +/** Legacy Open Distro Settings. */ public abstract class LegacySettings { @RequiredArgsConstructor public enum Key { - /** - * Legacy SQL Settings. - */ + /** Legacy SQL Settings. */ SQL_ENABLED("opendistro.sql.enabled"), SQL_QUERY_SLOWLOG("opendistro.sql.query.slowlog"), SQL_CURSOR_KEEPALIVE("opendistro.sql.cursor.keep_alive"), METRICS_ROLLING_WINDOW("opendistro.sql.metrics.rollingwindow"), METRICS_ROLLING_INTERVAL("opendistro.sql.metrics.rollinginterval"), - /** - * Legacy PPL Settings. - */ + /** Legacy PPL Settings. */ PPL_ENABLED("opendistro.ppl.enabled"), PPL_QUERY_MEMORY_LIMIT("opendistro.ppl.query.memory_limit"), - /** - * Legacy Common Settings. - */ + /** Legacy Common Settings. 
*/ QUERY_SIZE_LIMIT("opendistro.query.size_limit"), - /** - * Deprecated Settings. - */ + /** Deprecated Settings. */ SQL_NEW_ENGINE_ENABLED("opendistro.sql.engine.new.enabled"), QUERY_ANALYSIS_ENABLED("opendistro.sql.query.analysis.enabled"), QUERY_ANALYSIS_SEMANTIC_SUGGESTION("opendistro.sql.query.analysis.semantic.suggestion"), @@ -47,8 +36,7 @@ public enum Key { SQL_CURSOR_ENABLED("opendistro.sql.cursor.enabled"), SQL_CURSOR_FETCH_SIZE("opendistro.sql.cursor.fetch_size"); - @Getter - private final String keyValue; + @Getter private final String keyValue; } public abstract T getSettingValue(Key key); diff --git a/common/src/main/java/org/opensearch/sql/common/setting/Settings.java b/common/src/main/java/org/opensearch/sql/common/setting/Settings.java index 3b0eba157d..1e5243f91f 100644 --- a/common/src/main/java/org/opensearch/sql/common/setting/Settings.java +++ b/common/src/main/java/org/opensearch/sql/common/setting/Settings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.setting; import com.google.common.base.Strings; @@ -14,29 +13,21 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Setting. - */ +/** Setting. */ public abstract class Settings { @RequiredArgsConstructor public enum Key { - /** - * SQL Settings. - */ + /** SQL Settings. */ SQL_ENABLED("plugins.sql.enabled"), SQL_SLOWLOG("plugins.sql.slowlog"), SQL_CURSOR_KEEP_ALIVE("plugins.sql.cursor.keep_alive"), SQL_DELETE_ENABLED("plugins.sql.delete.enabled"), - /** - * PPL Settings. - */ + /** PPL Settings. */ PPL_ENABLED("plugins.ppl.enabled"), - /** - * Common Settings for SQL and PPL. - */ + /** Common Settings for SQL and PPL. 
*/ QUERY_MEMORY_LIMIT("plugins.query.memory_limit"), QUERY_SIZE_LIMIT("plugins.query.size_limit"), ENCYRPTION_MASTER_KEY("plugins.query.datasources.encryption.masterkey"), @@ -47,8 +38,7 @@ public enum Key { CLUSTER_NAME("cluster.name"); - @Getter - private final String keyValue; + @Getter private final String keyValue; private static final Map ALL_KEYS; @@ -66,9 +56,7 @@ public static Optional of(String keyValue) { } } - /** - * Get Setting Value. - */ + /** Get Setting Value. */ public abstract T getSettingValue(Key key); public abstract List getSettings(); diff --git a/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java b/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java index 19c3d9588c..686263238a 100644 --- a/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java +++ b/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.utils; import java.util.Map; @@ -11,25 +10,23 @@ import org.apache.logging.log4j.ThreadContext; /** - * Utility class for recording and accessing context for the query being executed. - * Implementation Details: context variables is being persisted statically in the thread context + * Utility class for recording and accessing context for the query being executed. Implementation + * Details: context variables is being persisted statically in the thread context + * * @see: @ThreadContext */ public class QueryContext { - /** - * The key of the request id in the context map. - */ + /** The key of the request id in the context map. */ private static final String REQUEST_ID_KEY = "request_id"; /** * Generates a random UUID and adds to the {@link ThreadContext} as the request id. - *

- * Note: If a request id already present, this method will overwrite it with a new - * one. This is to pre-vent re-using the same request id for different requests in - * case the same thread handles both of them. But this also means one should not - * call this method twice on the same thread within the lifetime of the request. - *

+ * + *

Note: If a request id already present, this method will overwrite it with a new one. This is + * to pre-vent re-using the same request id for different requests in case the same thread handles + * both of them. But this also means one should not call this method twice on the same thread + * within the lifetime of the request. */ public static String addRequestId() { var id = UUID.randomUUID().toString(); @@ -39,6 +36,7 @@ public static String addRequestId() { /** * Get RequestID. + * * @return the current request id from {@link ThreadContext}. */ public static String getRequestId() { @@ -50,8 +48,8 @@ public static String getRequestId() { } /** - * Wraps a given instance of {@link Runnable} into a new one which gets all the - * entries from current ThreadContext map. + * Wraps a given instance of {@link Runnable} into a new one which gets all the entries from + * current ThreadContext map. * * @param task the instance of Runnable to wrap * @return the new task diff --git a/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java b/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java index bd3a5a9779..4b7752a9de 100644 --- a/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java +++ b/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.utils; import com.google.common.base.Strings; @@ -12,15 +11,14 @@ public class StringUtils { /** - * Unquote Identifier which has " or ' as mark. - * Strings quoted by ' or " with two of these quotes appearing next to each other in the quote - * acts as an escape + * Unquote Identifier which has " or ' as mark. Strings quoted by ' or " with two of these quotes + * appearing next to each other in the quote acts as an escape
* Example: 'Test''s' will result in 'Test's', similar with those single quotes being replaced - * with double quote. - * Supports escaping quotes (single/double) and escape characters using the `\` characters. + * with double quote. Supports escaping quotes (single/double) and escape characters using the `\` + * characters. + * * @param text string - * @return An unquoted string whose outer pair of (single/double) quotes have been - * removed + * @return An unquoted string whose outer pair of (single/double) quotes have been removed */ public static String unquoteText(String text) { if (text.length() < 2) { @@ -39,9 +37,7 @@ public static String unquoteText(String text) { return text.substring(1, text.length() - 1); } - if (firstChar == lastChar - && (firstChar == '\'' - || firstChar == '"')) { + if (firstChar == lastChar && (firstChar == '\'' || firstChar == '"')) { enclosingQuote = firstChar; } else { return text; @@ -57,12 +53,8 @@ public static String unquoteText(String text) { currentChar = text.charAt(chIndex); nextChar = text.charAt(chIndex + 1); - if ((currentChar == '\\' - && (nextChar == '"' - || nextChar == '\\' - || nextChar == '\'')) - || (currentChar == nextChar - && currentChar == enclosingQuote)) { + if ((currentChar == '\\' && (nextChar == '"' || nextChar == '\\' || nextChar == '\'')) + || (currentChar == nextChar && currentChar == enclosingQuote)) { chIndex++; currentChar = nextChar; } @@ -73,9 +65,9 @@ public static String unquoteText(String text) { /** * Unquote Identifier which has ` as mark. 
+ * * @param identifier identifier that possibly enclosed by double quotes or back ticks - * @return An unquoted string whose outer pair of (double/back-tick) quotes have been - * removed + * @return An unquoted string whose outer pair of (double/back-tick) quotes have been removed */ public static String unquoteIdentifier(String identifier) { if (isQuoted(identifier, "`")) { @@ -86,16 +78,15 @@ public static String unquoteIdentifier(String identifier) { } /** - * Returns a formatted string using the specified format string and - * arguments, as well as the {@link Locale#ROOT} locale. + * Returns a formatted string using the specified format string and arguments, as well as the + * {@link Locale#ROOT} locale. * * @param format format string - * @param args arguments referenced by the format specifiers in the format string + * @param args arguments referenced by the format specifiers in the format string * @return A formatted string * @throws IllegalFormatException If a format string contains an illegal syntax, a format - * specifier that is incompatible with the given arguments, - * insufficient arguments given the format string, or other - * illegal conditions. + * specifier that is incompatible with the given arguments, insufficient arguments given the + * format string, or other illegal conditions. * @see java.lang.String#format(Locale, String, Object...) */ public static String format(final String format, Object... 
args) { diff --git a/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java b/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java index 21a8bbf6e7..435ac9dc93 100644 --- a/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java +++ b/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.common.authinterceptors; - import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSSessionCredentials; import com.amazonaws.auth.AWSStaticCredentialsProvider; @@ -28,37 +27,36 @@ @ExtendWith(MockitoExtension.class) public class AwsSigningInterceptorTest { - @Mock - private Interceptor.Chain chain; + @Mock private Interceptor.Chain chain; - @Captor - ArgumentCaptor requestArgumentCaptor; + @Captor ArgumentCaptor requestArgumentCaptor; - @Mock - private STSAssumeRoleSessionCredentialsProvider stsAssumeRoleSessionCredentialsProvider; + @Mock private STSAssumeRoleSessionCredentialsProvider stsAssumeRoleSessionCredentialsProvider; @Test void testConstructors() { - Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor(null, "us-east-1", "aps")); - Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor(getStaticAWSCredentialsProvider("accessKey", "secretKey"), null, - "aps")); - Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor(getStaticAWSCredentialsProvider("accessKey", "secretKey"), - "us-east-1", null)); + Assertions.assertThrows( + NullPointerException.class, () -> new AwsSigningInterceptor(null, "us-east-1", "aps")); + Assertions.assertThrows( + NullPointerException.class, + () -> + new AwsSigningInterceptor( + getStaticAWSCredentialsProvider("accessKey", "secretKey"), null, "aps")); + Assertions.assertThrows( + NullPointerException.class, + () -> + new 
AwsSigningInterceptor( + getStaticAWSCredentialsProvider("accessKey", "secretKey"), "us-east-1", null)); } @Test @SneakyThrows void testIntercept() { - Mockito.when(chain.request()).thenReturn(new Request.Builder() - .url("http://localhost:9090") - .build()); - AwsSigningInterceptor awsSigningInterceptor - = new AwsSigningInterceptor( - getStaticAWSCredentialsProvider("testAccessKey", "testSecretKey"), - "us-east-1", "aps"); + Mockito.when(chain.request()) + .thenReturn(new Request.Builder().url("http://localhost:9090").build()); + AwsSigningInterceptor awsSigningInterceptor = + new AwsSigningInterceptor( + getStaticAWSCredentialsProvider("testAccessKey", "testSecretKey"), "us-east-1", "aps"); awsSigningInterceptor.intercept(chain); Mockito.verify(chain).proceed(requestArgumentCaptor.capture()); Request request = requestArgumentCaptor.getValue(); @@ -67,31 +65,26 @@ void testIntercept() { Assertions.assertNotNull(request.headers("host")); } - @Test @SneakyThrows void testSTSCredentialsProviderInterceptor() { - Mockito.when(chain.request()).thenReturn(new Request.Builder() - .url("http://localhost:9090") - .build()); + Mockito.when(chain.request()) + .thenReturn(new Request.Builder().url("http://localhost:9090").build()); Mockito.when(stsAssumeRoleSessionCredentialsProvider.getCredentials()) .thenReturn(getAWSSessionCredentials()); - AwsSigningInterceptor awsSigningInterceptor - = new AwsSigningInterceptor(stsAssumeRoleSessionCredentialsProvider, - "us-east-1", "aps"); + AwsSigningInterceptor awsSigningInterceptor = + new AwsSigningInterceptor(stsAssumeRoleSessionCredentialsProvider, "us-east-1", "aps"); awsSigningInterceptor.intercept(chain); Mockito.verify(chain).proceed(requestArgumentCaptor.capture()); Request request = requestArgumentCaptor.getValue(); Assertions.assertNotNull(request.headers("Authorization")); Assertions.assertNotNull(request.headers("x-amz-date")); Assertions.assertNotNull(request.headers("host")); - Assertions.assertEquals("session_token", - 
request.headers("x-amz-security-token").get(0)); + Assertions.assertEquals("session_token", request.headers("x-amz-security-token").get(0)); } - - private AWSCredentialsProvider getStaticAWSCredentialsProvider(String accessKey, - String secretKey) { + private AWSCredentialsProvider getStaticAWSCredentialsProvider( + String accessKey, String secretKey) { return new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)); } @@ -113,5 +106,4 @@ public String getAWSSecretKey() { } }; } - } diff --git a/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java b/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java index 596894da6d..d59928d2ef 100644 --- a/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java +++ b/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java @@ -24,29 +24,25 @@ @ExtendWith(MockitoExtension.class) public class BasicAuthenticationInterceptorTest { - @Mock - private Interceptor.Chain chain; + @Mock private Interceptor.Chain chain; - @Captor - ArgumentCaptor requestArgumentCaptor; + @Captor ArgumentCaptor requestArgumentCaptor; @Test void testConstructors() { - Assertions.assertThrows(NullPointerException.class, () -> - new BasicAuthenticationInterceptor(null, "test")); - Assertions.assertThrows(NullPointerException.class, () -> - new BasicAuthenticationInterceptor("testAdmin", null)); + Assertions.assertThrows( + NullPointerException.class, () -> new BasicAuthenticationInterceptor(null, "test")); + Assertions.assertThrows( + NullPointerException.class, () -> new BasicAuthenticationInterceptor("testAdmin", null)); } - @Test @SneakyThrows void testIntercept() { - Mockito.when(chain.request()).thenReturn(new Request.Builder() - .url("http://localhost:9090") - .build()); - BasicAuthenticationInterceptor basicAuthenticationInterceptor - = 
new BasicAuthenticationInterceptor("testAdmin", "testPassword"); + Mockito.when(chain.request()) + .thenReturn(new Request.Builder().url("http://localhost:9090").build()); + BasicAuthenticationInterceptor basicAuthenticationInterceptor = + new BasicAuthenticationInterceptor("testAdmin", "testPassword"); basicAuthenticationInterceptor.intercept(chain); Mockito.verify(chain).proceed(requestArgumentCaptor.capture()); Request request = requestArgumentCaptor.getValue(); @@ -54,5 +50,4 @@ void testIntercept() { Collections.singletonList(Credentials.basic("testAdmin", "testPassword")), request.headers("Authorization")); } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java b/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java index 09695c8220..7eb0e964de 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.common.grok; - import static org.junit.Assert.assertEquals; import com.google.common.io.Resources; @@ -42,12 +41,13 @@ public void setup() throws Exception { @Test public void test002_httpd_access_semi() throws GrokException { - Grok grok = compiler.compile( - "%{IPORHOST:clientip} %{USER:ident;boolean} %{USER:auth} " - + "\\[%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z}\\] \"(?:%{WORD:verb;string} " - + "%{NOTSPACE:request}" - + "(?: HTTP/%{NUMBER:httpversion;float})?|%{DATA:rawrequest})\" %{NUMBER:response;int} " - + "(?:%{NUMBER:bytes;long}|-)"); + Grok grok = + compiler.compile( + "%{IPORHOST:clientip} %{USER:ident;boolean} %{USER:auth}" + + " \\[%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z}\\]" + + " \"(?:%{WORD:verb;string} %{NOTSPACE:request}(?:" + + " HTTP/%{NUMBER:httpversion;float})?|%{DATA:rawrequest})\" %{NUMBER:response;int}" + + " (?:%{NUMBER:bytes;long}|-)"); System.out.println(line); Match gm = grok.match(line); @@ -61,17 
+61,17 @@ public void test002_httpd_access_semi() throws GrokException { assertEquals(map.get("httpversion"), 1.1f); assertEquals(map.get("bytes"), 12846L); assertEquals("GET", map.get("verb")); - } @Test public void test002_httpd_access_colon() throws GrokException { - Grok grok = compiler.compile( - "%{IPORHOST:clientip} %{USER:ident:boolean} %{USER:auth} " - + "\\[%{HTTPDATE:timestamp:date:dd/MMM/yyyy:HH:mm:ss Z}\\] \"(?:%{WORD:verb:string} " - + "%{NOTSPACE:request}" - + "(?: HTTP/%{NUMBER:httpversion:float})?|%{DATA:rawrequest})\" %{NUMBER:response:int} " - + "(?:%{NUMBER:bytes:long}|-)"); + Grok grok = + compiler.compile( + "%{IPORHOST:clientip} %{USER:ident:boolean} %{USER:auth}" + + " \\[%{HTTPDATE:timestamp:date:dd/MMM/yyyy:HH:mm:ss Z}\\]" + + " \"(?:%{WORD:verb:string} %{NOTSPACE:request}(?:" + + " HTTP/%{NUMBER:httpversion:float})?|%{DATA:rawrequest})\" %{NUMBER:response:int}" + + " (?:%{NUMBER:bytes:long}|-)"); Match gm = grok.match(line); Map map = gm.capture(); @@ -85,6 +85,5 @@ public void test002_httpd_access_colon() throws GrokException { assertEquals(map.get("httpversion"), 1.1f); assertEquals(map.get("bytes"), 12846L); assertEquals("GET", map.get("verb")); - } } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java b/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java index 33113d1996..db420b16d3 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.common.grok; - import com.google.common.io.Resources; import java.io.BufferedReader; import java.io.File; @@ -65,5 +64,4 @@ public void test002_nasa_httpd_access() throws GrokException, IOException { br.close(); } } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java b/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java index 26df7ba57e..c724b58f3e 100644 --- 
a/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java @@ -33,8 +33,7 @@ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class BasicTest { - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); private GrokCompiler compiler; @@ -111,8 +110,8 @@ public void test005_testLoadPatternFromFile() throws IOException, GrokException public void test006_testLoadPatternFromFileIso_8859_1() throws IOException, GrokException { File temp = tempFolder.newFile("grok-tmp-pattern"); try (FileOutputStream fis = new FileOutputStream(temp); - BufferedWriter bw = new BufferedWriter( - new OutputStreamWriter(fis, StandardCharsets.ISO_8859_1))) { + BufferedWriter bw = + new BufferedWriter(new OutputStreamWriter(fis, StandardCharsets.ISO_8859_1))) { bw.write("TEST \\u2022"); } @@ -130,5 +129,4 @@ public void test007_testLoadPatternFromReader() throws IOException, GrokExceptio Grok grok = compiler.compile("%{TEST}"); assertEquals("(?\\u20AC)", grok.getNamedRegex()); } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java b/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java index 1173541e16..60e2761c83 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java @@ -98,7 +98,8 @@ public void test005_captureSubName() throws GrokException { Map map = match.capture(); assertEquals(1, map.size()); assertEquals("Hello", map.get(subname).toString()); - assertEquals("{abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abcdef=Hello}", + assertEquals( + "{abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abcdef=Hello}", map.toString()); } @@ -145,7 +146,8 @@ public void test008_flattenDuplicateKeys() throws GrokException { m3.captureFlattened(); fail("should 
report error due tu ambiguity"); } catch (RuntimeException e) { - assertThat(e.getMessage(), + assertThat( + e.getMessage(), containsString("has multiple non-null values, this is not allowed in flattened mode")); } } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java b/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java index 22115a825f..15d450e812 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java @@ -40,23 +40,43 @@ public void assureCodeInReadmeWorks() { Assertions.assertThat(capture).hasSize(22); final boolean debug = false; - final Object[] keywordArray = new Object[] {"COMBINEDAPACHELOG", - "COMMONAPACHELOG", "clientip", "ident", "auth", "timestamp", "MONTHDAY", - "MONTH", "YEAR", "TIME", "HOUR", "MINUTE", "SECOND", "INT", "verb", - "httpversion", "rawrequest", "request", "response", "bytes", "referrer", - "agent"}; + final Object[] keywordArray = + new Object[] { + "COMBINEDAPACHELOG", + "COMMONAPACHELOG", + "clientip", + "ident", + "auth", + "timestamp", + "MONTHDAY", + "MONTH", + "YEAR", + "TIME", + "HOUR", + "MINUTE", + "SECOND", + "INT", + "verb", + "httpversion", + "rawrequest", + "request", + "response", + "bytes", + "referrer", + "agent" + }; if (debug) { capture.keySet().stream().forEach(System.err::println); } - assertTrue(new HashSet(Arrays.asList(keywordArray)) - .containsAll(new HashSet(capture.keySet()))); + assertTrue( + new HashSet(Arrays.asList(keywordArray)) + .containsAll(new HashSet(capture.keySet()))); Arrays.asList(keywordArray).stream() .forEach(o -> assertThat(capture.keySet(), hasItem((String) o))); - assertThat(new HashSet(capture.keySet()), - containsInAnyOrder(keywordArray)); - assertTrue(new HashSet(capture.keySet()) - .containsAll(new HashSet(Arrays.asList(keywordArray)))); - + assertThat(new HashSet(capture.keySet()), 
containsInAnyOrder(keywordArray)); + assertTrue( + new HashSet(capture.keySet()) + .containsAll(new HashSet(Arrays.asList(keywordArray)))); } } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java b/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java index b5e8366807..862f9b8195 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java @@ -37,7 +37,6 @@ import org.junit.runners.MethodSorters; import org.opensearch.sql.common.grok.exception.GrokException; - @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class GrokTest { @@ -138,7 +137,6 @@ public void test002_numbers() { Match gm = grok.match("-42"); Map map = gm.capture(); assertEquals("{NUMBER=-42}", map.toString()); - } @Test @@ -152,7 +150,6 @@ public void test003_word() { gm = grok.match("abc"); map = gm.capture(); assertEquals("{WORD=abc}", map.toString()); - } @Test @@ -162,7 +159,6 @@ public void test004_space() { Match gm = grok.match("abc dc"); Map map = gm.capture(); assertEquals("{SPACE=}", map.toString()); - } @Test @@ -172,7 +168,6 @@ public void test004_number() { Match gm = grok.match("Something costs $55.4!"); Map map = gm.capture(); assertEquals("{NUMBER=55.4}", map.toString()); - } @Test @@ -182,7 +177,6 @@ public void test005_notSpace() { Match gm = grok.match("abc dc"); Map map = gm.capture(); assertEquals("{NOTSPACE=abc}", map.toString()); - } @Test @@ -209,7 +203,6 @@ public void test007_uuid() { gm = grok.match("03A8413C-F604-4D21-8F4D-24B19D98B5A7"); map = gm.capture(); assertEquals("{UUID=03A8413C-F604-4D21-8F4D-24B19D98B5A7}", map.toString()); - } @Test @@ -219,7 +212,6 @@ public void test008_mac() { Match gm = grok.match("5E:FF:56:A2:AF:15"); Map map = gm.capture(); assertEquals("{MAC=5E:FF:56:A2:AF:15}", map.toString()); - } @Test @@ -241,10 +233,12 @@ public void test010_hostPort() { Match gm = grok.match("www.google.fr:80"); Map map = gm.capture(); 
- assertEquals(ImmutableMap.of( - "HOSTPORT", "www.google.fr:80", - "IPORHOST", "www.google.fr", - "PORT", "80"), map); + assertEquals( + ImmutableMap.of( + "HOSTPORT", "www.google.fr:80", + "IPORHOST", "www.google.fr", + "PORT", "80"), + map); } @Test @@ -267,10 +261,11 @@ public void test011_combineApache() { assertEquals(map.get("TIME").toString(), "01:36:30"); gm = - grok.match("112.169.19.192 - - [06/Mar/2013:01:36:30 +0900] \"GET " - + "/wp-content/plugins/easy-table/themes/default/style.css?ver=1.0 HTTP/1.1\" " - + "304 - \"http://www.nflabs.com/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) " - + "AppleWebKit/537.22 (KHTML, like Gecko) Chrome/25.0.1364.152 Safari/537.22\""); + grok.match( + "112.169.19.192 - - [06/Mar/2013:01:36:30 +0900] \"GET" + + " /wp-content/plugins/easy-table/themes/default/style.css?ver=1.0 HTTP/1.1\" 304" + + " - \"http://www.nflabs.com/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2)" + + " AppleWebKit/537.22 (KHTML, like Gecko) Chrome/25.0.1364.152 Safari/537.22\""); map = gm.capture(); assertEquals( map.get("agent").toString(), @@ -278,7 +273,8 @@ public void test011_combineApache() { + "Chrome/25.0.1364.152 Safari/537.22"); assertEquals(map.get("clientip").toString(), "112.169.19.192"); assertEquals(map.get("httpversion").toString(), "1.1"); - assertEquals(map.get("request").toString(), + assertEquals( + map.get("request").toString(), "/wp-content/plugins/easy-table/themes/default/style.css?ver=1.0"); assertEquals(map.get("TIME").toString(), "01:36:30"); } @@ -319,7 +315,7 @@ public void test013_IpSet() throws Throwable { Grok grok = compiler.compile("%{IP}"); try (FileReader fr = new FileReader(Resources.getResource(ResourceManager.IP).getFile()); - BufferedReader br = new BufferedReader(fr)) { + BufferedReader br = new BufferedReader(fr)) { String line; System.out.println("Starting test with ip"); while ((line = br.readLine()) != null) { @@ -336,10 +332,31 @@ public void test014_month() { Grok grok = 
compiler.compile("%{MONTH}"); - String[] months = - {"Jan", "January", "Feb", "February", "Mar", "March", "Apr", "April", "May", "Jun", "June", - "Jul", "July", "Aug", "August", "Sep", "September", "Oct", "October", "Nov", - "November", "Dec", "December"}; + String[] months = { + "Jan", + "January", + "Feb", + "February", + "Mar", + "March", + "Apr", + "April", + "May", + "Jun", + "June", + "Jul", + "July", + "Aug", + "August", + "Sep", + "September", + "Oct", + "October", + "Nov", + "November", + "Dec", + "December" + }; int counter = 0; for (String month : months) { Match match = grok.match(month); @@ -355,20 +372,21 @@ public void test015_iso8601() throws GrokException { Grok grok = compiler.compile("%{TIMESTAMP_ISO8601}"); String[] times = { - "2001-01-01T00:00:00", - "1974-03-02T04:09:09", - "2010-05-03T08:18:18+00:00", - "2004-07-04T12:27:27-00:00", - "2001-09-05T16:36:36+0000", - "2001-11-06T20:45:45-0000", - "2001-12-07T23:54:54Z", - "2001-01-01T00:00:00.123456", - "1974-03-02T04:09:09.123456", - "2010-05-03T08:18:18.123456+00:00", - "2004-07-04T12:27:27.123456-00:00", - "2001-09-05T16:36:36.123456+0000", - "2001-11-06T20:45:45.123456-0000", - "2001-12-07T23:54:54.123456Z"}; + "2001-01-01T00:00:00", + "1974-03-02T04:09:09", + "2010-05-03T08:18:18+00:00", + "2004-07-04T12:27:27-00:00", + "2001-09-05T16:36:36+0000", + "2001-11-06T20:45:45-0000", + "2001-12-07T23:54:54Z", + "2001-01-01T00:00:00.123456", + "1974-03-02T04:09:09.123456", + "2010-05-03T08:18:18.123456+00:00", + "2004-07-04T12:27:27.123456-00:00", + "2001-09-05T16:36:36.123456+0000", + "2001-11-06T20:45:45.123456-0000", + "2001-12-07T23:54:54.123456Z" + }; int counter = 0; for (String time : times) { @@ -385,33 +403,34 @@ public void test016_uri() throws GrokException { Grok grok = compiler.compile("%{URI}"); String[] uris = { - "http://www.google.com", - "telnet://helloworld", - "http://www.example.com/", - "http://www.example.com/test.html", - "http://www.example.com/test.html?foo=bar", - 
"http://www.example.com/test.html?foo=bar&fizzle=baz", - "http://www.example.com:80/test.html?foo=bar&fizzle=baz", - "https://www.example.com:443/test.html?foo=bar&fizzle=baz", - "https://user@www.example.com:443/test.html?foo=bar&fizzle=baz", - "https://user:pass@somehost/fetch.pl", - "puppet:///", - "http://www.foo.com", - "http://www.foo.com/", - "http://www.foo.com/?testing", - "http://www.foo.com/?one=two", - "http://www.foo.com/?one=two&foo=bar", - "foo://somehost.com:12345", - "foo://user@somehost.com:12345", - "foo://user@somehost.com:12345/", - "foo://user@somehost.com:12345/foo.bar/baz/fizz", - "foo://user@somehost.com:12345/foo.bar/baz/fizz?test", - "foo://user@somehost.com:12345/foo.bar/baz/fizz?test=1&sink&foo=4", - "http://www.google.com/search?hl=en&source=hp&q=hello+world+%5E%40%23%24&btnG=Google+Search", - "http://www.freebsd.org/cgi/url.cgi?ports/sysutils/grok/pkg-descr", - "http://www.google.com/search?q=CAPTCHA+ssh&start=0&ie=utf-8&oe=utf-8&client=firefox-a" - + "&rls=org.mozilla:en-US:official", - "svn+ssh://somehost:12345/testing"}; + "http://www.google.com", + "telnet://helloworld", + "http://www.example.com/", + "http://www.example.com/test.html", + "http://www.example.com/test.html?foo=bar", + "http://www.example.com/test.html?foo=bar&fizzle=baz", + "http://www.example.com:80/test.html?foo=bar&fizzle=baz", + "https://www.example.com:443/test.html?foo=bar&fizzle=baz", + "https://user@www.example.com:443/test.html?foo=bar&fizzle=baz", + "https://user:pass@somehost/fetch.pl", + "puppet:///", + "http://www.foo.com", + "http://www.foo.com/", + "http://www.foo.com/?testing", + "http://www.foo.com/?one=two", + "http://www.foo.com/?one=two&foo=bar", + "foo://somehost.com:12345", + "foo://user@somehost.com:12345", + "foo://user@somehost.com:12345/", + "foo://user@somehost.com:12345/foo.bar/baz/fizz", + "foo://user@somehost.com:12345/foo.bar/baz/fizz?test", + "foo://user@somehost.com:12345/foo.bar/baz/fizz?test=1&sink&foo=4", + 
"http://www.google.com/search?hl=en&source=hp&q=hello+world+%5E%40%23%24&btnG=Google+Search", + "http://www.freebsd.org/cgi/url.cgi?ports/sysutils/grok/pkg-descr", + "http://www.google.com/search?q=CAPTCHA+ssh&start=0&ie=utf-8&oe=utf-8&client=firefox-a" + + "&rls=org.mozilla:en-US:official", + "svn+ssh://somehost:12345/testing" + }; int counter = 0; for (String uri : uris) { @@ -429,10 +448,7 @@ public void test017_nonMachingList() throws GrokException { Grok grok = compiler.compile("%{URI}"); String[] uris = { - "http://www.google.com", - "telnet://helloworld", - "", - "svn+ssh://somehost:12345/testing" + "http://www.google.com", "telnet://helloworld", "", "svn+ssh://somehost:12345/testing" }; int counter = 0; @@ -458,9 +474,7 @@ public void test018_namedOnlySimpleCase() throws GrokException { String text = "<< barfoobarfoo >>"; Match match = grok.match(text); Map map = match.capture(); - assertEquals("unable to parse: " + text, - text, - map.get("text")); + assertEquals("unable to parse: " + text, text, map.get("text")); } @Test @@ -488,9 +502,7 @@ private void testPatternRepetitions(boolean namedOnly, String pattern) throws Gr private void assertMatches(String description, Grok grok, String text) { Match match = grok.match(text); Map map = match.capture(); - assertEquals(format("%s: unable to parse '%s'", description, text), - text, - map.get("text")); + assertEquals(format("%s: unable to parse '%s'", description, text), text, map.get("text")); } @Test @@ -630,8 +642,8 @@ public void createGrokWithDefaultPatterns() throws GrokException { compiler.compile("%{USERNAME}", false); } - private void ensureAbortsWithDefinitionMissing(String pattern, String compilePattern, - boolean namedOnly) { + private void ensureAbortsWithDefinitionMissing( + String pattern, String compilePattern, boolean namedOnly) { try { compiler.compile(pattern); compiler.compile(compilePattern, namedOnly); @@ -643,10 +655,11 @@ private void ensureAbortsWithDefinitionMissing(String pattern, 
String compilePat @Test public void testGroupTypes() { - Grok grok = compiler.compile( - "%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z} %{USERNAME:username:text} " - + "%{IPORHOST:host}:%{POSINT:port:integer}", - true); + Grok grok = + compiler.compile( + "%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z} %{USERNAME:username:text} " + + "%{IPORHOST:host}:%{POSINT:port:integer}", + true); assertEquals(Converter.Type.DATETIME, grok.groupTypes.get("timestamp")); assertEquals(Converter.Type.STRING, grok.groupTypes.get("username")); assertEquals(Converter.Type.INT, grok.groupTypes.get("port")); @@ -667,8 +680,8 @@ public void testTimeZone() { DateTimeFormatter dtf = DateTimeFormatter.ofPattern("MM/dd/yyyy HH:mm:ss"); Grok grok = compiler.compile("%{DATESTAMP:timestamp;date;MM/dd/yyyy HH:mm:ss}", true); Instant instant = (Instant) grok.match(date).capture().get("timestamp"); - assertEquals(ZonedDateTime.parse(date, dtf.withZone(ZoneOffset.systemDefault())).toInstant(), - instant); + assertEquals( + ZonedDateTime.parse(date, dtf.withZone(ZoneOffset.systemDefault())).toInstant(), instant); // set default timezone to PST ZoneId pst = ZoneId.of("PST", ZoneId.SHORT_IDS); diff --git a/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java b/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java index 98cbb3aaeb..930da8caa8 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java @@ -16,7 +16,6 @@ import org.junit.Test; import org.opensearch.sql.common.grok.exception.GrokException; - public class MessagesTest { @Test @@ -26,8 +25,9 @@ public void test001_linux_messages() throws GrokException, IOException { Grok grok = compiler.compile("%{MESSAGESLOG}"); - BufferedReader br = new BufferedReader( - new FileReader(Resources.getResource(ResourceManager.MESSAGES).getFile())); + BufferedReader br = + new BufferedReader( + new 
FileReader(Resources.getResource(ResourceManager.MESSAGES).getFile())); String line; System.out.println("Starting test with linux messages log -- may take a while"); while ((line = br.readLine()) != null) { @@ -38,5 +38,4 @@ public void test001_linux_messages() throws GrokException, IOException { } br.close(); } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java b/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java index a13a72cd00..fba64b59d3 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java @@ -5,9 +5,7 @@ package org.opensearch.sql.common.grok; -/** - * {@code ResourceManager} . - */ +/** {@code ResourceManager} . */ public final class ResourceManager { public static final String PATTERNS = "patterns/patterns"; From c99549a70138cddcbbc372b34706ecb0808147b1 Mon Sep 17 00:00:00 2001 From: Shenoy Pratik Date: Mon, 14 Aug 2023 21:33:08 -0700 Subject: [PATCH 18/42] Okio upgrade to 3.5.0 (#1957) * okio upgrade to 3.5.0 Signed-off-by: Shenoy Pratik * remove empty line Signed-off-by: Shenoy Pratik * removed kotlin-stdlib resolution Signed-off-by: Shenoy Pratik * upgrade kotlin stdlib to 1.9.0 Signed-off-by: Shenoy Pratik --------- Signed-off-by: Shenoy Pratik --- build.gradle | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 4f5813fb22..166ced81ba 100644 --- a/build.gradle +++ b/build.gradle @@ -115,8 +115,9 @@ allprojects { sourceCompatibility = targetCompatibility = "11" } configurations.all { - resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib:1.6.0" - resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib-common:1.6.0" + resolutionStrategy.force "com.squareup.okio:okio:3.5.0" + resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib:1.9.0" + resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.0" } } 
From 5381a6f76ddc73c0cb14ad5befc444ec68dfdaed Mon Sep 17 00:00:00 2001 From: Andrew Carbonetto Date: Tue, 15 Aug 2023 08:24:17 -0700 Subject: [PATCH 19/42] (#1506) Remove reservedSymbolTable and replace with HIDDEN_FIELD_NAME (#1936) * (#1506) Remove reservedSymbolTable and replace with HIDDEN_FIELD_NAME (#323) * #1506: Remove reservedSymbolTable and use HIDDEN_FIELD_NAME instead Signed-off-by: acarbonetto * #1506: Remove reservedSymbolTable and use HIDDEN_FIELD_NAME instead Signed-off-by: acarbonetto * #1506: Fix checkstyle errors Signed-off-by: acarbonetto --------- Signed-off-by: acarbonetto * #1506: spotless apply Signed-off-by: acarbonetto --------- Signed-off-by: acarbonetto --- .../main/java/org/opensearch/sql/analysis/Analyzer.java | 4 ++-- .../org/opensearch/sql/analysis/ExpressionAnalyzer.java | 2 +- .../java/org/opensearch/sql/analysis/TypeEnvironment.java | 8 -------- .../org/opensearch/sql/analysis/symbol/Namespace.java | 1 + .../opensearch/sql/analysis/ExpressionAnalyzerTest.java | 6 +++--- 5 files changed, 7 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java index ad3713ec9a..d5e8b93b13 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java @@ -161,7 +161,7 @@ public LogicalPlan visitRelation(Relation node, AnalysisContext context) { table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v)); table .getReservedFieldTypes() - .forEach((k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)); + .forEach((k, v) -> curEnv.define(new Symbol(Namespace.HIDDEN_FIELD_NAME, k), v)); // Put index name or its alias in index namespace on type environment so qualifier // can be removed when analyzing qualified name. The value (expr type) here doesn't matter. 
@@ -215,7 +215,7 @@ public LogicalPlan visitTableFunction(TableFunction node, AnalysisContext contex table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v)); table .getReservedFieldTypes() - .forEach((k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)); + .forEach((k, v) -> curEnv.define(new Symbol(Namespace.HIDDEN_FIELD_NAME, k), v)); curEnv.define( new Symbol( Namespace.INDEX_NAME, dataSourceSchemaIdentifierNameResolver.getIdentifierName()), diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java index 8e586f68ff..5a8d6fe976 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java @@ -378,7 +378,7 @@ public Expression visitQualifiedName(QualifiedName node, AnalysisContext context typeEnv != null; typeEnv = typeEnv.getParent()) { Optional exprType = - typeEnv.getReservedSymbolTable().lookup(new Symbol(Namespace.FIELD_NAME, part)); + Optional.ofNullable(typeEnv.lookupAllFields(Namespace.HIDDEN_FIELD_NAME).get(part)); if (exprType.isPresent()) { return visitMetadata( qualifierAnalyzer.unqualified(node), (ExprCoreType) exprType.get(), context); diff --git a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java index 8baab64810..18693a63e6 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java +++ b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java @@ -25,8 +25,6 @@ public class TypeEnvironment implements Environment { @Getter private final TypeEnvironment parent; private final SymbolTable symbolTable; - @Getter private final SymbolTable reservedSymbolTable; - /** * Constructor with empty symbol tables. 
* @@ -35,7 +33,6 @@ public class TypeEnvironment implements Environment { public TypeEnvironment(TypeEnvironment parent) { this.parent = parent; this.symbolTable = new SymbolTable(); - this.reservedSymbolTable = new SymbolTable(); } /** @@ -47,7 +44,6 @@ public TypeEnvironment(TypeEnvironment parent) { public TypeEnvironment(TypeEnvironment parent, SymbolTable symbolTable) { this.parent = parent; this.symbolTable = symbolTable; - this.reservedSymbolTable = new SymbolTable(); } /** @@ -123,8 +119,4 @@ public void remove(ReferenceExpression ref) { public void clearAllFields() { lookupAllFields(FIELD_NAME).keySet().forEach(v -> remove(new Symbol(Namespace.FIELD_NAME, v))); } - - public void addReservedWord(Symbol symbol, ExprType type) { - reservedSymbolTable.store(symbol, type); - } } diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java index 8211207b2e..e8a7454014 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java @@ -9,6 +9,7 @@ public enum Namespace { INDEX_NAME("Index"), FIELD_NAME("Field"), + HIDDEN_FIELD_NAME("HiddenField"), FUNCTION_NAME("Function"); private final String name; diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java index 9d30ebeaab..b27b8348e2 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java @@ -216,13 +216,13 @@ public void qualified_name_with_qualifier() { public void qualified_name_with_reserved_symbol() { analysisContext.push(); - analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_reserved"), STRING); - analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_priority"), 
FLOAT); + analysisContext.peek().define(new Symbol(Namespace.HIDDEN_FIELD_NAME, "_reserved"), STRING); + analysisContext.peek().define(new Symbol(Namespace.HIDDEN_FIELD_NAME, "_priority"), FLOAT); analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual(DSL.ref("_priority", FLOAT), qualifiedName("_priority")); assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); - // reserved fields take priority over symbol table + // cannot replace an existing field type analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "_reserved"), LONG); assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); From e8e94d0de4371b5b6da6bf1aef929d9dfdb06cb1 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 16 Aug 2023 13:33:42 -0700 Subject: [PATCH 20/42] [Spotless] Applying Google Code Format for ppl files #12 (#1972) * Spotless apply on PPL (#339) Signed-off-by: Mitchell Gale * Update ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Guian Gumpac --- build.gradle | 3 +- ppl/build.gradle | 5 + .../org/opensearch/sql/ppl/PPLService.java | 16 +- .../sql/ppl/antlr/PPLSyntaxParser.java | 15 +- .../sql/ppl/domain/PPLQueryRequest.java | 24 +- .../sql/ppl/domain/PPLQueryResponse.java | 4 +- .../opensearch/sql/ppl/parser/AstBuilder.java | 264 +++---- .../sql/ppl/parser/AstExpressionBuilder.java | 203 +++--- .../sql/ppl/parser/AstStatementBuilder.java | 4 +- .../sql/ppl/utils/ArgumentFactory.java | 40 +- .../sql/ppl/utils/PPLQueryDataAnonymizer.java | 91 +-- .../sql/ppl/utils/UnresolvedPlanHelper.java | 9 +- .../opensearch/sql/ppl/PPLServiceTest.java | 122 ++-- .../ppl/antlr/NowLikeFunctionParserTest.java | 32 +- ...ntaxParserMatchBoolPrefixSamplesTests.java | 15 +- 
...PPLSyntaxParserMatchPhraseSamplesTest.java | 16 +- .../sql/ppl/antlr/PPLSyntaxParserTest.java | 310 +++++--- .../sql/ppl/domain/PPLQueryRequestTest.java | 20 +- .../sql/ppl/domain/PPLQueryResponseTest.java | 1 - .../sql/ppl/parser/AstBuilderTest.java | 607 +++++++--------- .../ppl/parser/AstExpressionBuilderTest.java | 662 ++++++------------ .../ppl/parser/AstNowLikeFunctionTest.java | 55 +- .../ppl/parser/AstStatementBuilderTest.java | 15 +- .../sql/ppl/utils/ArgumentFactoryTest.java | 46 +- .../ppl/utils/PPLQueryDataAnonymizerTest.java | 99 +-- .../ppl/utils/UnresolvedPlanHelperTest.java | 1 - 26 files changed, 1114 insertions(+), 1565 deletions(-) diff --git a/build.gradle b/build.gradle index 166ced81ba..c0f82c502b 100644 --- a/build.gradle +++ b/build.gradle @@ -86,7 +86,8 @@ spotless { target fileTree('.') { include 'common/**/*.java', 'datasources/**/*.java', - 'core/**/*.java' + 'core/**/*.java', + 'ppl/**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/ppl/build.gradle b/ppl/build.gradle index 484934ddc3..e16b6decfc 100644 --- a/ppl/build.gradle +++ b/ppl/build.gradle @@ -29,6 +29,11 @@ plugins { id 'antlr' } +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + generateGrammarSource { arguments += ['-visitor', '-package', 'org.opensearch.sql.ppl.antlr.parser'] source = sourceSets.main.antlr diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java index 40a7a85f78..7769f5dfae 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.executor.ExecutionEngine.QueryResponse; @@ -27,9 +26,7 @@ import 
org.opensearch.sql.ppl.parser.AstStatementBuilder; import org.opensearch.sql.ppl.utils.PPLQueryDataAnonymizer; -/** - * PPLService. - */ +/** PPLService. */ @RequiredArgsConstructor public class PPLService { private final PPLSyntaxParser parser; @@ -45,7 +42,7 @@ public class PPLService { /** * Execute the {@link PPLQueryRequest}, using {@link ResponseListener} to get response. * - * @param request {@link PPLQueryRequest} + * @param request {@link PPLQueryRequest} * @param listener {@link ResponseListener} */ public void execute(PPLQueryRequest request, ResponseListener listener) { @@ -57,10 +54,10 @@ public void execute(PPLQueryRequest request, ResponseListener lis } /** - * Explain the query in {@link PPLQueryRequest} using {@link ResponseListener} to - * get and format explain response. + * Explain the query in {@link PPLQueryRequest} using {@link ResponseListener} to get and format + * explain response. * - * @param request {@link PPLQueryRequest} + * @param request {@link PPLQueryRequest} * @param listener {@link ResponseListener} for explain response */ public void explain(PPLQueryRequest request, ResponseListener listener) { @@ -90,7 +87,6 @@ private AbstractPlan plan( QueryContext.getRequestId(), anonymizer.anonymizeStatement(statement)); - return queryExecutionFactory.create( - statement, queryListener, explainListener); + return queryExecutionFactory.create(statement, queryListener, explainListener); } } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java b/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java index 168ba33a8a..1d4485e749 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import org.antlr.v4.runtime.CommonTokenStream; @@ -15,13 +14,9 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLLexer; 
import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser; -/** - * PPL Syntax Parser. - */ +/** PPL Syntax Parser. */ public class PPLSyntaxParser implements Parser { - /** - * Analyze the query syntax. - */ + /** Analyze the query syntax. */ @Override public ParseTree parse(String query) { OpenSearchPPLParser parser = createParser(createLexer(query)); @@ -30,12 +25,10 @@ public ParseTree parse(String query) { } private OpenSearchPPLParser createParser(Lexer lexer) { - return new OpenSearchPPLParser( - new CommonTokenStream(lexer)); + return new OpenSearchPPLParser(new CommonTokenStream(lexer)); } private OpenSearchPPLLexer createLexer(String query) { - return new OpenSearchPPLLexer( - new CaseInsensitiveCharStream(query)); + return new OpenSearchPPLLexer(new CaseInsensitiveCharStream(query)); } } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java index 87532e01d0..ca351fcc0a 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import java.util.Locale; @@ -22,12 +21,9 @@ public class PPLQueryRequest { public static final PPLQueryRequest NULL = new PPLQueryRequest("", null, DEFAULT_PPL_PATH, ""); private final String pplQuery; - @Getter - private final JSONObject jsonContent; - @Getter - private final String path; - @Getter - private String format = ""; + @Getter private final JSONObject jsonContent; + @Getter private final String path; + @Getter private String format = ""; @Setter @Getter @@ -43,9 +39,7 @@ public PPLQueryRequest(String pplQuery, JSONObject jsonContent, String path) { this(pplQuery, jsonContent, path, ""); } - /** - * Constructor of PPLQueryRequest. - */ + /** Constructor of PPLQueryRequest. 
*/ public PPLQueryRequest(String pplQuery, JSONObject jsonContent, String path, String format) { this.pplQuery = pplQuery; this.jsonContent = jsonContent; @@ -59,23 +53,21 @@ public String getRequest() { /** * Check if request is to explain rather than execute the query. - * @return true if it is a explain request + * + * @return true if it is a explain request */ public boolean isExplainRequest() { return path.endsWith("/_explain"); } - /** - * Decide on the formatter by the requested format. - */ + /** Decide on the formatter by the requested format. */ public Format format() { Optional optionalFormat = Format.of(format); if (optionalFormat.isPresent()) { return optionalFormat.get(); } else { throw new IllegalArgumentException( - String.format(Locale.ROOT,"response in %s format is not supported.", format)); + String.format(Locale.ROOT, "response in %s format is not supported.", format)); } } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java index 483726702a..5cae8e8f06 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java @@ -3,8 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; -public class PPLQueryResponse { -} +public class PPLQueryResponse {} diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java index 323f99a5af..3c693fa0bd 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.opensearch.sql.ast.dsl.AstDSL.qualifiedName; @@ -74,33 +73,25 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; 
import org.opensearch.sql.ppl.utils.ArgumentFactory; -/** - * Class of building the AST. - * Refines the visit path and build the AST nodes - */ +/** Class of building the AST. Refines the visit path and build the AST nodes */ @RequiredArgsConstructor public class AstBuilder extends OpenSearchPPLParserBaseVisitor { private final AstExpressionBuilder expressionBuilder; /** - * PPL query to get original token text. This is necessary because token.getText() returns - * text without whitespaces or other characters discarded by lexer. + * PPL query to get original token text. This is necessary because token.getText() returns text + * without whitespaces or other characters discarded by lexer. */ private final String query; @Override public UnresolvedPlan visitQueryStatement(OpenSearchPPLParser.QueryStatementContext ctx) { UnresolvedPlan pplCommand = visit(ctx.pplCommands()); - return ctx.commands() - .stream() - .map(this::visit) - .reduce(pplCommand, (r, e) -> e.attach(r)); + return ctx.commands().stream().map(this::visit).reduce(pplCommand, (r, e) -> e.attach(r)); } - /** - * Search command. - */ + /** Search command. */ @Override public UnresolvedPlan visitSearchFrom(SearchFromContext ctx) { return visitFromClause(ctx.fromClause()); @@ -108,23 +99,22 @@ public UnresolvedPlan visitSearchFrom(SearchFromContext ctx) { @Override public UnresolvedPlan visitSearchFromFilter(SearchFromFilterContext ctx) { - return new Filter(internalVisitExpression(ctx.logicalExpression())).attach( - visit(ctx.fromClause())); + return new Filter(internalVisitExpression(ctx.logicalExpression())) + .attach(visit(ctx.fromClause())); } @Override public UnresolvedPlan visitSearchFilterFrom(SearchFilterFromContext ctx) { - return new Filter(internalVisitExpression(ctx.logicalExpression())).attach( - visit(ctx.fromClause())); + return new Filter(internalVisitExpression(ctx.logicalExpression())) + .attach(visit(ctx.fromClause())); } /** - * Describe command. 
- * Current logic separates table and metadata info about table by adding - * MAPPING_ODFE_SYS_TABLE as suffix. - * Even with the introduction of datasource and schema name in fully qualified table name, - * we do the same thing by appending MAPPING_ODFE_SYS_TABLE as syffix to the last part - * of qualified name. + * Describe command.
+ * Current logic separates table and metadata info about table by adding MAPPING_ODFE_SYS_TABLE as + * suffix. Even with the introduction of datasource and schema name in fully qualified table name, + * we do the same thing by appending MAPPING_ODFE_SYS_TABLE as syffix to the last part of + * qualified name. */ @Override public UnresolvedPlan visitDescribeCommand(DescribeCommandContext ctx) { @@ -135,63 +125,52 @@ public UnresolvedPlan visitDescribeCommand(DescribeCommandContext ctx) { return new Relation(new QualifiedName(parts)); } - /** - * Show command. - */ + /** Show command. */ @Override public UnresolvedPlan visitShowDataSourcesCommand( OpenSearchPPLParser.ShowDataSourcesCommandContext ctx) { return new Relation(qualifiedName(DATASOURCES_TABLE_NAME)); } - - /** - * Where command. - */ + /** Where command. */ @Override public UnresolvedPlan visitWhereCommand(WhereCommandContext ctx) { return new Filter(internalVisitExpression(ctx.logicalExpression())); } - /** - * Fields command. - */ + /** Fields command. */ @Override public UnresolvedPlan visitFieldsCommand(FieldsCommandContext ctx) { return new Project( - ctx.fieldList() - .fieldExpression() - .stream() + ctx.fieldList().fieldExpression().stream() .map(this::internalVisitExpression) .collect(Collectors.toList()), - ArgumentFactory.getArgumentList(ctx) - ); + ArgumentFactory.getArgumentList(ctx)); } - /** - * Rename command. - */ + /** Rename command. */ @Override public UnresolvedPlan visitRenameCommand(RenameCommandContext ctx) { return new Rename( - ctx.renameClasue() - .stream() - .map(ct -> new Map(internalVisitExpression(ct.orignalField), - internalVisitExpression(ct.renamedField))) - .collect(Collectors.toList()) - ); + ctx.renameClasue().stream() + .map( + ct -> + new Map( + internalVisitExpression(ct.orignalField), + internalVisitExpression(ct.renamedField))) + .collect(Collectors.toList())); } - /** - * Stats command. - */ + /** Stats command. 
*/ @Override public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { ImmutableList.Builder aggListBuilder = new ImmutableList.Builder<>(); for (OpenSearchPPLParser.StatsAggTermContext aggCtx : ctx.statsAggTerm()) { UnresolvedExpression aggExpression = internalVisitExpression(aggCtx.statsFunction()); - String name = aggCtx.alias == null ? getTextInQuery(aggCtx) : StringUtils - .unquoteIdentifier(aggCtx.alias.getText()); + String name = + aggCtx.alias == null + ? getTextInQuery(aggCtx) + : StringUtils.unquoteIdentifier(aggCtx.alias.getText()); Alias alias = new Alias(name, aggExpression); aggListBuilder.add(alias); } @@ -199,12 +178,16 @@ public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { List groupList = Optional.ofNullable(ctx.statsByClause()) .map(OpenSearchPPLParser.StatsByClauseContext::fieldList) - .map(expr -> expr.fieldExpression().stream() - .map(groupCtx -> - (UnresolvedExpression) new Alias( - StringUtils.unquoteIdentifier(getTextInQuery(groupCtx)), - internalVisitExpression(groupCtx))) - .collect(Collectors.toList())) + .map( + expr -> + expr.fieldExpression().stream() + .map( + groupCtx -> + (UnresolvedExpression) + new Alias( + StringUtils.unquoteIdentifier(getTextInQuery(groupCtx)), + internalVisitExpression(groupCtx))) + .collect(Collectors.toList())) .orElse(Collections.emptyList()); UnresolvedExpression span = @@ -213,30 +196,23 @@ public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { .map(this::internalVisitExpression) .orElse(null); - Aggregation aggregation = new Aggregation( - aggListBuilder.build(), - Collections.emptyList(), - groupList, - span, - ArgumentFactory.getArgumentList(ctx) - ); + Aggregation aggregation = + new Aggregation( + aggListBuilder.build(), + Collections.emptyList(), + groupList, + span, + ArgumentFactory.getArgumentList(ctx)); return aggregation; } - /** - * Dedup command. - */ + /** Dedup command. 
*/ @Override public UnresolvedPlan visitDedupCommand(DedupCommandContext ctx) { - return new Dedupe( - ArgumentFactory.getArgumentList(ctx), - getFieldList(ctx.fieldList()) - ); + return new Dedupe(ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList())); } - /** - * Head command visitor. - */ + /** Head command visitor. */ @Override public UnresolvedPlan visitHeadCommand(HeadCommandContext ctx) { Integer size = ctx.number != null ? Integer.parseInt(ctx.number.getText()) : 10; @@ -244,58 +220,46 @@ public UnresolvedPlan visitHeadCommand(HeadCommandContext ctx) { return new Head(size, from); } - /** - * Sort command. - */ + /** Sort command. */ @Override public UnresolvedPlan visitSortCommand(SortCommandContext ctx) { return new Sort( - ctx.sortbyClause() - .sortField() - .stream() + ctx.sortbyClause().sortField().stream() .map(sort -> (Field) internalVisitExpression(sort)) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } - /** - * Eval command. - */ + /** Eval command. */ @Override public UnresolvedPlan visitEvalCommand(EvalCommandContext ctx) { return new Eval( - ctx.evalClause() - .stream() + ctx.evalClause().stream() .map(ct -> (Let) internalVisitExpression(ct)) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } private List getGroupByList(ByClauseContext ctx) { - return ctx.fieldList().fieldExpression().stream().map(this::internalVisitExpression) + return ctx.fieldList().fieldExpression().stream() + .map(this::internalVisitExpression) .collect(Collectors.toList()); } private List getFieldList(FieldListContext ctx) { - return ctx.fieldExpression() - .stream() + return ctx.fieldExpression().stream() .map(field -> (Field) internalVisitExpression(field)) .collect(Collectors.toList()); } - /** - * Rare command. - */ + /** Rare command. */ @Override public UnresolvedPlan visitRareCommand(RareCommandContext ctx) { - List groupList = ctx.byClause() == null ? 
Collections.emptyList() : - getGroupByList(ctx.byClause()); + List groupList = + ctx.byClause() == null ? Collections.emptyList() : getGroupByList(ctx.byClause()); return new RareTopN( CommandType.RARE, ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList()), - groupList - ); + groupList); } @Override @@ -319,34 +283,31 @@ public UnresolvedPlan visitPatternsCommand(OpenSearchPPLParser.PatternsCommandCo UnresolvedExpression sourceField = internalVisitExpression(ctx.source_field); ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.patternsParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); java.util.Map arguments = builder.build(); Literal pattern = arguments.getOrDefault("pattern", AstDSL.stringLiteral("")); return new Parse(ParseMethod.PATTERNS, sourceField, pattern, arguments); } - /** - * Top command. - */ + /** Top command. */ @Override public UnresolvedPlan visitTopCommand(TopCommandContext ctx) { - List groupList = ctx.byClause() == null ? Collections.emptyList() : - getGroupByList(ctx.byClause()); + List groupList = + ctx.byClause() == null ? Collections.emptyList() : getGroupByList(ctx.byClause()); return new RareTopN( CommandType.TOP, ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList()), - groupList - ); + groupList); } - /** - * From clause. - */ + /** From clause. 
*/ @Override public UnresolvedPlan visitFromClause(FromClauseContext ctx) { if (ctx.tableFunction() != null) { @@ -358,34 +319,31 @@ public UnresolvedPlan visitFromClause(FromClauseContext ctx) { @Override public UnresolvedPlan visitTableSourceClause(TableSourceClauseContext ctx) { - return new Relation(ctx.tableSource() - .stream().map(this::internalVisitExpression) - .collect(Collectors.toList())); + return new Relation( + ctx.tableSource().stream().map(this::internalVisitExpression).collect(Collectors.toList())); } @Override public UnresolvedPlan visitTableFunction(TableFunctionContext ctx) { ImmutableList.Builder builder = ImmutableList.builder(); - ctx.functionArgs().functionArg().forEach(arg - -> { - String argName = (arg.ident() != null) ? arg.ident().getText() : null; - builder.add( - new UnresolvedArgument(argName, - this.internalVisitExpression(arg.valueExpression()))); - }); + ctx.functionArgs() + .functionArg() + .forEach( + arg -> { + String argName = (arg.ident() != null) ? arg.ident().getText() : null; + builder.add( + new UnresolvedArgument( + argName, this.internalVisitExpression(arg.valueExpression()))); + }); return new TableFunction(this.internalVisitExpression(ctx.qualifiedName()), builder.build()); } - /** - * Navigate to & build AST expression. - */ + /** Navigate to & build AST expression. */ private UnresolvedExpression internalVisitExpression(ParseTree tree) { return expressionBuilder.visit(tree); } - /** - * Simply return non-default value for now. - */ + /** Simply return non-default value for now. */ @Override protected UnresolvedPlan aggregateResult(UnresolvedPlan aggregate, UnresolvedPlan nextResult) { if (nextResult != defaultResult()) { @@ -394,52 +352,48 @@ protected UnresolvedPlan aggregateResult(UnresolvedPlan aggregate, UnresolvedPla return aggregate; } - /** - * Kmeans command. - */ + /** Kmeans command. 
*/ @Override public UnresolvedPlan visitKmeansCommand(KmeansCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.kmeansParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); return new Kmeans(builder.build()); } - /** - * AD command. - */ + /** AD command. */ @Override public UnresolvedPlan visitAdCommand(AdCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.adParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); return new AD(builder.build()); } - /** - * ml command. - */ + /** ml command. */ @Override public UnresolvedPlan visitMlCommand(OpenSearchPPLParser.MlCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.mlArg() - .forEach(x -> { - builder.put(x.argName.getText(), - (Literal) internalVisitExpression(x.argValue)); - }); + .forEach( + x -> { + builder.put(x.argName.getText(), (Literal) internalVisitExpression(x.argValue)); + }); return new ML(builder.build()); } - /** - * Get original text in query. - */ + /** Get original text in query. 
*/ private String getTextInQuery(ParserRuleContext ctx) { Token start = ctx.getStart(); Token stop = ctx.getStop(); diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java index c775747ec4..690e45d67c 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.opensearch.sql.expression.function.BuiltinFunctionName.IS_NOT_NULL; @@ -83,33 +82,25 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; import org.opensearch.sql.ppl.utils.ArgumentFactory; -/** - * Class of building AST Expression nodes. - */ +/** Class of building AST Expression nodes. */ public class AstExpressionBuilder extends OpenSearchPPLParserBaseVisitor { private static final int DEFAULT_TAKE_FUNCTION_SIZE_VALUE = 10; - /** - * The function name mapping between fronted and core engine. - */ + /** The function name mapping between fronted and core engine. */ private static Map FUNCTION_NAME_MAPPING = new ImmutableMap.Builder() .put("isnull", IS_NULL.getName().getFunctionName()) .put("isnotnull", IS_NOT_NULL.getName().getFunctionName()) .build(); - /** - * Eval clause. - */ + /** Eval clause. */ @Override public UnresolvedExpression visitEvalClause(EvalClauseContext ctx) { return new Let((Field) visit(ctx.fieldExpression()), visit(ctx.expression())); } - /** - * Logical expression excluding boolean, comparison. - */ + /** Logical expression excluding boolean, comparison. 
*/ @Override public UnresolvedExpression visitLogicalNot(LogicalNotContext ctx) { return new Not(visit(ctx.logicalExpression())); @@ -130,9 +121,7 @@ public UnresolvedExpression visitLogicalXor(LogicalXorContext ctx) { return new Xor(visit(ctx.left), visit(ctx.right)); } - /** - * Comparison expression. - */ + /** Comparison expression. */ @Override public UnresolvedExpression visitCompareExpr(CompareExprContext ctx) { return new Compare(ctx.comparisonOperator().getText(), visit(ctx.left), visit(ctx.right)); @@ -142,22 +131,16 @@ public UnresolvedExpression visitCompareExpr(CompareExprContext ctx) { public UnresolvedExpression visitInExpr(InExprContext ctx) { return new In( visit(ctx.valueExpression()), - ctx.valueList() - .literalValue() - .stream() + ctx.valueList().literalValue().stream() .map(this::visitLiteralValue) .collect(Collectors.toList())); } - /** - * Value Expression. - */ + /** Value Expression. */ @Override public UnresolvedExpression visitBinaryArithmetic(BinaryArithmeticContext ctx) { return new Function( - ctx.binaryOperator.getText(), - Arrays.asList(visit(ctx.left), visit(ctx.right)) - ); + ctx.binaryOperator.getText(), Arrays.asList(visit(ctx.left), visit(ctx.right))); } @Override @@ -165,9 +148,7 @@ public UnresolvedExpression visitParentheticValueExpr(ParentheticValueExprContex return visit(ctx.valueExpression()); // Discard parenthesis around } - /** - * Field expression. - */ + /** Field expression. */ @Override public UnresolvedExpression visitFieldExpression(FieldExpressionContext ctx) { return new Field((QualifiedName) visit(ctx.qualifiedName())); @@ -182,13 +163,10 @@ public UnresolvedExpression visitWcFieldExpression(WcFieldExpressionContext ctx) public UnresolvedExpression visitSortField(SortFieldContext ctx) { return new Field( visit(ctx.sortFieldExpression().fieldExpression().qualifiedName()), - ArgumentFactory.getArgumentList(ctx) - ); + ArgumentFactory.getArgumentList(ctx)); } - /** - * Aggregation function. 
- */ + /** Aggregation function. */ @Override public UnresolvedExpression visitStatsFunctionCall(StatsFunctionCallContext ctx) { return new AggregateFunction(ctx.statsFunctionName().getText(), visit(ctx.valueExpression())); @@ -206,7 +184,9 @@ public UnresolvedExpression visitDistinctCountFunctionCall(DistinctCountFunction @Override public UnresolvedExpression visitPercentileAggFunction(PercentileAggFunctionContext ctx) { - return new AggregateFunction(ctx.PERCENTILE().getText(), visit(ctx.aggField), + return new AggregateFunction( + ctx.PERCENTILE().getText(), + visit(ctx.aggField), Collections.singletonList(new Argument("rank", (Literal) visit(ctx.value)))); } @@ -214,34 +194,32 @@ public UnresolvedExpression visitPercentileAggFunction(PercentileAggFunctionCont public UnresolvedExpression visitTakeAggFunctionCall( OpenSearchPPLParser.TakeAggFunctionCallContext ctx) { ImmutableList.Builder builder = ImmutableList.builder(); - builder.add(new UnresolvedArgument("size", - ctx.takeAggFunction().size != null ? visit(ctx.takeAggFunction().size) : - AstDSL.intLiteral(DEFAULT_TAKE_FUNCTION_SIZE_VALUE))); - return new AggregateFunction("take", visit(ctx.takeAggFunction().fieldExpression()), - builder.build()); + builder.add( + new UnresolvedArgument( + "size", + ctx.takeAggFunction().size != null + ? visit(ctx.takeAggFunction().size) + : AstDSL.intLiteral(DEFAULT_TAKE_FUNCTION_SIZE_VALUE))); + return new AggregateFunction( + "take", visit(ctx.takeAggFunction().fieldExpression()), builder.build()); } - /** - * Eval function. - */ + /** Eval function. */ @Override public UnresolvedExpression visitBooleanFunctionCall(BooleanFunctionCallContext ctx) { final String functionName = ctx.conditionFunctionBase().getText(); - return buildFunction(FUNCTION_NAME_MAPPING.getOrDefault(functionName, functionName), + return buildFunction( + FUNCTION_NAME_MAPPING.getOrDefault(functionName, functionName), ctx.functionArgs().functionArg()); } - /** - * Eval function. 
- */ + /** Eval function. */ @Override public UnresolvedExpression visitEvalFunctionCall(EvalFunctionCallContext ctx) { return buildFunction(ctx.evalFunctionName().getText(), ctx.functionArgs().functionArg()); } - /** - * Cast function. - */ + /** Cast function. */ @Override public UnresolvedExpression visitDataTypeFunctionCall(DataTypeFunctionCallContext ctx) { return new Cast(visit(ctx.expression()), visit(ctx.convertedDataType())); @@ -252,15 +230,10 @@ public UnresolvedExpression visitConvertedDataType(ConvertedDataTypeContext ctx) return AstDSL.stringLiteral(ctx.getText()); } - private Function buildFunction(String functionName, - List args) { + private Function buildFunction( + String functionName, List args) { return new Function( - functionName, - args - .stream() - .map(this::visitFunctionArg) - .collect(Collectors.toList()) - ); + functionName, args.stream().map(this::visitFunctionArg).collect(Collectors.toList())); } @Override @@ -290,70 +263,62 @@ public UnresolvedExpression visitTableSource(TableSourceContext ctx) { @Override public UnresolvedExpression visitPositionFunction( - OpenSearchPPLParser.PositionFunctionContext ctx) { + OpenSearchPPLParser.PositionFunctionContext ctx) { return new Function( - POSITION.getName().getFunctionName(), - Arrays.asList(visitFunctionArg(ctx.functionArg(0)), - visitFunctionArg(ctx.functionArg(1)))); + POSITION.getName().getFunctionName(), + Arrays.asList(visitFunctionArg(ctx.functionArg(0)), visitFunctionArg(ctx.functionArg(1)))); } @Override public UnresolvedExpression visitExtractFunctionCall( - OpenSearchPPLParser.ExtractFunctionCallContext ctx) { + OpenSearchPPLParser.ExtractFunctionCallContext ctx) { return new Function( - ctx.extractFunction().EXTRACT().toString(), - getExtractFunctionArguments(ctx)); + ctx.extractFunction().EXTRACT().toString(), getExtractFunctionArguments(ctx)); } private List getExtractFunctionArguments( - OpenSearchPPLParser.ExtractFunctionCallContext ctx) { - List args = Arrays.asList( + 
OpenSearchPPLParser.ExtractFunctionCallContext ctx) { + List args = + Arrays.asList( new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING), - visitFunctionArg(ctx.extractFunction().functionArg()) - ); + visitFunctionArg(ctx.extractFunction().functionArg())); return args; } @Override public UnresolvedExpression visitGetFormatFunctionCall( - OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { + OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { return new Function( - ctx.getFormatFunction().GET_FORMAT().toString(), - getFormatFunctionArguments(ctx)); + ctx.getFormatFunction().GET_FORMAT().toString(), getFormatFunctionArguments(ctx)); } private List getFormatFunctionArguments( - OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { - List args = Arrays.asList( + OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { + List args = + Arrays.asList( new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING), - visitFunctionArg(ctx.getFormatFunction().functionArg()) - ); + visitFunctionArg(ctx.getFormatFunction().functionArg())); return args; } @Override public UnresolvedExpression visitTimestampFunctionCall( - OpenSearchPPLParser.TimestampFunctionCallContext ctx) { + OpenSearchPPLParser.TimestampFunctionCallContext ctx) { return new Function( - ctx.timestampFunction().timestampFunctionName().getText(), - timestampFunctionArguments(ctx)); + ctx.timestampFunction().timestampFunctionName().getText(), timestampFunctionArguments(ctx)); } private List timestampFunctionArguments( - OpenSearchPPLParser.TimestampFunctionCallContext ctx) { - List args = Arrays.asList( - new Literal( - ctx.timestampFunction().simpleDateTimePart().getText(), - DataType.STRING), + OpenSearchPPLParser.TimestampFunctionCallContext ctx) { + List args = + Arrays.asList( + new Literal(ctx.timestampFunction().simpleDateTimePart().getText(), DataType.STRING), visitFunctionArg(ctx.timestampFunction().firstArg), - 
visitFunctionArg(ctx.timestampFunction().secondArg) - ); + visitFunctionArg(ctx.timestampFunction().secondArg)); return args; } - /** - * Literal and value. - */ + /** Literal and value. */ @Override public UnresolvedExpression visitIdentsAsQualifiedName(IdentsAsQualifiedNameContext ctx) { return visitIdentifiers(ctx.ident()); @@ -406,8 +371,10 @@ public UnresolvedExpression visitBooleanLiteral(BooleanLiteralContext ctx) { @Override public UnresolvedExpression visitBySpanClause(BySpanClauseContext ctx) { String name = ctx.spanClause().getText(); - return ctx.alias != null ? new Alias(name, visit(ctx.spanClause()), StringUtils - .unquoteIdentifier(ctx.alias.getText())) : new Alias(name, visit(ctx.spanClause())); + return ctx.alias != null + ? new Alias( + name, visit(ctx.spanClause()), StringUtils.unquoteIdentifier(ctx.alias.getText())) + : new Alias(name, visit(ctx.spanClause())); } @Override @@ -421,8 +388,7 @@ private QualifiedName visitIdentifiers(List ctx) { ctx.stream() .map(RuleContext::getText) .map(StringUtils::unquoteIdentifier) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } private List singleFieldRelevanceArguments( @@ -430,13 +396,21 @@ private List singleFieldRelevanceArguments( // all the arguments are defaulted to string values // to skip environment resolving and function signature resolving ImmutableList.Builder builder = ImmutableList.builder(); - builder.add(new UnresolvedArgument("field", - new QualifiedName(StringUtils.unquoteText(ctx.field.getText())))); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); - ctx.relevanceArg().forEach(v -> builder.add(new UnresolvedArgument( - v.relevanceArgName().getText().toLowerCase(), new Literal(StringUtils.unquoteText( - v.relevanceArgValue().getText()), DataType.STRING)))); + builder.add( + new UnresolvedArgument( + "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText())))); + builder.add( 
+ new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); + ctx.relevanceArg() + .forEach( + v -> + builder.add( + new UnresolvedArgument( + v.relevanceArgName().getText().toLowerCase(), + new Literal( + StringUtils.unquoteText(v.relevanceArgValue().getText()), + DataType.STRING)))); return builder.build(); } @@ -445,19 +419,26 @@ private List multiFieldRelevanceArguments( // all the arguments are defaulted to string values // to skip environment resolving and function signature resolving ImmutableList.Builder builder = ImmutableList.builder(); - var fields = new RelevanceFieldList(ctx - .getRuleContexts(OpenSearchPPLParser.RelevanceFieldAndWeightContext.class) - .stream() - .collect(Collectors.toMap( - f -> StringUtils.unquoteText(f.field.getText()), - f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText())))); + var fields = + new RelevanceFieldList( + ctx.getRuleContexts(OpenSearchPPLParser.RelevanceFieldAndWeightContext.class).stream() + .collect( + Collectors.toMap( + f -> StringUtils.unquoteText(f.field.getText()), + f -> (f.weight == null) ? 
1F : Float.parseFloat(f.weight.getText())))); builder.add(new UnresolvedArgument("fields", fields)); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); - ctx.relevanceArg().forEach(v -> builder.add(new UnresolvedArgument( - v.relevanceArgName().getText().toLowerCase(), new Literal(StringUtils.unquoteText( - v.relevanceArgValue().getText()), DataType.STRING)))); + builder.add( + new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); + ctx.relevanceArg() + .forEach( + v -> + builder.add( + new UnresolvedArgument( + v.relevanceArgName().getText().toLowerCase(), + new Literal( + StringUtils.unquoteText(v.relevanceArgValue().getText()), + DataType.STRING)))); return builder.build(); } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java index 3b7e5a78dd..e276e6d523 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java @@ -21,9 +21,7 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser; import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; -/** - * Build {@link Statement} from PPL Query. - */ +/** Build {@link Statement} from PPL Query. 
*/ @RequiredArgsConstructor public class AstStatementBuilder extends OpenSearchPPLParserBaseVisitor { diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java index 941bfe680e..f89ecf9c6e 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BooleanLiteralContext; @@ -24,9 +23,7 @@ import org.opensearch.sql.ast.expression.Literal; import org.opensearch.sql.common.utils.StringUtils; -/** - * Util class to get all arguments as a list from the PPL command. - */ +/** Util class to get all arguments as a list from the PPL command. */ public class ArgumentFactory { /** @@ -39,8 +36,7 @@ public static List getArgumentList(FieldsCommandContext ctx) { return Collections.singletonList( ctx.MINUS() != null ? new Argument("exclude", new Literal(true, DataType.BOOLEAN)) - : new Argument("exclude", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("exclude", new Literal(false, DataType.BOOLEAN))); } /** @@ -62,8 +58,7 @@ public static List getArgumentList(StatsCommandContext ctx) { : new Argument("delim", new Literal(" ", DataType.STRING)), ctx.dedupsplit != null ? new Argument("dedupsplit", getArgumentValue(ctx.dedupsplit)) - : new Argument("dedupsplit", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("dedupsplit", new Literal(false, DataType.BOOLEAN))); } /** @@ -82,8 +77,7 @@ public static List getArgumentList(DedupCommandContext ctx) { : new Argument("keepempty", new Literal(false, DataType.BOOLEAN)), ctx.consecutive != null ? 
new Argument("consecutive", getArgumentValue(ctx.consecutive)) - : new Argument("consecutive", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("consecutive", new Literal(false, DataType.BOOLEAN))); } /** @@ -100,13 +94,12 @@ public static List getArgumentList(SortFieldContext ctx) { ctx.sortFieldExpression().AUTO() != null ? new Argument("type", new Literal("auto", DataType.STRING)) : ctx.sortFieldExpression().IP() != null - ? new Argument("type", new Literal("ip", DataType.STRING)) - : ctx.sortFieldExpression().NUM() != null - ? new Argument("type", new Literal("num", DataType.STRING)) - : ctx.sortFieldExpression().STR() != null - ? new Argument("type", new Literal("str", DataType.STRING)) - : new Argument("type", new Literal(null, DataType.NULL)) - ); + ? new Argument("type", new Literal("ip", DataType.STRING)) + : ctx.sortFieldExpression().NUM() != null + ? new Argument("type", new Literal("num", DataType.STRING)) + : ctx.sortFieldExpression().STR() != null + ? new Argument("type", new Literal("str", DataType.STRING)) + : new Argument("type", new Literal(null, DataType.NULL))); } /** @@ -119,8 +112,7 @@ public static List getArgumentList(TopCommandContext ctx) { return Collections.singletonList( ctx.number != null ? new Argument("noOfResults", getArgumentValue(ctx.number)) - : new Argument("noOfResults", new Literal(10, DataType.INTEGER)) - ); + : new Argument("noOfResults", new Literal(10, DataType.INTEGER))); } /** @@ -130,21 +122,21 @@ public static List getArgumentList(TopCommandContext ctx) { * @return the list of argument with default number of results for the rare command */ public static List getArgumentList(RareCommandContext ctx) { - return Collections - .singletonList(new Argument("noOfResults", new Literal(10, DataType.INTEGER))); + return Collections.singletonList( + new Argument("noOfResults", new Literal(10, DataType.INTEGER))); } /** * parse argument value into Literal. 
+ * * @param ctx ParserRuleContext instance * @return Literal */ private static Literal getArgumentValue(ParserRuleContext ctx) { return ctx instanceof IntegerLiteralContext - ? new Literal(Integer.parseInt(ctx.getText()), DataType.INTEGER) - : ctx instanceof BooleanLiteralContext + ? new Literal(Integer.parseInt(ctx.getText()), DataType.INTEGER) + : ctx instanceof BooleanLiteralContext ? new Literal(Boolean.valueOf(ctx.getText()), DataType.BOOLEAN) : new Literal(StringUtils.unquoteText(ctx.getText()), DataType.STRING); } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java index 1f0e6f0d52..d28e5d122b 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import com.google.common.base.Strings; @@ -54,9 +53,7 @@ import org.opensearch.sql.planner.logical.LogicalRename; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Utility class to mask sensitive information in incoming PPL queries. - */ +/** Utility class to mask sensitive information in incoming PPL queries. */ public class PPLQueryDataAnonymizer extends AbstractNodeVisitor { private static final String MASK_LITERAL = "***"; @@ -68,8 +65,8 @@ public PPLQueryDataAnonymizer() { } /** - * This method is used to anonymize sensitive data in PPL query. - * Sensitive data includes user data., + * This method is used to anonymize sensitive data in PPL query. Sensitive data includes user + * data. * * @return ppl query string with all user data replace with "***" */ @@ -81,9 +78,7 @@ public String anonymizeStatement(Statement plan) { return plan.accept(this, null); } - /** - * Handle Query Statement. - */ + /** Handle Query Statement. 
*/ @Override public String visitQuery(Query node, String context) { return node.getPlan().accept(this, null); @@ -103,8 +98,9 @@ public String visitRelation(Relation node, String context) { public String visitTableFunction(TableFunction node, String context) { String arguments = node.getArguments().stream() - .map(unresolvedExpression - -> this.expressionAnalyzer.analyze(unresolvedExpression, context)) + .map( + unresolvedExpression -> + this.expressionAnalyzer.analyze(unresolvedExpression, context)) .collect(Collectors.joining(",")); return StringUtils.format("source=%s(%s)", node.getFunctionName().toString(), arguments); } @@ -116,37 +112,34 @@ public String visitFilter(Filter node, String context) { return StringUtils.format("%s | where %s", child, condition); } - /** - * Build {@link LogicalRename}. - */ + /** Build {@link LogicalRename}. */ @Override public String visitRename(Rename node, String context) { String child = node.getChild().get(0).accept(this, context); ImmutableMap.Builder renameMapBuilder = new ImmutableMap.Builder<>(); for (Map renameMap : node.getRenameList()) { - renameMapBuilder.put(visitExpression(renameMap.getOrigin()), + renameMapBuilder.put( + visitExpression(renameMap.getOrigin()), ((Field) renameMap.getTarget()).getField().toString()); } String renames = - renameMapBuilder.build().entrySet().stream().map(entry -> StringUtils.format("%s as %s", - entry.getKey(), entry.getValue())).collect(Collectors.joining(",")); + renameMapBuilder.build().entrySet().stream() + .map(entry -> StringUtils.format("%s as %s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",")); return StringUtils.format("%s | rename %s", child, renames); } - /** - * Build {@link LogicalAggregation}. - */ + /** Build {@link LogicalAggregation}. 
*/ @Override public String visitAggregation(Aggregation node, String context) { String child = node.getChild().get(0).accept(this, context); final String group = visitExpressionList(node.getGroupExprList()); - return StringUtils.format("%s | stats %s", child, - String.join(" ", visitExpressionList(node.getAggExprList()), groupBy(group)).trim()); + return StringUtils.format( + "%s | stats %s", + child, String.join(" ", visitExpressionList(node.getAggExprList()), groupBy(group)).trim()); } - /** - * Build {@link LogicalRareTopN}. - */ + /** Build {@link LogicalRareTopN}. */ @Override public String visitRareTopN(RareTopN node, String context) { final String child = node.getChild().get(0).accept(this, context); @@ -154,16 +147,15 @@ public String visitRareTopN(RareTopN node, String context) { Integer noOfResults = (Integer) options.get(0).getValue().getValue(); String fields = visitFieldList(node.getFields()); String group = visitExpressionList(node.getGroupExprList()); - return StringUtils.format("%s | %s %d %s", child, + return StringUtils.format( + "%s | %s %d %s", + child, node.getCommandType().name().toLowerCase(), noOfResults, - String.join(" ", fields, groupBy(group)).trim() - ); + String.join(" ", fields, groupBy(group)).trim()); } - /** - * Build {@link LogicalProject} or {@link LogicalRemove} from {@link Field}. - */ + /** Build {@link LogicalProject} or {@link LogicalRemove} from {@link Field}. */ @Override public String visitProject(Project node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -180,9 +172,7 @@ public String visitProject(Project node, String context) { return StringUtils.format("%s | fields %s %s", child, arg, fields); } - /** - * Build {@link LogicalEval}. - */ + /** Build {@link LogicalEval}. 
*/ @Override public String visitEval(Eval node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -192,14 +182,14 @@ public String visitEval(Eval node, String context) { String target = let.getVar().getField().toString(); expressionsBuilder.add(ImmutablePair.of(target, expression)); } - String expressions = expressionsBuilder.build().stream().map(pair -> StringUtils.format("%s" - + "=%s", pair.getLeft(), pair.getRight())).collect(Collectors.joining(" ")); + String expressions = + expressionsBuilder.build().stream() + .map(pair -> StringUtils.format("%s" + "=%s", pair.getLeft(), pair.getRight())) + .collect(Collectors.joining(" ")); return StringUtils.format("%s | eval %s", child, expressions); } - /** - * Build {@link LogicalSort}. - */ + /** Build {@link LogicalSort}. */ @Override public String visitSort(Sort node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -208,9 +198,7 @@ public String visitSort(Sort node, String context) { return StringUtils.format("%s | sort %s", child, sortList); } - /** - * Build {@link LogicalDedupe}. - */ + /** Build {@link LogicalDedupe}. */ @Override public String visitDedupe(Dedupe node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -220,10 +208,9 @@ public String visitDedupe(Dedupe node, String context) { Boolean keepEmpty = (Boolean) options.get(1).getValue().getValue(); Boolean consecutive = (Boolean) options.get(2).getValue().getValue(); - return StringUtils - .format("%s | dedup %s %d keepempty=%b consecutive=%b", child, fields, allowedDuplication, - keepEmpty, - consecutive); + return StringUtils.format( + "%s | dedup %s %d keepempty=%b consecutive=%b", + child, fields, allowedDuplication, keepEmpty, consecutive); } @Override @@ -238,8 +225,9 @@ private String visitFieldList(List fieldList) { } private String visitExpressionList(List expressionList) { - return expressionList.isEmpty() ? 
"" : - expressionList.stream().map(this::visitExpression).collect(Collectors.joining(",")); + return expressionList.isEmpty() + ? "" + : expressionList.stream().map(this::visitExpression).collect(Collectors.joining(",")); } private String visitExpression(UnresolvedExpression expression) { @@ -250,11 +238,8 @@ private String groupBy(String groupBy) { return Strings.isNullOrEmpty(groupBy) ? "" : StringUtils.format("by %s", groupBy); } - /** - * Expression Anonymizer. - */ - private static class AnonymizerExpressionAnalyzer extends AbstractNodeVisitor { + /** Expression Anonymizer. */ + private static class AnonymizerExpressionAnalyzer extends AbstractNodeVisitor { public String analyze(UnresolvedExpression unresolved, String context) { return unresolved.accept(this, context); diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java index 4fb9eee6a0..a502f2d769 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import com.google.common.collect.ImmutableList; @@ -12,15 +11,11 @@ import org.opensearch.sql.ast.tree.Project; import org.opensearch.sql.ast.tree.UnresolvedPlan; -/** - * The helper to add select to {@link UnresolvedPlan} if needed. - */ +/** The helper to add select to {@link UnresolvedPlan} if needed. */ @UtilityClass public class UnresolvedPlanHelper { - /** - * Attach Select All to PPL commands if required. - */ + /** Attach Select All to PPL commands if required. 
*/ public UnresolvedPlan addSelectAll(UnresolvedPlan plan) { if ((plan instanceof Project) && !((Project) plan).isExcluded()) { return plan; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java index c14eb3dba1..598f6691cb 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.mockito.ArgumentMatchers.any; @@ -41,21 +40,17 @@ public class PPLServiceTest { private DefaultQueryManager queryManager; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private ExecutionEngine.Schema schema; + @Mock private ExecutionEngine.Schema schema; - /** - * Setup the test context. - */ + /** Setup the test context. */ @Before public void setUp() { queryManager = DefaultQueryManager.defaultQueryManager(); - pplService = new PPLService(new PPLSyntaxParser(), queryManager, - new QueryPlanFactory(queryService)); + pplService = + new PPLService(new PPLSyntaxParser(), queryManager, new QueryPlanFactory(queryService)); } @After @@ -65,18 +60,20 @@ public void cleanup() throws InterruptedException { @Test public void testExecuteShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("search source=t a=1", null, QUERY), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("search source=t a=1", null, QUERY), 
new ResponseListener() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - - } + public void onResponse(QueryResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -87,17 +84,20 @@ public void onFailure(Exception e) { @Test public void testExecuteCsvFormatShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("search source=t a=1", null, QUERY, "csv"), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("search source=t a=1", null, QUERY, "csv"), new ResponseListener() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - } + public void onResponse(QueryResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -108,17 +108,20 @@ public void onFailure(Exception e) { @Test public void testExplainShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new ExplainResponse(new ExplainResponseNode("test"))); - return null; - }).when(queryService).explain(any(), any()); - - pplService.explain(new PPLQueryRequest("search source=t a=1", null, EXPLAIN), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new ExplainResponse(new ExplainResponseNode("test"))); + return null; + }) + .when(queryService) + .explain(any(), any()); + + pplService.explain( + new PPLQueryRequest("search source=t a=1", null, EXPLAIN), new ResponseListener() { @Override - public void onResponse(ExplainResponse pplQueryResponse) { - 
} + public void onResponse(ExplainResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -129,7 +132,8 @@ public void onFailure(Exception e) { @Test public void testExecuteWithIllegalQueryShouldBeCaughtByHandler() { - pplService.execute(new PPLQueryRequest("search", null, QUERY), + pplService.execute( + new PPLQueryRequest("search", null, QUERY), new ResponseListener() { @Override public void onResponse(QueryResponse pplQueryResponse) { @@ -137,15 +141,14 @@ public void onResponse(QueryResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } @Test public void testExplainWithIllegalQueryShouldBeCaughtByHandler() { - pplService.explain(new PPLQueryRequest("search", null, QUERY), + pplService.explain( + new PPLQueryRequest("search", null, QUERY), new ResponseListener<>() { @Override public void onResponse(ExplainResponse pplQueryResponse) { @@ -153,26 +156,26 @@ public void onResponse(ExplainResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } @Test public void testPrometheusQuery() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("source = prometheus.http_requests_total", null, QUERY), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("source = prometheus.http_requests_total", null, QUERY), new ResponseListener<>() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - - } + public void onResponse(QueryResponse 
pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -183,7 +186,8 @@ public void onFailure(Exception e) { @Test public void testInvalidPPLQuery() { - pplService.execute(new PPLQueryRequest("search", null, QUERY), + pplService.execute( + new PPLQueryRequest("search", null, QUERY), new ResponseListener() { @Override public void onResponse(QueryResponse pplQueryResponse) { @@ -191,9 +195,7 @@ public void onResponse(QueryResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java index 9f635fdd81..f6a04983e2 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import static org.junit.Assert.assertNotEquals; @@ -21,6 +20,7 @@ public class NowLikeFunctionParserTest { /** * Set parameterized values used in test. + * * @param name Function name * @param hasFsp Whether function has fsp argument * @param hasShortcut Whether function has shortcut (call without `()`) @@ -33,24 +33,26 @@ public NowLikeFunctionParserTest(String name, Boolean hasFsp, Boolean hasShortcu /** * Returns function data to test. + * * @return An iterable. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable functionNames() { - return List.of(new Object[][]{ - {"now", true, false}, - {"current_timestamp", true, true}, - {"localtimestamp", true, true}, - {"localtime", true, true}, - {"sysdate", true, false}, - {"curtime", true, false}, - {"current_time", true, true}, - {"curdate", false, false}, - {"current_date", false, true}, - {"utc_date", false, false}, - {"utc_time", false, false}, - {"utc_timestamp", false, false} - }); + return List.of( + new Object[][] { + {"now", true, false}, + {"current_timestamp", true, true}, + {"localtimestamp", true, true}, + {"localtime", true, true}, + {"sysdate", true, false}, + {"curtime", true, false}, + {"current_time", true, true}, + {"curdate", false, false}, + {"current_date", false, true}, + {"utc_date", false, false}, + {"utc_time", false, false}, + {"utc_timestamp", false, false} + }); } private final String name; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java index dd146ea2cf..7de197028e 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java @@ -13,26 +13,24 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - @RunWith(Parameterized.class) public class PPLSyntaxParserMatchBoolPrefixSamplesTests { - - /** Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + /** + * Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + * * @return an Iterable of sample queries. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable sampleQueries() { return List.of( "source=t a= 1 | where match_bool_prefix(a, 'hello world')", - "source=t a = 1 | where match_bool_prefix(a, 'hello world'," - + " minimum_should_match = 3)", + "source=t a = 1 | where match_bool_prefix(a, 'hello world'," + " minimum_should_match = 3)", "source=t a = 1 | where match_bool_prefix(a, 'hello world', fuzziness='AUTO')", "source=t a = 1 | where match_bool_prefix(a, 'hello world', fuzziness='AUTO:4,6')", "source=t a= 1 | where match_bool_prefix(a, 'hello world', prefix_length=0)", "source=t a= 1 | where match_bool_prefix(a, 'hello world', max_expansions=1)", - "source=t a= 1 | where match_bool_prefix(a, 'hello world'," - + " fuzzy_transpositions=true)", + "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_transpositions=true)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=constant_score)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," @@ -43,8 +41,7 @@ public static Iterable sampleQueries() { + " fuzzy_rewrite=top_terms_blended_freqs_1)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=top_terms_boost_1)", - "source=t a= 1 | where match_bool_prefix(a, 'hello world'," - + " fuzzy_rewrite=top_terms_1)", + "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=top_terms_1)", "source=t a= 1 | where match_bool_prefix(a, 'hello world', boost=1)", "source=t a = 1 | where match_bool_prefix(a, 'hello world', analyzer = 'standard'," + "prefix_length = '0', boost = 1)"); diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java index aef6d1d69e..94222ec103 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java +++ 
b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java @@ -13,22 +13,22 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - @RunWith(Parameterized.class) public class PPLSyntaxParserMatchPhraseSamplesTest { - - /** Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + /** + * Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + * * @return an Iterable of sample queries. */ @Parameterized.Parameters(name = "{0}") public static Iterable sampleQueries() { return List.of( - "source=t a= 1 | where match_phrase(a, 'hello world')", - "source=t a = 1 | where match_phrase(a, 'hello world', slop = 3)", - "source=t a = 1 | where match_phrase(a, 'hello world', analyzer = 'standard'," - + "zero_terms_query = 'none', slop = 3)", - "source=t a = 1 | where match_phrase(a, 'hello world', zero_terms_query = all)"); + "source=t a= 1 | where match_phrase(a, 'hello world')", + "source=t a = 1 | where match_phrase(a, 'hello world', slop = 3)", + "source=t a = 1 | where match_phrase(a, 'hello world', analyzer = 'standard'," + + "zero_terms_query = 'none', slop = 3)", + "source=t a = 1 | where match_phrase(a, 'hello world', zero_terms_query = all)"); } private final String query; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java index 57cee7fa1d..943953d416 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import static org.junit.Assert.assertNotEquals; @@ -19,8 +18,7 @@ public class PPLSyntaxParserTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = 
ExpectedException.none(); @Test public void testSearchCommandShouldPass() { @@ -140,99 +138,170 @@ public void testTopCommandWithoutNAndGroupByShouldPass() { @Test public void testCanParseMultiMatchRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"address\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([address], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE 
multi_match(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes'], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([\"address\"], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([`address`], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([address], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); } @Test public void testCanParseSimpleQueryStringRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes'], 'query')")); - 
assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"address\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([address], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE simple_query_string([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + 
.parse("SOURCE=test | WHERE simple_query_string([\"address\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string([`address`], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE simple_query_string([address], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2]," + + " 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query',analyzer=keyword, quote_field_suffix=\".exact\"," + + " fuzzy_prefix_length = 4)")); } @Test public void testCanParseQueryStringRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"address\"], 'query')")); - 
assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([address], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes'], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([\"address\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([`address`], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([address], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + 
.parse("SOURCE=test | WHERE query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); } @Test @@ -275,15 +344,35 @@ public void testDescribeCommandWithSourceShouldFail() { @Test public void testCanParseExtractFunction() { - String[] parts = List.of("MICROSECOND", "SECOND", "MINUTE", "HOUR", "DAY", - "WEEK", "MONTH", "QUARTER", "YEAR", "SECOND_MICROSECOND", - "MINUTE_MICROSECOND", "MINUTE_SECOND", "HOUR_MICROSECOND", - "HOUR_SECOND", "HOUR_MINUTE", "DAY_MICROSECOND", - "DAY_SECOND", "DAY_MINUTE", "DAY_HOUR", "YEAR_MONTH").toArray(new String[0]); + String[] parts = + List.of( + "MICROSECOND", + "SECOND", + "MINUTE", + "HOUR", + "DAY", + "WEEK", + "MONTH", + "QUARTER", + "YEAR", + "SECOND_MICROSECOND", + "MINUTE_MICROSECOND", + "MINUTE_SECOND", + "HOUR_MICROSECOND", + "HOUR_SECOND", + "HOUR_MINUTE", + "DAY_MICROSECOND", + "DAY_SECOND", + "DAY_MINUTE", + "DAY_HOUR", + "YEAR_MONTH") + .toArray(new String[0]); for (String part : parts) { - assertNotNull(new PPLSyntaxParser().parse( - String.format("SOURCE=test | eval k = extract(%s FROM \"2023-02-06\")", part))); + 
assertNotNull( + new PPLSyntaxParser() + .parse( + String.format("SOURCE=test | eval k = extract(%s FROM \"2023-02-06\")", part))); } } @@ -294,8 +383,9 @@ public void testCanParseGetFormatFunction() { for (String type : types) { for (String format : formats) { - assertNotNull(new PPLSyntaxParser().parse( - String.format("SOURCE=test | eval k = get_format(%s, %s)", type, format))); + assertNotNull( + new PPLSyntaxParser() + .parse(String.format("SOURCE=test | eval k = get_format(%s, %s)", type, format))); } } } @@ -303,24 +393,28 @@ public void testCanParseGetFormatFunction() { @Test public void testCannotParseGetFormatFunctionWithBadArg() { assertThrows( - SyntaxCheckException.class, - () -> new PPLSyntaxParser().parse( - "SOURCE=test | eval k = GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); + SyntaxCheckException.class, + () -> + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); } @Test public void testCanParseTimestampaddFunction() { - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPADD(MINUTE, 1, '2003-01-02')")); - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPADD(WEEK,1,'2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPADD(MINUTE, 1, '2003-01-02')")); + assertNotNull( + new PPLSyntaxParser().parse("SOURCE=test | eval k = TIMESTAMPADD(WEEK,1,'2003-01-02')")); } @Test public void testCanParseTimestampdiffFunction() { - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')")); - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPDIFF(WEEK,'2003-01-02','2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPDIFF(WEEK,'2003-01-02','2003-01-02')")); } } 
diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java index b53656e252..29e6ff3298 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import static org.junit.Assert.assertEquals; @@ -16,8 +15,7 @@ public class PPLQueryRequestTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Test public void getRequestShouldPass() { @@ -27,40 +25,34 @@ public void getRequestShouldPass() { @Test public void testExplainRequest() { - PPLQueryRequest request = new PPLQueryRequest( - "source=t a=1", null, "/_plugins/_ppl/_explain"); + PPLQueryRequest request = new PPLQueryRequest("source=t a=1", null, "/_plugins/_ppl/_explain"); assertTrue(request.isExplainRequest()); } @Test public void testDefaultFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl"); assertEquals(request.format(), Format.JDBC); } @Test public void testJDBCFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", "jdbc"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", "jdbc"); assertEquals(request.format(), Format.JDBC); } @Test public void testCSVFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", "csv"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", "csv"); assertEquals(request.format(), Format.CSV); } @Test public void testUnsupportedFormat() { String format = "notsupport"; - PPLQueryRequest 
request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", format); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", format); exceptionRule.expect(IllegalArgumentException.class); exceptionRule.expectMessage("response in " + format + " format is not supported."); request.format(); } - } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java index 03eaaf22f4..50be4efa2e 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import org.junit.Test; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java index 599f6bdd75..c9989a49c4 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static java.util.Collections.emptyList; @@ -62,353 +61,264 @@ public class AstBuilderTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private PPLSyntaxParser parser = new PPLSyntaxParser(); @Test public void testSearchCommand() { - assertEqual("search source=t a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual( + "search source=t a=1", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testSearchCrossClusterCommand() { - assertEqual("search source=c:t", - relation(qualifiedName("c:t")) - ); + assertEqual("search source=c:t", 
relation(qualifiedName("c:t"))); } @Test public void testSearchMatchAllCrossClusterCommand() { - assertEqual("search source=*:t", - relation(qualifiedName("*:t")) - ); + assertEqual("search source=*:t", relation(qualifiedName("*:t"))); } @Test public void testPrometheusSearchCommand() { - assertEqual("search source = prometheus.http_requests_total", - relation(qualifiedName("prometheus", "http_requests_total")) - ); + assertEqual( + "search source = prometheus.http_requests_total", + relation(qualifiedName("prometheus", "http_requests_total"))); } @Test public void testSearchCommandWithDataSourceEscape() { - assertEqual("search source = `prometheus.http_requests_total`", - relation("prometheus.http_requests_total") - ); + assertEqual( + "search source = `prometheus.http_requests_total`", + relation("prometheus.http_requests_total")); } @Test public void testSearchCommandWithDotInIndexName() { - assertEqual("search source = http_requests_total.test", - relation(qualifiedName("http_requests_total","test")) - ); + assertEqual( + "search source = http_requests_total.test", + relation(qualifiedName("http_requests_total", "test"))); } @Test public void testSearchWithPrometheusQueryRangeWithPositionedArguments() { - assertEqual("search source = prometheus.query_range(\"test{code='200'}\",1234, 12345, 3)", - tableFunction(Arrays.asList("prometheus", "query_range"), + assertEqual( + "search source = prometheus.query_range(\"test{code='200'}\",1234, 12345, 3)", + tableFunction( + Arrays.asList("prometheus", "query_range"), unresolvedArg(null, stringLiteral("test{code='200'}")), unresolvedArg(null, intLiteral(1234)), unresolvedArg(null, intLiteral(12345)), - unresolvedArg(null, intLiteral(3)) - )); + unresolvedArg(null, intLiteral(3)))); } @Test public void testSearchWithPrometheusQueryRangeWithNamedArguments() { - assertEqual("search source = prometheus.query_range(query = \"test{code='200'}\", " + assertEqual( + "search source = prometheus.query_range(query = 
\"test{code='200'}\", " + "starttime = 1234, step=3, endtime=12345)", - tableFunction(Arrays.asList("prometheus", "query_range"), + tableFunction( + Arrays.asList("prometheus", "query_range"), unresolvedArg("query", stringLiteral("test{code='200'}")), unresolvedArg("starttime", intLiteral(1234)), unresolvedArg("step", intLiteral(3)), - unresolvedArg("endtime", intLiteral(12345)) - )); + unresolvedArg("endtime", intLiteral(12345)))); } @Test public void testSearchCommandString() { - assertEqual("search source=t a=\"a\"", - filter( - relation("t"), - compare("=", field("a"), stringLiteral("a")) - ) - ); + assertEqual( + "search source=t a=\"a\"", + filter(relation("t"), compare("=", field("a"), stringLiteral("a")))); } @Test public void testSearchCommandWithoutSearch() { - assertEqual("source=t a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual("source=t a=1", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testSearchCommandWithFilterBeforeSource() { - assertEqual("search a=1 source=t", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - )); + assertEqual( + "search a=1 source=t", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testWhereCommand() { - assertEqual("search source=t | where a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual( + "search source=t | where a=1", + filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testWhereCommandWithQualifiedName() { - assertEqual("search source=t | where a.v=1", - filter( - relation("t"), - compare("=", field(qualifiedName("a", "v")), intLiteral(1)) - ) - ); + assertEqual( + "search source=t | where a.v=1", + filter(relation("t"), compare("=", field(qualifiedName("a", "v")), intLiteral(1)))); } @Test public void testFieldsCommandWithoutArguments() { - assertEqual("source=t | fields f, g", - projectWithArg( 
- relation("t"), - defaultFieldsArgs(), - field("f"), field("g") - )); + assertEqual( + "source=t | fields f, g", + projectWithArg(relation("t"), defaultFieldsArgs(), field("f"), field("g"))); } @Test public void testFieldsCommandWithIncludeArguments() { - assertEqual("source=t | fields + f, g", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field("f"), field("g") - )); + assertEqual( + "source=t | fields + f, g", + projectWithArg(relation("t"), defaultFieldsArgs(), field("f"), field("g"))); } @Test public void testFieldsCommandWithExcludeArguments() { - assertEqual("source=t | fields - f, g", + assertEqual( + "source=t | fields - f, g", projectWithArg( relation("t"), exprList(argument("exclude", booleanLiteral(true))), - field("f"), field("g") - )); + field("f"), + field("g"))); } @Test public void testSearchCommandWithQualifiedName() { - assertEqual("source=t | fields f.v, g.v", + assertEqual( + "source=t | fields f.v, g.v", projectWithArg( relation("t"), defaultFieldsArgs(), - field(qualifiedName("f", "v")), field(qualifiedName("g", "v")) - )); + field(qualifiedName("f", "v")), + field(qualifiedName("g", "v")))); } @Test public void testRenameCommand() { - assertEqual("source=t | rename f as g", - rename( - relation("t"), - map("f", "g") - )); + assertEqual("source=t | rename f as g", rename(relation("t"), map("f", "g"))); } @Test public void testRenameCommandWithMultiFields() { - assertEqual("source=t | rename f as g, h as i, j as k", - rename( - relation("t"), - map("f", "g"), - map("h", "i"), - map("j", "k") - )); + assertEqual( + "source=t | rename f as g, h as i, j as k", + rename(relation("t"), map("f", "g"), map("h", "i"), map("j", "k"))); } @Test public void testStatsCommand() { - assertEqual("source=t | stats count(a)", + assertEqual( + "source=t | stats count(a)", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), 
emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testStatsCommandWithByClause() { - assertEqual("source=t | stats count(a) by b DEDUP_SPLITVALUES=false", + assertEqual( + "source=t | stats count(a) by b DEDUP_SPLITVALUES=false", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStatsCommandWithByClauseInBackticks() { - assertEqual("source=t | stats count(a) by `b` DEDUP_SPLITVALUES=false", + assertEqual( + "source=t | stats count(a) by `b` DEDUP_SPLITVALUES=false", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStatsCommandWithAlias() { - assertEqual("source=t | stats count(a) as alias", + assertEqual( + "source=t | stats count(a) as alias", agg( relation("t"), - exprList( - alias( - "alias", - aggregate("count", field("a")) - ) - ), + exprList(alias("alias", aggregate("count", field("a")))), emptyList(), emptyList(), - defaultStatsArgs() - ) - ); + defaultStatsArgs())); } @Test public void testStatsCommandWithNestedFunctions() { - assertEqual("source=t | stats sum(a+b)", + assertEqual( + "source=t | stats sum(a+b)", agg( relation("t"), - exprList( - alias( - "sum(a+b)", - aggregate( - "sum", - function("+", field("a"), field("b")) - )) - ), + exprList(alias("sum(a+b)", aggregate("sum", function("+", field("a"), field("b"))))), emptyList(), emptyList(), - defaultStatsArgs() - )); - assertEqual("source=t | stats sum(abs(a)/2)", + defaultStatsArgs())); + assertEqual( + "source=t 
| stats sum(abs(a)/2)", agg( relation("t"), exprList( alias( "sum(abs(a)/2)", - aggregate( - "sum", - function( - "/", - function("abs", field("a")), - intLiteral(2) - ) - ) - ) - ), + aggregate("sum", function("/", function("abs", field("a")), intLiteral(2))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testStatsCommandWithSpan() { - assertEqual("source=t | stats avg(price) by span(timestamp, 1h)", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h)", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), emptyList(), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats count(a) by span(age, 10)", + assertEqual( + "source=t | stats count(a) by span(age, 10)", agg( relation("t"), - exprList( - alias("count(a)", aggregate("count", field("a"))) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), emptyList(), alias("span(age,10)", span(field("age"), intLiteral(10), SpanUnit.NONE)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats avg(price) by span(timestamp, 1h), b", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h), b", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), exprList(alias("b", field("b"))), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats avg(price) by span(timestamp, 1h), f1, f2", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h), f1, f2", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", 
field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), exprList(alias("f1", field("f1")), alias("f2", field("f2"))), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test(expected = org.opensearch.sql.common.antlr.SyntaxCheckException.class) @@ -423,152 +333,128 @@ public void throwExceptionWithEmptyGroupByList() { @Test public void testStatsSpanWithAlias() { - assertEqual("source=t | stats avg(price) by span(timestamp, 1h) as time_span", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h) as time_span", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), emptyList(), - alias("span(timestamp,1h)", span( - field("timestamp"), intLiteral(1), SpanUnit.H), "time_span"), - defaultStatsArgs() - )); + alias( + "span(timestamp,1h)", + span(field("timestamp"), intLiteral(1), SpanUnit.H), + "time_span"), + defaultStatsArgs())); - assertEqual("source=t | stats count(a) by span(age, 10) as numeric_span", + assertEqual( + "source=t | stats count(a) by span(age, 10) as numeric_span", agg( relation("t"), - exprList( - alias("count(a)", aggregate("count", field("a"))) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), emptyList(), - alias("span(age,10)", span( - field("age"), intLiteral(10), SpanUnit.NONE), "numeric_span"), - defaultStatsArgs() - )); + alias( + "span(age,10)", span(field("age"), intLiteral(10), SpanUnit.NONE), "numeric_span"), + defaultStatsArgs())); } @Test public void testDedupCommand() { - assertEqual("source=t | dedup f1, f2", - dedupe( - relation("t"), - defaultDedupArgs(), - field("f1"), field("f2") - )); + assertEqual( + "source=t | dedup f1, f2", + dedupe(relation("t"), defaultDedupArgs(), field("f1"), field("f2"))); } - /** - * disable sortby from the dedup 
command syntax. - */ + /** disable sortby from the dedup command syntax. */ @Ignore(value = "disable sortby from the dedup command syntax") public void testDedupCommandWithSortby() { - assertEqual("source=t | dedup f1, f2 sortby f3", + assertEqual( + "source=t | dedup f1, f2 sortby f3", agg( relation("t"), exprList(field("f1"), field("f2")), exprList(field("f3", defaultSortFieldArgs())), null, - defaultDedupArgs() - )); + defaultDedupArgs())); } @Test public void testHeadCommand() { - assertEqual("source=t | head", - head(relation("t"), 10, 0)); + assertEqual("source=t | head", head(relation("t"), 10, 0)); } @Test public void testHeadCommandWithNumber() { - assertEqual("source=t | head 3", - head(relation("t"), 3, 0)); + assertEqual("source=t | head 3", head(relation("t"), 3, 0)); } @Test public void testHeadCommandWithNumberAndOffset() { - assertEqual("source=t | head 3 from 4", - head(relation("t"), 3, 4)); + assertEqual("source=t | head 3 from 4", head(relation("t"), 3, 4)); } @Test public void testSortCommand() { - assertEqual("source=t | sort f1, f2", + assertEqual( + "source=t | sort f1, f2", sort( relation("t"), field("f1", defaultSortFieldArgs()), - field("f2", defaultSortFieldArgs()) - )); + field("f2", defaultSortFieldArgs()))); } @Test public void testSortCommandWithOptions() { - assertEqual("source=t | sort - f1, + f2", + assertEqual( + "source=t | sort - f1, + f2", sort( relation("t"), - field("f1", exprList(argument("asc", booleanLiteral(false)), - argument("type", nullLiteral()))), - field("f2", defaultSortFieldArgs()) - )); + field( + "f1", + exprList(argument("asc", booleanLiteral(false)), argument("type", nullLiteral()))), + field("f2", defaultSortFieldArgs()))); } @Test public void testEvalCommand() { - assertEqual("source=t | eval r=abs(f)", - eval( - relation("t"), - let( - field("r"), - function("abs", field("f")) - ) - )); + assertEqual( + "source=t | eval r=abs(f)", + eval(relation("t"), let(field("r"), function("abs", field("f"))))); } 
@Test public void testIndexName() { - assertEqual("source=`log.2020.04.20.` a=1", - filter( - relation("log.2020.04.20."), - compare("=", field("a"), intLiteral(1)) - )); - assertEqual("describe `log.2020.04.20.`", - relation(mappingTable("log.2020.04.20."))); + assertEqual( + "source=`log.2020.04.20.` a=1", + filter(relation("log.2020.04.20."), compare("=", field("a"), intLiteral(1)))); + assertEqual("describe `log.2020.04.20.`", relation(mappingTable("log.2020.04.20."))); } @Test public void testIdentifierAsIndexNameStartWithDot() { - assertEqual("source=.opensearch_dashboards", - relation(".opensearch_dashboards")); - assertEqual("describe .opensearch_dashboards", - relation(mappingTable(".opensearch_dashboards"))); + assertEqual("source=.opensearch_dashboards", relation(".opensearch_dashboards")); + assertEqual( + "describe .opensearch_dashboards", relation(mappingTable(".opensearch_dashboards"))); } @Test public void testIdentifierAsIndexNameWithDotInTheMiddle() { assertEqual("source=log.2020.10.10", relation("log.2020.10.10")); assertEqual("source=log-7.10-2020.10.10", relation("log-7.10-2020.10.10")); - assertEqual("describe log.2020.10.10", - relation(mappingTable("log.2020.10.10"))); - assertEqual("describe log-7.10-2020.10.10", - relation(mappingTable("log-7.10-2020.10.10"))); + assertEqual("describe log.2020.10.10", relation(mappingTable("log.2020.10.10"))); + assertEqual("describe log-7.10-2020.10.10", relation(mappingTable("log-7.10-2020.10.10"))); } @Test public void testIdentifierAsIndexNameWithSlashInTheMiddle() { - assertEqual("source=log-2020", - relation("log-2020")); - assertEqual("describe log-2020", - relation(mappingTable("log-2020"))); + assertEqual("source=log-2020", relation("log-2020")); + assertEqual("describe log-2020", relation(mappingTable("log-2020"))); } @Test public void testIdentifierAsIndexNameContainStar() { - assertEqual("source=log-2020-10-*", - relation("log-2020-10-*")); - assertEqual("describe log-2020-10-*", - 
relation(mappingTable("log-2020-10-*"))); + assertEqual("source=log-2020-10-*", relation("log-2020-10-*")); + assertEqual("describe log-2020-10-*", relation(mappingTable("log-2020-10-*"))); } @Test @@ -576,138 +462,132 @@ public void testIdentifierAsIndexNameContainStarAndDots() { assertEqual("source=log-2020.10.*", relation("log-2020.10.*")); assertEqual("source=log-2020.*.01", relation("log-2020.*.01")); assertEqual("source=log-2020.*.*", relation("log-2020.*.*")); - assertEqual("describe log-2020.10.*", - relation(mappingTable("log-2020.10.*"))); - assertEqual("describe log-2020.*.01", - relation(mappingTable("log-2020.*.01"))); - assertEqual("describe log-2020.*.*", - relation(mappingTable("log-2020.*.*"))); + assertEqual("describe log-2020.10.*", relation(mappingTable("log-2020.10.*"))); + assertEqual("describe log-2020.*.01", relation(mappingTable("log-2020.*.01"))); + assertEqual("describe log-2020.*.*", relation(mappingTable("log-2020.*.*"))); } @Test public void testIdentifierAsFieldNameStartWithAt() { - assertEqual("source=log-2020 | fields @timestamp", - projectWithArg( - relation("log-2020"), - defaultFieldsArgs(), - field("@timestamp") - )); + assertEqual( + "source=log-2020 | fields @timestamp", + projectWithArg(relation("log-2020"), defaultFieldsArgs(), field("@timestamp"))); } @Test public void testRareCommand() { - assertEqual("source=t | rare a", + assertEqual( + "source=t | rare a", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testRareCommandWithGroupBy() { - assertEqual("source=t | rare a by b", + assertEqual( + "source=t | rare a by b", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), exprList(field("b")), - field("a") - )); + field("a"))); } @Test public void testRareCommandWithMultipleFields() { - assertEqual("source=t | rare `a`, `b` by `c`", + assertEqual( + "source=t | rare `a`, 
`b` by `c`", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), exprList(field("c")), field("a"), - field("b") - )); + field("b"))); } @Test public void testTopCommandWithN() { - assertEqual("source=t | top 1 a", + assertEqual( + "source=t | top 1 a", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithoutNAndGroupBy() { - assertEqual("source=t | top a", + assertEqual( + "source=t | top a", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(10))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithNAndGroupBy() { - assertEqual("source=t | top 1 a by b", + assertEqual( + "source=t | top 1 a by b", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), exprList(field("b")), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithMultipleFields() { - assertEqual("source=t | top 1 `a`, `b` by `c`", + assertEqual( + "source=t | top 1 `a`, `b` by `c`", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), exprList(field("c")), field("a"), - field("b") - )); + field("b"))); } @Test public void testGrokCommand() { - assertEqual("source=t | grok raw \"pattern\"", + assertEqual( + "source=t | grok raw \"pattern\"", parse( relation("t"), ParseMethod.GROK, field("raw"), stringLiteral("pattern"), - ImmutableMap.of() - )); + ImmutableMap.of())); } @Test public void testParseCommand() { - assertEqual("source=t | parse raw \"pattern\"", + assertEqual( + "source=t | parse raw \"pattern\"", parse( relation("t"), ParseMethod.REGEX, field("raw"), stringLiteral("pattern"), - ImmutableMap.of() - )); + ImmutableMap.of())); } @Test public void testPatternsCommand() { - assertEqual("source=t | patterns new_field=\"custom_field\" " - + "pattern=\"custom_pattern\" 
raw", + assertEqual( + "source=t | patterns new_field=\"custom_field\" " + "pattern=\"custom_pattern\" raw", parse( relation("t"), ParseMethod.PATTERNS, @@ -716,8 +596,7 @@ public void testPatternsCommand() { ImmutableMap.builder() .put("new_field", stringLiteral("custom_field")) .put("pattern", stringLiteral("custom_pattern")) - .build() - )); + .build())); } @Test @@ -734,114 +613,118 @@ public void testPatternsCommandWithoutArguments() { @Test public void testKmeansCommand() { - assertEqual("source=t | kmeans centroids=3 iterations=2 distance_type='l1'", - new Kmeans(relation("t"), ImmutableMap.builder() - .put("centroids", new Literal(3, DataType.INTEGER)) - .put("iterations", new Literal(2, DataType.INTEGER)) - .put("distance_type", new Literal("l1", DataType.STRING)) - .build() - )); + assertEqual( + "source=t | kmeans centroids=3 iterations=2 distance_type='l1'", + new Kmeans( + relation("t"), + ImmutableMap.builder() + .put("centroids", new Literal(3, DataType.INTEGER)) + .put("iterations", new Literal(2, DataType.INTEGER)) + .put("distance_type", new Literal("l1", DataType.STRING)) + .build())); } @Test public void testKmeansCommandWithoutParameter() { - assertEqual("source=t | kmeans", - new Kmeans(relation("t"), ImmutableMap.of())); + assertEqual("source=t | kmeans", new Kmeans(relation("t"), ImmutableMap.of())); } @Test public void testMLCommand() { - assertEqual("source=t | ml action='trainandpredict' " - + "algorithm='kmeans' centroid=3 iteration=2 dist_type='l1'", - new ML(relation("t"), ImmutableMap.builder() - .put("action", new Literal("trainandpredict", DataType.STRING)) - .put("algorithm", new Literal("kmeans", DataType.STRING)) - .put("centroid", new Literal(3, DataType.INTEGER)) - .put("iteration", new Literal(2, DataType.INTEGER)) - .put("dist_type", new Literal("l1", DataType.STRING)) - .build() - )); + assertEqual( + "source=t | ml action='trainandpredict' " + + "algorithm='kmeans' centroid=3 iteration=2 dist_type='l1'", + new ML( + 
relation("t"), + ImmutableMap.builder() + .put("action", new Literal("trainandpredict", DataType.STRING)) + .put("algorithm", new Literal("kmeans", DataType.STRING)) + .put("centroid", new Literal(3, DataType.INTEGER)) + .put("iteration", new Literal(2, DataType.INTEGER)) + .put("dist_type", new Literal("l1", DataType.STRING)) + .build())); } @Test public void testDescribeCommand() { - assertEqual("describe t", - relation(mappingTable("t"))); + assertEqual("describe t", relation(mappingTable("t"))); } @Test public void testDescribeMatchAllCrossClusterSearchCommand() { - assertEqual("describe *:t", - relation(mappingTable("*:t"))); + assertEqual("describe *:t", relation(mappingTable("*:t"))); } @Test public void testDescribeCommandWithMultipleIndices() { - assertEqual("describe t,u", - relation(mappingTable("t,u"))); + assertEqual("describe t,u", relation(mappingTable("t,u"))); } @Test public void testDescribeCommandWithFullyQualifiedTableName() { - assertEqual("describe prometheus.http_metric", + assertEqual( + "describe prometheus.http_metric", relation(qualifiedName("prometheus", mappingTable("http_metric")))); - assertEqual("describe prometheus.schema.http_metric", + assertEqual( + "describe prometheus.schema.http_metric", relation(qualifiedName("prometheus", "schema", mappingTable("http_metric")))); } @Test public void test_fitRCFADCommand_withoutDataFormat() { - assertEqual("source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + assertEqual( + "source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + "anomaly_rate=0.1 anomaly_score_threshold=0.1 sample_size=256 " + "number_of_trees=256 time_zone='PST' output_after=256 " + "training_data_size=256", - new AD(relation("t"), ImmutableMap.builder() - .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) - .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) - .put("sample_size", new Literal(256, DataType.INTEGER)) - .put("number_of_trees", new Literal(256, 
DataType.INTEGER)) - .put("time_zone", new Literal("PST", DataType.STRING)) - .put("output_after", new Literal(256, DataType.INTEGER)) - .put("shingle_size", new Literal(10, DataType.INTEGER)) - .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) - .put("time_field", new Literal("timestamp", DataType.STRING)) - .put("training_data_size", new Literal(256, DataType.INTEGER)) - .build() - )); + new AD( + relation("t"), + ImmutableMap.builder() + .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) + .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) + .put("sample_size", new Literal(256, DataType.INTEGER)) + .put("number_of_trees", new Literal(256, DataType.INTEGER)) + .put("time_zone", new Literal("PST", DataType.STRING)) + .put("output_after", new Literal(256, DataType.INTEGER)) + .put("shingle_size", new Literal(10, DataType.INTEGER)) + .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) + .put("time_field", new Literal("timestamp", DataType.STRING)) + .put("training_data_size", new Literal(256, DataType.INTEGER)) + .build())); } @Test public void test_fitRCFADCommand_withDataFormat() { - assertEqual("source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + assertEqual( + "source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + "anomaly_rate=0.1 anomaly_score_threshold=0.1 sample_size=256 " + "number_of_trees=256 time_zone='PST' output_after=256 " + "training_data_size=256 date_format='HH:mm:ss yyyy-MM-dd'", - new AD(relation("t"), ImmutableMap.builder() - .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) - .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) - .put("sample_size", new Literal(256, DataType.INTEGER)) - .put("number_of_trees", new Literal(256, DataType.INTEGER)) - .put("date_format", new Literal("HH:mm:ss yyyy-MM-dd", DataType.STRING)) - .put("time_zone", new Literal("PST", DataType.STRING)) - .put("output_after", new Literal(256, DataType.INTEGER)) - 
.put("shingle_size", new Literal(10, DataType.INTEGER)) - .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) - .put("time_field", new Literal("timestamp", DataType.STRING)) - .put("training_data_size", new Literal(256, DataType.INTEGER)) - .build() - )); + new AD( + relation("t"), + ImmutableMap.builder() + .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) + .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) + .put("sample_size", new Literal(256, DataType.INTEGER)) + .put("number_of_trees", new Literal(256, DataType.INTEGER)) + .put("date_format", new Literal("HH:mm:ss yyyy-MM-dd", DataType.STRING)) + .put("time_zone", new Literal("PST", DataType.STRING)) + .put("output_after", new Literal(256, DataType.INTEGER)) + .put("shingle_size", new Literal(10, DataType.INTEGER)) + .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) + .put("time_field", new Literal("timestamp", DataType.STRING)) + .put("training_data_size", new Literal(256, DataType.INTEGER)) + .build())); } @Test public void test_batchRCFADCommand() { - assertEqual("source=t | AD", - new AD(relation("t"), ImmutableMap.of())); + assertEqual("source=t | AD", new AD(relation("t"), ImmutableMap.of())); } @Test public void testShowDataSourcesCommand() { - assertEqual("show datasources", - relation(DATASOURCES_TABLE_NAME)); + assertEqual("show datasources", relation(DATASOURCES_TABLE_NAME)); } protected void assertEqual(String query, Node expectedPlan) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java index 8472e61361..aa25a6fcc6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static java.util.Collections.emptyList; @@ -58,464 
+57,313 @@ public class AstExpressionBuilderTest extends AstBuilderTest { @Test public void testLogicalNotExpr() { - assertEqual("source=t not a=1", - filter( - relation("t"), - not( - compare("=", field("a"), intLiteral(1)) - ) - )); + assertEqual( + "source=t not a=1", filter(relation("t"), not(compare("=", field("a"), intLiteral(1))))); } @Test public void testLogicalOrExpr() { - assertEqual("source=t a=1 or b=2", + assertEqual( + "source=t a=1 or b=2", filter( relation("t"), - or( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + or(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalAndExpr() { - assertEqual("source=t a=1 and b=2", + assertEqual( + "source=t a=1 and b=2", filter( relation("t"), - and( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + and(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalAndExprWithoutKeywordAnd() { - assertEqual("source=t a=1 b=2", + assertEqual( + "source=t a=1 b=2", filter( relation("t"), - and( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + and(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalXorExpr() { - assertEqual("source=t a=1 xor b=2", + assertEqual( + "source=t a=1 xor b=2", filter( relation("t"), - xor( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + xor(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalLikeExpr() { - assertEqual("source=t like(a, '_a%b%c_d_')", - filter( - relation("t"), - function("like", field("a"), stringLiteral("_a%b%c_d_")) - )); + assertEqual( + "source=t like(a, '_a%b%c_d_')", + filter(relation("t"), function("like", field("a"), 
stringLiteral("_a%b%c_d_")))); } @Test public void testBooleanIsNullFunction() { - assertEqual("source=t isnull(a)", - filter( - relation("t"), - function("is null", field("a")) - )); + assertEqual("source=t isnull(a)", filter(relation("t"), function("is null", field("a")))); } @Test public void testBooleanIsNotNullFunction() { - assertEqual("source=t isnotnull(a)", - filter( - relation("t"), - function("is not null", field("a")) - )); + assertEqual( + "source=t isnotnull(a)", filter(relation("t"), function("is not null", field("a")))); } - /** - * Todo. search operator should not include functionCall, need to change antlr. - */ + /** Todo. search operator should not include functionCall, need to change antlr. */ @Ignore("search operator should not include functionCall, need to change antlr") public void testEvalExpr() { - assertEqual("source=t f=abs(a)", - filter( - relation("t"), - equalTo( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t f=abs(a)", + filter(relation("t"), equalTo(field("f"), function("abs", field("a"))))); } @Test public void testEvalFunctionExpr() { - assertEqual("source=t | eval f=abs(a)", - eval( - relation("t"), - let( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t | eval f=abs(a)", + eval(relation("t"), let(field("f"), function("abs", field("a"))))); } @Test public void testEvalFunctionExprNoArgs() { - assertEqual("source=t | eval f=PI()", - eval( - relation("t"), - let( - field("f"), - function("PI") - ) - )); + assertEqual("source=t | eval f=PI()", eval(relation("t"), let(field("f"), function("PI")))); } @Test public void testPositionFunctionExpr() { - assertEqual("source=t | eval f=position('substr' IN 'str')", + assertEqual( + "source=t | eval f=position('substr' IN 'str')", eval( relation("t"), - let( - field("f"), - function("position", - stringLiteral("substr"), stringLiteral("str")) - ) - )); + let(field("f"), function("position", stringLiteral("substr"), 
stringLiteral("str"))))); } @Test public void testEvalBinaryOperationExpr() { - assertEqual("source=t | eval f=a+b", - eval( - relation("t"), - let( - field("f"), - function("+", field("a"), field("b")) - ) - )); - assertEqual("source=t | eval f=(a+b)", - eval( - relation("t"), - let( - field("f"), - function("+", field("a"), field("b")) - ) - )); + assertEqual( + "source=t | eval f=a+b", + eval(relation("t"), let(field("f"), function("+", field("a"), field("b"))))); + assertEqual( + "source=t | eval f=(a+b)", + eval(relation("t"), let(field("f"), function("+", field("a"), field("b"))))); } @Test public void testLiteralValueBinaryOperationExpr() { - assertEqual("source=t | eval f=3+2", - eval( - relation("t"), - let( - field("f"), - function("+", intLiteral(3), intLiteral(2)) - ) - )); + assertEqual( + "source=t | eval f=3+2", + eval(relation("t"), let(field("f"), function("+", intLiteral(3), intLiteral(2))))); } @Test public void testBinaryOperationExprWithParentheses() { - assertEqual("source = t | where a = (1 + 2) * 3", + assertEqual( + "source = t | where a = (1 + 2) * 3", filter( relation("t"), - compare("=", + compare( + "=", field("a"), - function("*", - function("+", intLiteral(1), intLiteral(2)), - intLiteral(3))))); + function("*", function("+", intLiteral(1), intLiteral(2)), intLiteral(3))))); } @Test public void testBinaryOperationExprPrecedence() { - assertEqual("source = t | where a = 1 + 2 * 3", + assertEqual( + "source = t | where a = 1 + 2 * 3", filter( relation("t"), - compare("=", + compare( + "=", field("a"), - function("+", - intLiteral(1), - function("*", intLiteral(2), intLiteral(3)))))); + function("+", intLiteral(1), function("*", intLiteral(2), intLiteral(3)))))); } @Test public void testCompareExpr() { - assertEqual("source=t a='b'", - filter( - relation("t"), - compare("=", field("a"), stringLiteral("b")) - )); + assertEqual( + "source=t a='b'", filter(relation("t"), compare("=", field("a"), stringLiteral("b")))); } @Test public void 
testCompareFieldsExpr() { - assertEqual("source=t a>b", - filter( - relation("t"), - compare(">", field("a"), field("b")) - )); + assertEqual("source=t a>b", filter(relation("t"), compare(">", field("a"), field("b")))); } @Test public void testInExpr() { - assertEqual("source=t f in (1, 2, 3)", - filter( - relation("t"), - in( - field("f"), - intLiteral(1), intLiteral(2), intLiteral(3)) - )); + assertEqual( + "source=t f in (1, 2, 3)", + filter(relation("t"), in(field("f"), intLiteral(1), intLiteral(2), intLiteral(3)))); } @Test public void testFieldExpr() { - assertEqual("source=t | sort + f", - sort( - relation("t"), - field("f", defaultSortFieldArgs()) - )); + assertEqual("source=t | sort + f", sort(relation("t"), field("f", defaultSortFieldArgs()))); } @Test public void testSortFieldWithMinusKeyword() { - assertEqual("source=t | sort - f", + assertEqual( + "source=t | sort - f", sort( relation("t"), - field( - "f", - argument("asc", booleanLiteral(false)), - argument("type", nullLiteral()) - ) - )); + field("f", argument("asc", booleanLiteral(false)), argument("type", nullLiteral())))); } @Test public void testSortFieldWithBackticks() { - assertEqual("source=t | sort `f`", - sort( - relation("t"), - field("f", defaultSortFieldArgs()) - )); + assertEqual("source=t | sort `f`", sort(relation("t"), field("f", defaultSortFieldArgs()))); } @Test public void testSortFieldWithAutoKeyword() { - assertEqual("source=t | sort auto(f)", + assertEqual( + "source=t | sort auto(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("auto")) - ) - )); + argument("type", stringLiteral("auto"))))); } @Test public void testSortFieldWithIpKeyword() { - assertEqual("source=t | sort ip(f)", + assertEqual( + "source=t | sort ip(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("ip")) - ) - )); + argument("type", stringLiteral("ip"))))); } @Test public void 
testSortFieldWithNumKeyword() { - assertEqual("source=t | sort num(f)", + assertEqual( + "source=t | sort num(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("num")) - ) - )); + argument("type", stringLiteral("num"))))); } @Test public void testSortFieldWithStrKeyword() { - assertEqual("source=t | sort str(f)", + assertEqual( + "source=t | sort str(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("str")) - ) - )); + argument("type", stringLiteral("str"))))); } @Test public void testAggFuncCallExpr() { - assertEqual("source=t | stats avg(a) by b", + assertEqual( + "source=t | stats avg(a) by b", agg( relation("t"), - exprList( - alias( - "avg(a)", - aggregate("avg", field("a")) - ) - ), + exprList(alias("avg(a)", aggregate("avg", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testVarAggregationShouldPass() { - assertEqual("source=t | stats var_samp(a) by b", + assertEqual( + "source=t | stats var_samp(a) by b", agg( relation("t"), - exprList( - alias( - "var_samp(a)", - aggregate("var_samp", field("a")) - ) - ), + exprList(alias("var_samp(a)", aggregate("var_samp", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testVarpAggregationShouldPass() { - assertEqual("source=t | stats var_pop(a) by b", + assertEqual( + "source=t | stats var_pop(a) by b", agg( relation("t"), - exprList( - alias( - "var_pop(a)", - aggregate("var_pop", field("a")) - ) - ), + exprList(alias("var_pop(a)", aggregate("var_pop", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void 
testStdDevAggregationShouldPass() { - assertEqual("source=t | stats stddev_samp(a) by b", + assertEqual( + "source=t | stats stddev_samp(a) by b", agg( relation("t"), - exprList( - alias( - "stddev_samp(a)", - aggregate("stddev_samp", field("a")) - ) - ), + exprList(alias("stddev_samp(a)", aggregate("stddev_samp", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStdDevPAggregationShouldPass() { - assertEqual("source=t | stats stddev_pop(a) by b", + assertEqual( + "source=t | stats stddev_pop(a) by b", agg( relation("t"), - exprList( - alias( - "stddev_pop(a)", - aggregate("stddev_pop", field("a")) - ) - ), + exprList(alias("stddev_pop(a)", aggregate("stddev_pop", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testPercentileAggFuncExpr() { - assertEqual("source=t | stats percentile<1>(a)", + assertEqual( + "source=t | stats percentile<1>(a)", agg( relation("t"), exprList( - alias("percentile<1>(a)", - aggregate( - "percentile", - field("a"), - argument("rank", intLiteral(1)) - ) - ) - ), + alias( + "percentile<1>(a)", + aggregate("percentile", field("a"), argument("rank", intLiteral(1))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testCountFuncCallExpr() { - assertEqual("source=t | stats count() by b", + assertEqual( + "source=t | stats count() by b", agg( relation("t"), - exprList( - alias( - "count()", - aggregate("count", AllFields.of()) - ) - ), + exprList(alias("count()", aggregate("count", AllFields.of()))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testDistinctCount() { - assertEqual("source=t | stats 
distinct_count(a)", + assertEqual( + "source=t | stats distinct_count(a)", agg( relation("t"), - exprList( - alias("distinct_count(a)", - distinctAggregate("count", field("a")))), + exprList(alias("distinct_count(a)", distinctAggregate("count", field("a")))), emptyList(), emptyList(), defaultStatsArgs())); @@ -523,168 +371,114 @@ public void testDistinctCount() { @Test public void testTakeAggregationNoArgsShouldPass() { - assertEqual("source=t | stats take(a)", + assertEqual( + "source=t | stats take(a)", agg( relation("t"), - exprList(alias("take(a)", - aggregate("take", field("a"), unresolvedArg("size", intLiteral(10))))), + exprList( + alias( + "take(a)", + aggregate("take", field("a"), unresolvedArg("size", intLiteral(10))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testTakeAggregationWithArgsShouldPass() { - assertEqual("source=t | stats take(a, 5)", + assertEqual( + "source=t | stats take(a, 5)", agg( relation("t"), - exprList(alias("take(a, 5)", - aggregate("take", field("a"), unresolvedArg("size", intLiteral(5))))), + exprList( + alias( + "take(a, 5)", + aggregate("take", field("a"), unresolvedArg("size", intLiteral(5))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } - @Test public void testEvalFuncCallExpr() { - assertEqual("source=t | eval f=abs(a)", - eval( - relation("t"), - let( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t | eval f=abs(a)", + eval(relation("t"), let(field("f"), function("abs", field("a"))))); } @Test public void testDataTypeFuncCall() { - assertEqual("source=t | eval f=cast(1 as string)", - eval( - relation("t"), - let( - field("f"), - cast(intLiteral(1), stringLiteral("string")) - ) - )); + assertEqual( + "source=t | eval f=cast(1 as string)", + eval(relation("t"), let(field("f"), cast(intLiteral(1), stringLiteral("string"))))); } @Test public void testNestedFieldName() { - assertEqual("source=t | fields 
field0.field1.field2", + assertEqual( + "source=t | fields field0.field1.field2", projectWithArg( relation("t"), defaultFieldsArgs(), - field( - qualifiedName("field0", "field1", "field2") - ) - )); + field(qualifiedName("field0", "field1", "field2")))); } @Test public void testFieldNameWithSpecialChars() { - assertEqual("source=t | fields `field-0`", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field( - qualifiedName("field-0") - ) - )); + assertEqual( + "source=t | fields `field-0`", + projectWithArg(relation("t"), defaultFieldsArgs(), field(qualifiedName("field-0")))); } @Test public void testNestedFieldNameWithSpecialChars() { - assertEqual("source=t | fields `field-0`.`field#1`.`field*2`", + assertEqual( + "source=t | fields `field-0`.`field#1`.`field*2`", projectWithArg( relation("t"), defaultFieldsArgs(), - field( - qualifiedName("field-0", "field#1", "field*2") - ) - )); + field(qualifiedName("field-0", "field#1", "field*2")))); } @Test public void testStringLiteralExpr() { - assertEqual("source=t a=\"string\"", - filter( - relation("t"), - compare( - "=", - field("a"), - stringLiteral("string") - ) - )); + assertEqual( + "source=t a=\"string\"", + filter(relation("t"), compare("=", field("a"), stringLiteral("string")))); } @Test public void testIntegerLiteralExpr() { - assertEqual("source=t a=1 b=-1", + assertEqual( + "source=t a=1 b=-1", filter( relation("t"), and( - compare( - "=", - field("a"), - intLiteral(1) - ), - compare( - "=", - field("b"), - intLiteral(-1) - ) - ) - )); + compare("=", field("a"), intLiteral(1)), + compare("=", field("b"), intLiteral(-1))))); } @Test public void testLongLiteralExpr() { - assertEqual("source=t a=1234567890123 b=-1234567890123", + assertEqual( + "source=t a=1234567890123 b=-1234567890123", filter( relation("t"), and( - compare( - "=", - field("a"), - longLiteral(1234567890123L) - ), - compare( - "=", - field("b"), - longLiteral(-1234567890123L) - ) - ) - )); + compare("=", field("a"), 
longLiteral(1234567890123L)), + compare("=", field("b"), longLiteral(-1234567890123L))))); } @Test public void testDoubleLiteralExpr() { - assertEqual("source=t b=0.1", - filter( - relation("t"), - compare( - "=", - field("b"), - doubleLiteral(0.1) - ) - )); + assertEqual( + "source=t b=0.1", filter(relation("t"), compare("=", field("b"), doubleLiteral(0.1)))); } @Test public void testBooleanLiteralExpr() { - assertEqual("source=t a=true", - filter( - relation("t"), - compare( - "=", - field("a"), - booleanLiteral(true) - ) - )); + assertEqual( + "source=t a=true", filter(relation("t"), compare("=", field("a"), booleanLiteral(true)))); } @Test @@ -692,42 +486,23 @@ public void testIntervalLiteralExpr() { assertEqual( "source=t a = interval 1 day", filter( - relation("t"), - compare( - "=", - field("a"), - intervalLiteral(1, DataType.INTEGER, "day") - ) - )); + relation("t"), compare("=", field("a"), intervalLiteral(1, DataType.INTEGER, "day")))); } @Test public void testKeywordsAsIdentifiers() { - assertEqual( - "source=timestamp", - relation("timestamp") - ); + assertEqual("source=timestamp", relation("timestamp")); assertEqual( "source=t | fields timestamp", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field("timestamp") - ) - ); + projectWithArg(relation("t"), defaultFieldsArgs(), field("timestamp"))); } @Test public void canBuildKeywordsAsIdentInQualifiedName() { assertEqual( "source=test | fields timestamp", - projectWithArg( - relation("test"), - defaultFieldsArgs(), - field("timestamp") - ) - ); + projectWithArg(relation("test"), defaultFieldsArgs(), field("timestamp"))); } @Test @@ -740,10 +515,7 @@ public void canBuildMatchRelevanceFunctionWithArguments() { "match", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -755,13 +527,11 @@ public void 
canBuildMulti_matchRelevanceFunctionWithArguments() { relation("test"), function( "multi_match", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -773,13 +543,11 @@ public void canBuildSimple_query_stringRelevanceFunctionWithArguments() { relation("test"), function( "simple_query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -791,13 +559,11 @@ public void canBuildQuery_stringRelevanceFunctionWithArguments() { relation("test"), function( "query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -816,11 +582,10 @@ public void functionNameCanBeUsedAsIdentifier() { + "| TIME_TO_SEC | TIMESTAMP | TO_DAYS | UNIX_TIMESTAMP | WEEK | YEAR"); assertFunctionNameCouldBeId( "SUBSTR | SUBSTRING | TRIM | LTRIM | RTRIM | LOWER | UPPER | CONCAT | CONCAT_WS | LENGTH " - + "| STRCMP | RIGHT | LEFT | ASCII | LOCATE | REPLACE" - ); + + "| STRCMP | RIGHT | LEFT | ASCII | LOCATE | REPLACE"); assertFunctionNameCouldBeId( "ABS | CEIL | CEILING | CONV | 
CRC32 | E | EXP | FLOOR | LN | LOG" - + " | LOG10 | LOG2 | MOD | PI |POW | POWER | RAND | ROUND | SIGN | SQRT | TRUNCATE " + + " | LOG10 | LOG2 | MOD | PI |POW | POWER | RAND | ROUND | SIGN | SQRT | TRUNCATE " + "| ACOS | ASIN | ATAN | ATAN2 | COS | COT | DEGREES | RADIANS | SIN | TAN"); assertFunctionNameCouldBeId( "SEARCH | DESCRIBE | SHOW | FROM | WHERE | FIELDS | RENAME | STATS " @@ -831,100 +596,79 @@ public void functionNameCanBeUsedAsIdentifier() { void assertFunctionNameCouldBeId(String antlrFunctionName) { List functionList = - Arrays.stream(antlrFunctionName.split("\\|")).map(String::stripLeading) - .map(String::stripTrailing).collect( - Collectors.toList()); + Arrays.stream(antlrFunctionName.split("\\|")) + .map(String::stripLeading) + .map(String::stripTrailing) + .collect(Collectors.toList()); assertFalse(functionList.isEmpty()); for (String functionName : functionList) { - assertEqual(String.format(Locale.ROOT, "source=t | fields %s", functionName), - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field( - qualifiedName(functionName) - ) - )); + assertEqual( + String.format(Locale.ROOT, "source=t | fields %s", functionName), + projectWithArg(relation("t"), defaultFieldsArgs(), field(qualifiedName(functionName)))); } } // https://github.com/opensearch-project/sql/issues/1318 @Test public void indexCanBeId() { - assertEqual("source = index | stats count() by index", + assertEqual( + "source = index | stats count() by index", agg( relation("index"), - exprList( - alias( - "count()", - aggregate("count", AllFields.of()) - ) - ), + exprList(alias("count()", aggregate("count", AllFields.of()))), emptyList(), - exprList( - alias( - "index", - field("index") - )), - defaultStatsArgs() - )); + exprList(alias("index", field("index"))), + defaultStatsArgs())); } @Test public void testExtractFunctionExpr() { - assertEqual("source=t | eval f=extract(day from '2001-05-07 10:11:12')", + assertEqual( + "source=t | eval f=extract(day from '2001-05-07 
10:11:12')", eval( relation("t"), let( field("f"), - function("extract", - stringLiteral("day"), stringLiteral("2001-05-07 10:11:12")) - ) - )); + function("extract", stringLiteral("day"), stringLiteral("2001-05-07 10:11:12"))))); } - @Test public void testGet_FormatFunctionExpr() { - assertEqual("source=t | eval f=get_format(DATE,'USA')", + assertEqual( + "source=t | eval f=get_format(DATE,'USA')", eval( relation("t"), - let( - field("f"), - function("get_format", - stringLiteral("DATE"), stringLiteral("USA")) - ) - )); + let(field("f"), function("get_format", stringLiteral("DATE"), stringLiteral("USA"))))); } @Test public void testTimeStampAddFunctionExpr() { - assertEqual("source=t | eval f=timestampadd(YEAR, 15, '2001-03-06 00:00:00')", + assertEqual( + "source=t | eval f=timestampadd(YEAR, 15, '2001-03-06 00:00:00')", eval( relation("t"), let( field("f"), - function("timestampadd", + function( + "timestampadd", stringLiteral("YEAR"), intLiteral(15), - stringLiteral("2001-03-06 00:00:00")) - ) - )); + stringLiteral("2001-03-06 00:00:00"))))); } @Test public void testTimeStampDiffFunctionExpr() { - assertEqual("source=t | eval f=timestampdiff(" - + "YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00')", + assertEqual( + "source=t | eval f=timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00')", eval( relation("t"), let( field("f"), - function("timestampdiff", + function( + "timestampdiff", stringLiteral("YEAR"), stringLiteral("1997-01-01 00:00:00"), - stringLiteral("2001-03-06 00:00:00")) - ) - )); + stringLiteral("2001-03-06 00:00:00"))))); } } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java index ddcde513dd..16aa0752e6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.junit.Assert.assertEquals; @@ -31,6 +30,7 @@ public class AstNowLikeFunctionTest { /** * Set parameterized values used in test. + * * @param name Function name * @param hasFsp Whether function has fsp argument * @param hasShortcut Whether function has shortcut (call without `()`) @@ -43,24 +43,26 @@ public AstNowLikeFunctionTest(String name, Boolean hasFsp, Boolean hasShortcut) /** * Returns function data to test. + * * @return An iterable. */ @Parameterized.Parameters(name = "{0}") public static Iterable functionNames() { - return List.of(new Object[][]{ - {"now", false, false }, - {"current_timestamp", false, false}, - {"localtimestamp", false, false}, - {"localtime", false, false}, - {"sysdate", true, false}, - {"curtime", false, false}, - {"current_time", false, false}, - {"curdate", false, false}, - {"current_date", false, false}, - {"utc_date", false, false}, - {"utc_time", false, false}, - {"utc_timestamp", false, false} - }); + return List.of( + new Object[][] { + {"now", false, false}, + {"current_timestamp", false, false}, + {"localtimestamp", false, false}, + {"localtime", false, false}, + {"sysdate", true, false}, + {"curtime", false, false}, + {"current_time", false, false}, + {"curdate", false, false}, + {"current_date", false, false}, + {"utc_date", false, false}, + {"utc_time", false, false}, + {"utc_timestamp", false, false} + }); } private final String name; @@ -70,26 +72,20 @@ public static Iterable functionNames() { @Test public void test_function_call_eval() { assertEqual( - eval(relation("t"), let(field("r"), function(name))), - "source=t | eval r=" + name + "()" - ); + eval(relation("t"), let(field("r"), function(name))), "source=t | eval r=" + name + "()"); } @Test public void test_shortcut_eval() { Assume.assumeTrue(hasShortcut); - assertEqual( - eval(relation("t"), let(field("r"), function(name))), - "source=t | eval r=" + name - ); + assertEqual(eval(relation("t"), 
let(field("r"), function(name))), "source=t | eval r=" + name); } @Test public void test_function_call_where() { assertEqual( filter(relation("t"), compare("=", field("a"), function(name))), - "search source=t | where a=" + name + "()" - ); + "search source=t | where a=" + name + "()"); } @Test @@ -97,18 +93,15 @@ public void test_shortcut_where() { Assume.assumeTrue(hasShortcut); assertEqual( filter(relation("t"), compare("=", field("a"), function(name))), - "search source=t | where a=" + name - ); + "search source=t | where a=" + name); } @Test public void test_function_call_fsp() { Assume.assumeTrue(hasFsp); - assertEqual(filter( - relation("t"), - compare("=", field("a"), function(name, intLiteral(0))) - ), "search source=t | where a=" + name + "(0)" - ); + assertEqual( + filter(relation("t"), compare("=", field("a"), function(name, intLiteral(0)))), + "search source=t | where a=" + name + "(0)"); } protected void assertEqual(Node expectedPlan, String query) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java index de74e4932f..7d7b31e822 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java @@ -28,8 +28,7 @@ public class AstStatementBuilderTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private PPLSyntaxParser parser = new PPLSyntaxParser(); @@ -38,9 +37,8 @@ public void buildQueryStatement() { assertEqual( "search source=t a=1", new Query( - project( - filter(relation("t"), compare("=", field("a"), - intLiteral(1))), AllFields.of()), 0)); + project(filter(relation("t"), compare("=", field("a"), intLiteral(1))), AllFields.of()), + 0)); } @Test @@ -50,8 +48,8 @@ public void buildExplainStatement() { new Explain( new 
Query( project( - filter(relation("t"), compare("=", field("a"), intLiteral(1))), - AllFields.of()), 0))); + filter(relation("t"), compare("=", field("a"), intLiteral(1))), AllFields.of()), + 0))); } private void assertEqual(String query, Statement expectedStatement) { @@ -66,7 +64,8 @@ private void assertExplainEqual(String query, Statement expectedStatement) { private Node plan(String query, boolean isExplain) { final AstStatementBuilder builder = - new AstStatementBuilder(new AstBuilder(new AstExpressionBuilder(), query), + new AstStatementBuilder( + new AstBuilder(new AstExpressionBuilder(), query), AstStatementBuilder.StatementBuilderContext.builder().isExplain(isExplain).build()); return builder.visit(parser.parse(query)); } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java index e18dfbd65c..761dbe2997 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static java.util.Collections.emptyList; @@ -28,12 +27,10 @@ public class ArgumentFactoryTest extends AstBuilderTest { @Test public void testFieldsCommandArgument() { - assertEqual("source=t | fields - a", + assertEqual( + "source=t | fields - a", projectWithArg( - relation("t"), - exprList(argument("exclude", booleanLiteral(true))), - field("a") - )); + relation("t"), exprList(argument("exclude", booleanLiteral(true))), field("a"))); } @Test @@ -47,20 +44,14 @@ public void testStatsCommandArgument() { "source=t | stats partitions=1 allnum=false delim=',' avg(a) dedup_splitvalues=true", agg( relation("t"), - exprList( - alias( - "avg(a)", - aggregate("avg", field("a"))) - ), + exprList(alias("avg(a)", aggregate("avg", field("a")))), emptyList(), emptyList(), exprList( argument("partitions", 
intLiteral(1)), argument("allnum", booleanLiteral(false)), argument("delim", stringLiteral(",")), - argument("dedupsplit", booleanLiteral(true)) - ) - )); + argument("dedupsplit", booleanLiteral(true))))); } @Test @@ -72,52 +63,43 @@ public void testStatsCommandDefaultArgument() { @Test public void testDedupCommandArgument() { - assertEqual("source=t | dedup 3 field0 keepempty=false consecutive=true", + assertEqual( + "source=t | dedup 3 field0 keepempty=false consecutive=true", dedupe( relation("t"), exprList( argument("number", intLiteral(3)), argument("keepempty", booleanLiteral(false)), - argument("consecutive", booleanLiteral(true)) - ), - field("field0") - )); + argument("consecutive", booleanLiteral(true))), + field("field0"))); } @Test public void testDedupCommandDefaultArgument() { assertEqual( - "source=t | dedup 1 field0 keepempty=false consecutive=false", - "source=t | dedup field0" - ); + "source=t | dedup 1 field0 keepempty=false consecutive=false", "source=t | dedup field0"); } @Test public void testSortCommandDefaultArgument() { - assertEqual( - "source=t | sort field0", - "source=t | sort field0" - ); + assertEqual("source=t | sort field0", "source=t | sort field0"); } @Test public void testSortFieldArgument() { - assertEqual("source=t | sort - auto(field0)", + assertEqual( + "source=t | sort - auto(field0)", sort( relation("t"), field( "field0", exprList( argument("asc", booleanLiteral(false)), - argument("type", stringLiteral("auto")) - ) - ) - )); + argument("type", stringLiteral("auto")))))); } @Test public void testNoArgConstructorForArgumentFactoryShouldPass() { new ArgumentFactory(); } - } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java index 1998647dba..cd51ea07df 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java +++ 
b/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.junit.Assert.assertEquals; @@ -29,166 +28,140 @@ public class PPLQueryDataAnonymizerTest { @Test public void testSearchCommand() { - assertEquals("source=t | where a = ***", - anonymize("search source=t a=1") - ); + assertEquals("source=t | where a = ***", anonymize("search source=t a=1")); } @Test public void testTableFunctionCommand() { - assertEquals("source=prometheus.query_range(***,***,***,***)", - anonymize("source=prometheus.query_range('afsd',123,123,3)") - ); + assertEquals( + "source=prometheus.query_range(***,***,***,***)", + anonymize("source=prometheus.query_range('afsd',123,123,3)")); } @Test public void testPrometheusPPLCommand() { - assertEquals("source=prometheus.http_requests_process", - anonymize("source=prometheus.http_requests_process") - ); + assertEquals( + "source=prometheus.http_requests_process", + anonymize("source=prometheus.http_requests_process")); } @Test public void testWhereCommand() { - assertEquals("source=t | where a = ***", - anonymize("search source=t | where a=1") - ); + assertEquals("source=t | where a = ***", anonymize("search source=t | where a=1")); } @Test public void testFieldsCommandWithoutArguments() { - assertEquals("source=t | fields + f,g", - anonymize("source=t | fields f,g")); + assertEquals("source=t | fields + f,g", anonymize("source=t | fields f,g")); } @Test public void testFieldsCommandWithIncludeArguments() { - assertEquals("source=t | fields + f,g", - anonymize("source=t | fields + f,g")); + assertEquals("source=t | fields + f,g", anonymize("source=t | fields + f,g")); } @Test public void testFieldsCommandWithExcludeArguments() { - assertEquals("source=t | fields - f,g", - anonymize("source=t | fields - f,g")); + assertEquals("source=t | fields - f,g", anonymize("source=t | fields - f,g")); } @Test public void 
testRenameCommandWithMultiFields() { - assertEquals("source=t | rename f as g,h as i,j as k", + assertEquals( + "source=t | rename f as g,h as i,j as k", anonymize("source=t | rename f as g,h as i,j as k")); } @Test public void testStatsCommandWithByClause() { - assertEquals("source=t | stats count(a) by b", - anonymize("source=t | stats count(a) by b")); + assertEquals("source=t | stats count(a) by b", anonymize("source=t | stats count(a) by b")); } @Test public void testStatsCommandWithNestedFunctions() { - assertEquals("source=t | stats sum(+(a,b))", - anonymize("source=t | stats sum(a+b)")); + assertEquals("source=t | stats sum(+(a,b))", anonymize("source=t | stats sum(a+b)")); } @Test public void testDedupCommand() { - assertEquals("source=t | dedup f1,f2 1 keepempty=false consecutive=false", + assertEquals( + "source=t | dedup f1,f2 1 keepempty=false consecutive=false", anonymize("source=t | dedup f1, f2")); } @Test public void testHeadCommandWithNumber() { - assertEquals("source=t | head 3", - anonymize("source=t | head 3")); + assertEquals("source=t | head 3", anonymize("source=t | head 3")); } - //todo, sort order is ignored, it doesn't impact the log analysis. + // todo, sort order is ignored, it doesn't impact the log analysis. 
@Test public void testSortCommandWithOptions() { - assertEquals("source=t | sort f1,f2", - anonymize("source=t | sort - f1, + f2")); + assertEquals("source=t | sort f1,f2", anonymize("source=t | sort - f1, + f2")); } @Test public void testEvalCommand() { - assertEquals("source=t | eval r=abs(f)", - anonymize("source=t | eval r=abs(f)")); + assertEquals("source=t | eval r=abs(f)", anonymize("source=t | eval r=abs(f)")); } @Test public void testRareCommandWithGroupBy() { - assertEquals("source=t | rare 10 a by b", - anonymize("source=t | rare a by b")); + assertEquals("source=t | rare 10 a by b", anonymize("source=t | rare a by b")); } @Test public void testTopCommandWithNAndGroupBy() { - assertEquals("source=t | top 1 a by b", - anonymize("source=t | top 1 a by b")); + assertEquals("source=t | top 1 a by b", anonymize("source=t | top 1 a by b")); } @Test public void testAndExpression() { - assertEquals("source=t | where a = *** and b = ***", - anonymize("source=t | where a=1 and b=2") - ); + assertEquals("source=t | where a = *** and b = ***", anonymize("source=t | where a=1 and b=2")); } @Test public void testOrExpression() { - assertEquals("source=t | where a = *** or b = ***", - anonymize("source=t | where a=1 or b=2") - ); + assertEquals("source=t | where a = *** or b = ***", anonymize("source=t | where a=1 or b=2")); } @Test public void testXorExpression() { - assertEquals("source=t | where a = *** xor b = ***", - anonymize("source=t | where a=1 xor b=2") - ); + assertEquals("source=t | where a = *** xor b = ***", anonymize("source=t | where a=1 xor b=2")); } @Test public void testNotExpression() { - assertEquals("source=t | where not a = ***", - anonymize("source=t | where not a=1 ") - ); + assertEquals("source=t | where not a = ***", anonymize("source=t | where not a=1 ")); } @Test public void testQualifiedName() { - assertEquals("source=t | fields + field0", - anonymize("source=t | fields field0") - ); + assertEquals("source=t | fields + field0", 
anonymize("source=t | fields field0")); } @Test public void testDateFunction() { - assertEquals("source=t | eval date=DATE_ADD(DATE(***),INTERVAL *** HOUR)", - anonymize("source=t | eval date=DATE_ADD(DATE('2020-08-26'),INTERVAL 1 HOUR)") - ); + assertEquals( + "source=t | eval date=DATE_ADD(DATE(***),INTERVAL *** HOUR)", + anonymize("source=t | eval date=DATE_ADD(DATE('2020-08-26'),INTERVAL 1 HOUR)")); } @Test public void testExplain() { - assertEquals("source=t | fields + a", - anonymizeStatement("source=t | fields a", true) - ); + assertEquals("source=t | fields + a", anonymizeStatement("source=t | fields a", true)); } @Test public void testQuery() { - assertEquals("source=t | fields + a", - anonymizeStatement("source=t | fields a", false) - ); + assertEquals("source=t | fields + a", anonymizeStatement("source=t | fields a", false)); } @Test public void anonymizeFieldsNoArg() { - assertEquals("source=t | fields + f", - anonymize(projectWithArg(relation("t"), Collections.emptyList(), field("f"))) - ); + assertEquals( + "source=t | fields + f", + anonymize(projectWithArg(relation("t"), Collections.emptyList(), field("f")))); } private String anonymize(String query) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java index d64c8d5db4..7c1264e0b6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.hamcrest.MatcherAssert.assertThat; From c60a4f1cb9a469aeb91b2427f4e74e66e1f48cc3 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 16 Aug 2023 13:34:47 -0700 Subject: [PATCH 21/42] [Spotless] Applying Google Code Format for integ-tests #8 (#1962) * spotless apply for 81 integ-test files (#327) add ignore failures for 
build.gradle. Reverting ignore for checkstyle in integ-test Addressed PR comments. Addressed PR comments to expand jav doc. fixed string formatting Fixed string formatting. Fixed string formatting in MatchPhrasePrefixIT Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * address PR comments Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand --- .../sql/correctness/CorrectnessIT.java | 29 +- .../sql/correctness/report/ErrorTestCase.java | 10 +- .../correctness/report/FailedTestCase.java | 27 +- .../correctness/runner/ComparisonTest.java | 40 +- .../runner/connection/DBConnection.java | 20 +- .../runner/connection/JDBCConnection.java | 62 +- .../runner/resultset/DBResult.java | 86 +- .../correctness/tests/ComparisonTestTest.java | 156 +- .../sql/correctness/tests/DBResultTest.java | 55 +- .../correctness/tests/JDBCConnectionTest.java | 89 +- .../sql/datasource/DataSourceAPIsIT.java | 124 +- .../DatasourceClusterSettingsIT.java | 24 +- .../org/opensearch/sql/jdbc/CursorIT.java | 48 +- .../sql/legacy/AggregationExpressionIT.java | 252 ++- .../opensearch/sql/legacy/AggregationIT.java | 1285 ++++++++------- .../sql/legacy/CsvFormatResponseIT.java | 326 ++-- .../org/opensearch/sql/legacy/CursorIT.java | 170 +- .../opensearch/sql/legacy/DateFormatIT.java | 193 +-- .../sql/legacy/DateFunctionsIT.java | 108 +- .../org/opensearch/sql/legacy/DeleteIT.java | 49 +- .../org/opensearch/sql/legacy/ExplainIT.java | 235 +-- .../sql/legacy/GetEndpointQueryIT.java | 8 +- .../org/opensearch/sql/legacy/HashJoinIT.java | 76 +- .../org/opensearch/sql/legacy/HavingIT.java | 111 +- .../opensearch/sql/legacy/JSONRequestIT.java | 92 +- .../org/opensearch/sql/legacy/JdbcTestIT.java | 151 +- .../sql/legacy/JoinAliasWriterRuleIT.java | 115 +- .../org/opensearch/sql/legacy/JoinIT.java | 569 ++++--- .../sql/legacy/MathFunctionsIT.java | 103 +- 
.../sql/legacy/MetaDataQueriesIT.java | 33 +- .../opensearch/sql/legacy/MethodQueryIT.java | 137 +- .../org/opensearch/sql/legacy/MetricsIT.java | 6 +- .../opensearch/sql/legacy/MultiQueryIT.java | 135 +- .../sql/ppl/ConvertTZFunctionIT.java | 362 ++--- .../sql/ppl/CrossClusterSearchIT.java | 39 +- .../org/opensearch/sql/ppl/CsvFormatIT.java | 48 +- .../org/opensearch/sql/ppl/DataTypeIT.java | 34 +- .../sql/ppl/DateTimeComparisonIT.java | 986 +++++++----- .../sql/ppl/DateTimeFunctionIT.java | 1372 +++++++++++------ .../sql/ppl/DateTimeImplementationIT.java | 119 +- .../opensearch/sql/ppl/DedupCommandIT.java | 1 - .../opensearch/sql/ppl/DescribeCommandIT.java | 15 +- .../org/opensearch/sql/ppl/ExplainIT.java | 13 +- .../opensearch/sql/ppl/FieldsCommandIT.java | 11 +- .../org/opensearch/sql/ppl/HeadCommandIT.java | 41 +- .../sql/ppl/InformationSchemaCommandIT.java | 111 +- .../sql/ppl/LegacyAPICompatibilityIT.java | 23 +- .../org/opensearch/sql/ppl/LikeQueryIT.java | 46 +- .../opensearch/sql/ppl/MatchBoolPrefixIT.java | 11 +- .../java/org/opensearch/sql/ppl/MatchIT.java | 1 - .../org/opensearch/sql/ppl/MatchPhraseIT.java | 15 +- .../sql/ppl/MatchPhrasePrefixIT.java | 60 +- .../sql/ppl/MathematicalFunctionIT.java | 320 ++-- .../org/opensearch/sql/ppl/MetricsIT.java | 6 +- .../org/opensearch/sql/ppl/MultiMatchIT.java | 34 +- .../java/org/opensearch/sql/sql/AdminIT.java | 7 +- .../org/opensearch/sql/sql/AggregationIT.java | 509 +++--- .../sql/sql/ArithmeticFunctionIT.java | 438 +++--- .../org/opensearch/sql/sql/ConditionalIT.java | 224 +-- .../sql/sql/CorrectnessTestBase.java | 44 +- .../org/opensearch/sql/sql/CsvFormatIT.java | 50 +- .../sql/sql/DateTimeComparisonIT.java | 979 +++++++----- .../opensearch/sql/sql/DateTimeFormatsIT.java | 85 +- .../sql/sql/DateTimeFunctionIT.java | 861 ++++++----- .../sql/sql/DateTimeImplementationIT.java | 110 +- .../org/opensearch/sql/sql/ExpressionIT.java | 23 +- .../sql/sql/HighlightFunctionIT.java | 145 +- 
.../org/opensearch/sql/sql/IdentifierIT.java | 107 +- .../org/opensearch/sql/sql/JdbcFormatIT.java | 22 +- .../sql/sql/LegacyAPICompatibilityIT.java | 41 +- .../org/opensearch/sql/sql/LikeQueryIT.java | 46 +- .../opensearch/sql/sql/MatchBoolPrefixIT.java | 22 +- .../java/org/opensearch/sql/sql/MatchIT.java | 110 +- .../org/opensearch/sql/sql/MatchPhraseIT.java | 1 - .../sql/sql/MatchPhrasePrefixIT.java | 44 +- .../sql/sql/MathematicalFunctionIT.java | 25 +- .../org/opensearch/sql/sql/MetricsIT.java | 5 +- .../org/opensearch/sql/sql/MultiMatchIT.java | 113 +- .../sql/util/InternalRestHighLevelClient.java | 4 +- .../org/opensearch/sql/util/MatcherUtils.java | 90 +- 80 files changed, 7155 insertions(+), 5542 deletions(-) diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java index 9ec80c55a7..329aed80aa 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness; import static org.opensearch.sql.util.TestUtils.getResourceFilePath; @@ -32,11 +31,12 @@ import org.opensearch.sql.correctness.testset.TestDataSet; import org.opensearch.test.OpenSearchIntegTestCase; -/** - * Correctness integration test by performing comparison test with other databases. - */ +/** Correctness integration test by performing comparison test with other databases. 
*/ @OpenSearchIntegTestCase.SuiteScopeTestCase -@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 3, supportsDedicatedMasters = false) +@OpenSearchIntegTestCase.ClusterScope( + scope = OpenSearchIntegTestCase.Scope.SUITE, + numDataNodes = 3, + supportsDedicatedMasters = false) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) public class CorrectnessIT extends OpenSearchIntegTestCase { @@ -47,8 +47,8 @@ public void performComparisonTest() throws URISyntaxException { TestConfig config = new TestConfig(getCmdLineArgs()); LOG.info("Starting comparison test {}", config); - try (ComparisonTest test = new ComparisonTest(getThisDBConnection(config), - getOtherDBConnections(config))) { + try (ComparisonTest test = + new ComparisonTest(getThisDBConnection(config), getOtherDBConnections(config))) { LOG.info("Loading test data set..."); test.connect(); for (TestDataSet dataSet : config.getTestDataSets()) { @@ -81,9 +81,7 @@ private DBConnection getThisDBConnection(TestConfig config) throws URISyntaxExce return new JDBCConnection("DB Tested", dbUrl); } - /** - * Use OpenSearch cluster given on CLI arg or internal embedded in SQLIntegTestCase - */ + /** Use OpenSearch cluster given on CLI arg or internal embedded in SQLIntegTestCase */ private DBConnection getOpenSearchConnection(TestConfig config) throws URISyntaxException { RestClient client; String openSearchHost = config.getOpenSearchHostUrl(); @@ -96,14 +94,11 @@ private DBConnection getOpenSearchConnection(TestConfig config) throws URISyntax return new OpenSearchConnection("jdbc:opensearch://" + openSearchHost, client); } - /** - * Create database connection with database name and connect URL - */ + /** Create database connection with database name and connect URL */ private DBConnection[] getOtherDBConnections(TestConfig config) { - return config.getOtherDbConnectionNameAndUrls(). - entrySet().stream(). - map(e -> new JDBCConnection(e.getKey(), e.getValue())). 
- toArray(DBConnection[]::new); + return config.getOtherDbConnectionNameAndUrls().entrySet().stream() + .map(e -> new JDBCConnection(e.getKey(), e.getValue())) + .toArray(DBConnection[]::new); } private void store(TestReport report) { diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java index cb13a01f98..1d69ff10ee 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.FAILURE; @@ -12,22 +11,17 @@ import lombok.Getter; import lombok.ToString; -/** - * Report for test case that ends with an error. - */ +/** Report for test case that ends with an error. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter public class ErrorTestCase extends TestCaseReport { - /** - * Root cause of the error - */ + /** Root cause of the error */ private final String reason; public ErrorTestCase(int id, String sql, String reason) { super(id, sql, FAILURE); this.reason = reason; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java index 86693b98e9..2b5ab431e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.FAILURE; @@ -16,30 +15,21 @@ import lombok.ToString; import 
org.opensearch.sql.correctness.runner.resultset.DBResult; -/** - * Report for test case that fails due to inconsistent result set. - */ +/** Report for test case that fails due to inconsistent result set. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter public class FailedTestCase extends TestCaseReport { - /** - * Inconsistent result sets for reporting - */ + /** Inconsistent result sets for reporting */ private final List resultSets; - /** - * Explain where the difference is caused the test failure. - */ + /** Explain where the difference is caused the test failure. */ private final String explain; - /** - * Errors occurred for partial other databases. - */ + /** Errors occurred for partial other databases. */ private final String errors; - public FailedTestCase(int id, String sql, List resultSets, String errors) { super(id, sql, FAILURE); this.resultSets = resultSets; @@ -47,10 +37,9 @@ public FailedTestCase(int id, String sql, List resultSets, String erro this.errors = errors; // Generate explanation by diff the first result with remaining - this.explain = resultSets.subList(1, resultSets.size()) - .stream() - .map(result -> resultSets.get(0).diff(result)) - .collect(Collectors.joining(", ")); + this.explain = + resultSets.subList(1, resultSets.size()).stream() + .map(result -> resultSets.get(0).diff(result)) + .collect(Collectors.joining(", ")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java index 129bc70426..1fee41f1fe 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner; import static com.google.common.collect.ObjectArrays.concat; @@ -25,24 +24,16 @@ import 
org.opensearch.sql.correctness.testset.TestQuerySet; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Comparison test runner for query result correctness. - */ +/** Comparison test runner for query result correctness. */ public class ComparisonTest implements AutoCloseable { - /** - * Next id for test case - */ + /** Next id for test case */ private int testCaseId = 1; - /** - * Connection for database being tested - */ + /** Connection for database being tested */ private final DBConnection thisConnection; - /** - * Database connections for reference databases - */ + /** Database connections for reference databases */ private final DBConnection[] otherDbConnections; public ComparisonTest(DBConnection thisConnection, DBConnection[] otherDbConnections) { @@ -53,9 +44,7 @@ public ComparisonTest(DBConnection thisConnection, DBConnection[] otherDbConnect Arrays.sort(this.otherDbConnections, Comparator.comparing(DBConnection::getDatabaseName)); } - /** - * Open database connection. - */ + /** Open database connection. 
*/ public void connect() { for (DBConnection conn : concat(thisConnection, otherDbConnections)) { conn.connect(); @@ -87,8 +76,11 @@ public TestReport verify(TestQuerySet querySet) { DBResult openSearchResult = thisConnection.select(sql); report.addTestCase(compareWithOtherDb(sql, openSearchResult)); } catch (Exception e) { - report.addTestCase(new ErrorTestCase(nextId(), sql, - StringUtils.format("%s: %s", e.getClass().getSimpleName(), extractRootCause(e)))); + report.addTestCase( + new ErrorTestCase( + nextId(), + sql, + StringUtils.format("%s: %s", e.getClass().getSimpleName(), extractRootCause(e)))); } } return report; @@ -116,9 +108,7 @@ public void close() { } } - /** - * Execute the query and compare with current result - */ + /** Execute the query and compare with current result */ private TestCaseReport compareWithOtherDb(String sql, DBResult openSearchResult) { List mismatchResults = Lists.newArrayList(openSearchResult); StringBuilder reasons = new StringBuilder(); @@ -137,7 +127,8 @@ private TestCaseReport compareWithOtherDb(String sql, DBResult openSearchResult) } } - if (mismatchResults.size() == 1) { // Only OpenSearch result on list. Cannot find other database support this query + if (mismatchResults.size() + == 1) { // Only OpenSearch result on list. Cannot find other database support this query return new ErrorTestCase(nextId(), sql, "No other databases support this query: " + reasons); } return new FailedTestCase(nextId(), sql, mismatchResults, reasons.toString()); @@ -150,8 +141,8 @@ private int nextId() { private void insertTestDataInBatch(DBConnection conn, String tableName, List testData) { Iterator iterator = testData.iterator(); String[] fieldNames = (String[]) iterator.next(); // first row is header of column names - Iterators.partition(iterator, 100). 
- forEachRemaining(batch -> conn.insert(tableName, fieldNames, batch)); + Iterators.partition(iterator, 100) + .forEachRemaining(batch -> conn.insert(tableName, fieldNames, batch)); } private String extractRootCause(Throwable e) { @@ -167,5 +158,4 @@ private String extractRootCause(Throwable e) { } return e.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java index a475428735..b01762fd21 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import java.util.List; import org.opensearch.sql.correctness.runner.resultset.DBResult; -/** - * Abstraction for different databases. - */ +/** Abstraction for different databases. */ public interface DBConnection { /** @@ -19,25 +16,23 @@ public interface DBConnection { */ String getDatabaseName(); - /** - * Connect to database by opening a connection. - */ + /** Connect to database by opening a connection. */ void connect(); /** * Create table with the schema. * * @param tableName table name - * @param schema schema json in OpenSearch mapping format + * @param schema schema json in OpenSearch mapping format */ void create(String tableName, String schema); /** * Insert batch of data to database. * - * @param tableName table name + * @param tableName table name * @param columnNames column names - * @param batch batch of rows + * @param batch batch of rows */ void insert(String tableName, String[] columnNames, List batch); @@ -56,9 +51,6 @@ public interface DBConnection { */ void drop(String tableName); - /** - * Close the database connection. - */ + /** Close the database connection. 
*/ void close(); - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java index d2d7d2aee6..7a67022117 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import static java.util.stream.Collectors.joining; @@ -23,33 +22,23 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Database connection by JDBC driver. - */ +/** Database connection by JDBC driver. */ public class JDBCConnection implements DBConnection { private static final String SINGLE_QUOTE = "'"; private static final String DOUBLE_QUOTE = "''"; private static final String BACKTICK = "`"; - /** - * Database name for display - */ + /** Database name for display */ private final String databaseName; - /** - * Database connection URL - */ + /** Database connection URL */ private final String connectionUrl; - /** - * JDBC driver config properties. - */ + /** JDBC driver config properties. */ private final Properties properties; - /** - * Current live connection - */ + /** Current live connection */ private Connection connection; public JDBCConnection(String databaseName, String connectionUrl) { @@ -58,9 +47,10 @@ public JDBCConnection(String databaseName, String connectionUrl) { /** * Create a JDBC connection with parameters given (but not connect to database at the moment). 
- * @param databaseName database name - * @param connectionUrl connection URL - * @param properties config properties + * + * @param databaseName database name + * @param connectionUrl connection URL + * @param properties config properties */ public JDBCConnection(String databaseName, String connectionUrl, Properties properties) { this.databaseName = databaseName; @@ -104,11 +94,11 @@ public void drop(String tableName) { @Override public void insert(String tableName, String[] columnNames, List batch) { try (Statement stmt = connection.createStatement()) { - String names = - Arrays.stream(columnNames).map(this::delimited).collect(joining(",")); + String names = Arrays.stream(columnNames).map(this::delimited).collect(joining(",")); for (Object[] fieldValues : batch) { - stmt.addBatch(StringUtils.format( - "INSERT INTO %s(%s) VALUES (%s)", tableName, names, getValueList(fieldValues))); + stmt.addBatch( + StringUtils.format( + "INSERT INTO %s(%s) VALUES (%s)", tableName, names, getValueList(fieldValues))); } stmt.executeBatch(); } catch (SQLException e) { @@ -120,8 +110,10 @@ public void insert(String tableName, String[] columnNames, List batch) public DBResult select(String query) { try (Statement stmt = connection.createStatement()) { ResultSet resultSet = stmt.executeQuery(query); - DBResult result = isOrderByQuery(query) - ? DBResult.resultInOrder(databaseName) : DBResult.result(databaseName); + DBResult result = + isOrderByQuery(query) + ? DBResult.resultInOrder(databaseName) + : DBResult.result(databaseName); populateMetaData(resultSet, result); populateData(resultSet, result); return result; @@ -140,20 +132,22 @@ public void close() { } /** - * Parse out type in schema json and convert to field name and type pairs for CREATE TABLE statement. + * Parse out type in schema json and convert to field name and type pairs for CREATE TABLE + * statement. 
*/ private String parseColumnNameAndTypesInSchemaJson(String schema) { JSONObject json = (JSONObject) new JSONObject(schema).query("/mappings/properties"); - return json.keySet().stream(). - map(colName -> delimited(colName) + " " + mapToJDBCType(json.getJSONObject(colName) - .getString("type"))) + return json.keySet().stream() + .map( + colName -> + delimited(colName) + + " " + + mapToJDBCType(json.getJSONObject(colName).getString("type"))) .collect(joining(",")); } private String getValueList(Object[] fieldValues) { - return Arrays.stream(fieldValues). - map(this::convertValueObjectToString). - collect(joining(",")); + return Arrays.stream(fieldValues).map(this::convertValueObjectToString).collect(joining(",")); } private String convertValueObjectToString(Object value) { @@ -209,9 +203,7 @@ private boolean isOrderByQuery(String query) { return query.trim().toUpperCase().contains("ORDER BY"); } - /** - * Setter for unit test mock - */ + /** Setter for unit test mock */ public void setConnection(Connection connection) { this.connection = connection; } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java index 52b7d26cc4..6ee3bb37f6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import com.google.common.collect.HashMultiset; @@ -20,53 +19,44 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Query result for equality comparison. Based on different type of query, such as query with/without ORDER BY and - * query with SELECT columns or just *, order of column and row may matter or not. 
So the internal data structure of this - * class is passed in from outside either list or set, hash map or linked hash map etc. + * Query result for equality comparison. Based on different type of query, such as query + * with/without ORDER BY and query with SELECT columns or just *, order of column and row may matter + * or not. So the internal data structure of this class is passed in from outside either list or + * set, hash map or linked hash map etc. */ @ToString public class DBResult { /** - * Possible types for floating point number + * Possible types for floating point number.
* H2 2.x use DOUBLE PRECISION instead of DOUBLE. */ private static final Set FLOAT_TYPES = ImmutableSet.of("FLOAT", "DOUBLE", "REAL", "DOUBLE PRECISION", "DECFLOAT"); /** - * Possible types for varchar. + * Possible types for varchar.
* H2 2.x use CHARACTER VARYING instead of VARCHAR. */ private static final Set VARCHAR = ImmutableSet.of("CHARACTER VARYING", "VARCHAR"); - /** - * Database name for display - */ + /** Database name for display */ private final String databaseName; - /** - * Column name and types from result set meta data - */ - @Getter - private final Collection schema; + /** Column name and types from result set meta data */ + @Getter private final Collection schema; - /** - * Data rows from result set - */ + /** Data rows from result set */ private final Collection dataRows; - /** - * In theory, a result set is a multi-set (bag) that allows duplicate and doesn't - * have order. - */ + /** In theory, a result set is a multi-set (bag) that allows duplicate and doesn't have order. */ public static DBResult result(String databaseName) { return new DBResult(databaseName, new ArrayList<>(), HashMultiset.create()); } /** - * But for queries with ORDER BY clause, we want to preserve the original order of data rows - * so we can check if the order is correct. + * But for queries with ORDER BY clause, we want to preserve the original order of data rows so we + * can check if the order is correct. */ public static DBResult resultInOrder(String databaseName) { return new DBResult(databaseName, new ArrayList<>(), new ArrayList<>()); @@ -103,21 +93,20 @@ public String getDatabaseName() { return databaseName; } - /** - * Flatten for simplifying json generated. - */ + /** Flatten for simplifying json generated. */ public Collection> getDataRows() { - Collection> values = isDataRowOrdered() - ? new ArrayList<>() : HashMultiset.create(); + Collection> values = + isDataRowOrdered() ? new ArrayList<>() : HashMultiset.create(); dataRows.stream().map(Row::getValues).forEach(values::add); return values; } /** - * Explain the difference between this and other DB result which is helpful for - * troubleshooting in final test report. 
- * @param other other DB result - * @return explain the difference + * Explain the difference between this and other DB result which is helpful for troubleshooting in + * final test report. + * + * @param other other DB result + * @return explain the difference */ public String diff(DBResult other) { String result = diffSchema(other); @@ -143,26 +132,27 @@ private String diffDataRows(DBResult other) { } /** - * Check if two lists are same otherwise explain if size or any element - * is different at some position. + * Check if two lists are same otherwise explain if size or any element is different at some + * position. */ private String diff(String name, List thisList, List otherList) { if (thisList.size() != otherList.size()) { - return StringUtils.format("%s size is different: this=[%d], other=[%d]", - name, thisList.size(), otherList.size()); + return StringUtils.format( + "%s size is different: this=[%d], other=[%d]", name, thisList.size(), otherList.size()); } int diff = findFirstDifference(thisList, otherList); if (diff >= 0) { - return StringUtils.format("%s at [%d] is different: this=[%s], other=[%s]", + return StringUtils.format( + "%s at [%d] is different: this=[%s], other=[%s]", name, diff, thisList.get(diff), otherList.get(diff)); } return ""; } /** - * Find first different element with assumption that the lists given have same size - * and there is no NULL element inside. + * Find first different element with assumption that the lists given have same size and there is + * no NULL element inside. */ private static int findFirstDifference(List list1, List list2) { for (int i = 0; i < list1.size(); i++) { @@ -174,16 +164,14 @@ private static int findFirstDifference(List list1, List list2) { } /** - * Is data row a list that represent original order of data set - * which doesn't/shouldn't sort again. + * Is data row a list that represent original order of data set which doesn't/shouldn't sort + * again. 
*/ private boolean isDataRowOrdered() { return (dataRows instanceof List); } - /** - * Convert a collection to list and sort and return this new list. - */ + /** Convert a collection to list and sort and return this new list. */ private static > List sort(Collection collection) { ArrayList list = new ArrayList<>(collection); Collections.sort(list); @@ -200,12 +188,16 @@ public boolean equals(final Object o) { final DBResult other = (DBResult) o; // H2 calculates the value before setting column name // for example, for query "select 1 + 1" it returns a column named "2" instead of "1 + 1" - boolean skipColumnNameCheck = databaseName.equalsIgnoreCase("h2") || other.databaseName.equalsIgnoreCase("h2"); + boolean skipColumnNameCheck = + databaseName.equalsIgnoreCase("h2") || other.databaseName.equalsIgnoreCase("h2"); if (!skipColumnNameCheck && !schema.equals(other.schema)) { return false; } - if (skipColumnNameCheck && !schema.stream().map(Type::getType).collect(Collectors.toList()) - .equals(other.schema.stream().map(Type::getType).collect(Collectors.toList()))) { + if (skipColumnNameCheck + && !schema.stream() + .map(Type::getType) + .collect(Collectors.toList()) + .equals(other.schema.stream().map(Type::getType).collect(Collectors.toList()))) { return false; } return dataRows.equals(other.dataRows); diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java index 03c3967544..5cab5b3175 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Arrays.asList; @@ -29,37 +28,37 @@ import org.opensearch.sql.correctness.runner.resultset.Type; import 
org.opensearch.sql.correctness.testset.TestQuerySet; -/** - * Tests for {@link ComparisonTest} - */ +/** Tests for {@link ComparisonTest} */ @RunWith(MockitoJUnitRunner.class) public class ComparisonTestTest { - @Mock - private DBConnection openSearchConnection; + @Mock private DBConnection openSearchConnection; - @Mock - private DBConnection otherDbConnection; + @Mock private DBConnection otherDbConnection; private ComparisonTest correctnessTest; @Before public void setUp() { when(otherDbConnection.getDatabaseName()).thenReturn("Other"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection} - ); + correctnessTest = + new ComparisonTest(openSearchConnection, new DBConnection[] {otherDbConnection}); } @Test public void testSuccess() { - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); - when(otherDbConnection.select(anyString())).thenReturn( - new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))) - ); + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); + when(otherDbConnection.select(anyString())) + .thenReturn( + new DBResult( + "Other DB", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); TestReport expected = new TestReport(); expected.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); @@ -70,15 +69,18 @@ public void testSuccess() { @Test public void testFailureDueToInconsistency() { DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", 
"text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(otherDbConnection.select(anyString())).thenReturn(otherDbResult); TestReport expected = new TestReport(); expected.addTestCase( - new FailedTestCase(1, "SELECT * FROM accounts", asList(openSearchResult, otherDbResult), "")); + new FailedTestCase( + 1, "SELECT * FROM accounts", asList(openSearchResult, otherDbResult), "")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -87,16 +89,19 @@ public void testFailureDueToInconsistency() { public void testSuccessFinally() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()).thenReturn("Another"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); - DBResult anotherDbResult = new DBResult("Another DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); + DBResult anotherDbResult = + new DBResult( + "Another DB", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); 
when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(anotherDbConnection.select(anyString())).thenReturn(anotherDbResult); @@ -111,30 +116,38 @@ public void testFailureDueToEventualInconsistency() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()) .thenReturn("ZZZ DB"); // Make sure this will be called after Other DB - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); - DBResult anotherDbResult = new DBResult("ZZZ DB", asList(new Type("firstname", "text")), - asList(new Row(asList("Hank")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); + DBResult anotherDbResult = + new DBResult( + "ZZZ DB", asList(new Type("firstname", "text")), asList(new Row(asList("Hank")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(otherDbConnection.select(anyString())).thenReturn(otherDbResult); when(anotherDbConnection.select(anyString())).thenReturn(anotherDbResult); TestReport expected = new TestReport(); - expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", - asList(openSearchResult, otherDbResult, anotherDbResult), "")); + expected.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList(openSearchResult, otherDbResult, anotherDbResult), + "")); TestReport actual = 
correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @Test public void testErrorDueToESException() { - when(openSearchConnection.select(anyString())).thenThrow(new RuntimeException("All shards failure")); + when(openSearchConnection.select(anyString())) + .thenThrow(new RuntimeException("All shards failure")); TestReport expected = new TestReport(); expected.addTestCase( @@ -145,15 +158,21 @@ public void testErrorDueToESException() { @Test public void testErrorDueToNoOtherDBSupportThisQuery() { - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); when(otherDbConnection.select(anyString())) .thenThrow(new RuntimeException("Unsupported feature")); TestReport expected = new TestReport(); - expected.addTestCase(new ErrorTestCase(1, "SELECT * FROM accounts", - "No other databases support this query: Unsupported feature;")); + expected.addTestCase( + new ErrorTestCase( + 1, + "SELECT * FROM accounts", + "No other databases support this query: Unsupported feature;")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -162,17 +181,22 @@ public void testErrorDueToNoOtherDBSupportThisQuery() { public void testSuccessWhenOneDBSupportThisQuery() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()).thenReturn("Another"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); - - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); - 
when(anotherDbConnection.select(anyString())).thenReturn( - new DBResult("Another DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))) - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); + + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); + when(anotherDbConnection.select(anyString())) + .thenReturn( + new DBResult( + "Another DB", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); TestReport expected = new TestReport(); expected.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); @@ -184,12 +208,13 @@ public void testSuccessWhenOneDBSupportThisQuery() { public void testFailureDueToInconsistencyAndExceptionMixed() { DBConnection otherDBConnection2 = mock(DBConnection.class); when(otherDBConnection2.getDatabaseName()).thenReturn("ZZZ DB"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, otherDBConnection2} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, otherDBConnection2}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); DBResult otherResult = new DBResult("Other", asList(new Type("firstname", "text")), Collections.emptyList()); @@ -199,8 +224,12 @@ public void testFailureDueToInconsistencyAndExceptionMixed() { .thenThrow(new RuntimeException("Unsupported feature")); TestReport expected = new TestReport(); - expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", - asList(openSearchResult, otherResult), "Unsupported feature;")); + expected.addTestCase( + new FailedTestCase( + 1, + "SELECT * 
FROM accounts", + asList(openSearchResult, otherResult), + "Unsupported feature;")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -208,5 +237,4 @@ public void testFailureDueToInconsistencyAndExceptionMixed() { private TestQuerySet querySet(String query) { return new TestQuerySet(new String[] {query}); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java index 3f6da0c39d..793728a9e9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Collections.emptyList; @@ -19,9 +18,7 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Unit tests for {@link DBResult} - */ +/** Unit tests for {@link DBResult} */ public class DBResultTest { @Test @@ -80,35 +77,45 @@ public void dbResultWithDifferentColumnTypeShouldNotEqual() { @Test public void shouldExplainColumnTypeDifference() { - DBResult result1 = new DBResult("DB 1", - Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "FLOAT")), emptyList()); - DBResult result2 = new DBResult("DB 2", - Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "INT")), emptyList()); + DBResult result1 = + new DBResult( + "DB 1", + Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "FLOAT")), + emptyList()); + DBResult result2 = + new DBResult( + "DB 2", + Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "INT")), + emptyList()); assertEquals( "Schema type at [1] is different: " + "this=[Type(name=age, type=FLOAT)], other=[Type(name=age, type=INT)]", - result1.diff(result2) - ); + 
result1.diff(result2)); } @Test public void shouldExplainDataRowsDifference() { - DBResult result1 = new DBResult("DB 1", Arrays.asList(new Type("name", "VARCHAR")), - Sets.newHashSet( - new Row(Arrays.asList("hello")), - new Row(Arrays.asList("world")), - new Row(Lists.newArrayList((Object) null)))); - DBResult result2 = new DBResult("DB 2",Arrays.asList(new Type("name", "VARCHAR")), - Sets.newHashSet( - new Row(Lists.newArrayList((Object) null)), - new Row(Arrays.asList("hello")), - new Row(Arrays.asList("world123")))); + DBResult result1 = + new DBResult( + "DB 1", + Arrays.asList(new Type("name", "VARCHAR")), + Sets.newHashSet( + new Row(Arrays.asList("hello")), + new Row(Arrays.asList("world")), + new Row(Lists.newArrayList((Object) null)))); + DBResult result2 = + new DBResult( + "DB 2", + Arrays.asList(new Type("name", "VARCHAR")), + Sets.newHashSet( + new Row(Lists.newArrayList((Object) null)), + new Row(Arrays.asList("hello")), + new Row(Arrays.asList("world123")))); assertEquals( "Data row at [1] is different: this=[Row(values=[world])], other=[Row(values=[world123])]", - result1.diff(result2) - ); + result1.diff(result2)); } @Test @@ -125,8 +132,6 @@ public void shouldExplainDataRowsOrderDifference() { assertEquals( "Data row at [0] is different: this=[Row(values=[hello])], other=[Row(values=[world])]", - result1.diff(result2) - ); + result1.diff(result2)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java index 0e70066136..a8e01145e7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -36,17 +35,13 @@ import 
org.opensearch.sql.correctness.runner.resultset.DBResult; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Tests for {@link JDBCConnection} - */ +/** Tests for {@link JDBCConnection} */ @RunWith(MockitoJUnitRunner.class) public class JDBCConnectionTest { - @Mock - private Connection connection; + @Mock private Connection connection; - @Mock - private Statement statement; + @Mock private Statement statement; private JDBCConnection conn; @@ -60,7 +55,8 @@ public void setUp() throws SQLException { @Test public void testCreateTable() throws SQLException { - conn.create("test", + conn.create( + "test", "{\"mappings\":{\"properties\":{\"name\":{\"type\":\"keyword\"},\"age\":{\"type\":\"INT\"}}}}"); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); @@ -83,7 +79,9 @@ public void testDropTable() throws SQLException { @Test public void testInsertData() throws SQLException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList(new String[] {"John", "25"}, new String[] {"Hank", "30"})); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); @@ -93,18 +91,17 @@ public void testInsertData() throws SQLException { assertEquals( Arrays.asList( "INSERT INTO test(`name`,`age`) VALUES ('John','25')", - "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')" - ), actual - ); + "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')"), + actual); } @Test public void testInsertNullData() throws SQLException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList( - new Object[] {"John", null}, - new Object[] {null, 25}, - new Object[] {"Hank", 30})); + new Object[] {"John", null}, new Object[] {null, 25}, new Object[] {"Hank", 30})); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); verify(statement, times(3)).addBatch(argCap.capture()); @@ -114,9 +111,8 @@ public void testInsertNullData() throws 
SQLException { Arrays.asList( "INSERT INTO test(`name`,`age`) VALUES ('John',NULL)", "INSERT INTO test(`name`,`age`) VALUES (NULL,'25')", - "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')" - ), actual - ); + "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')"), + actual); } @Test @@ -129,19 +125,10 @@ public void testSelectQuery() throws SQLException { DBResult result = conn.select("SELECT * FROM test"); assertEquals("Test DB", result.getDatabaseName()); assertEquals( - Arrays.asList( - new Type("NAME", "VARCHAR"), - new Type("AGE", "INT") - ), - result.getSchema() - ); + Arrays.asList(new Type("NAME", "VARCHAR"), new Type("AGE", "INT")), result.getSchema()); assertEquals( - HashMultiset.create(ImmutableList.of( - Arrays.asList("John", 25), - Arrays.asList("Hank", 30) - )), - result.getDataRows() - ); + HashMultiset.create(ImmutableList.of(Arrays.asList("John", 25), Arrays.asList("Hank", 30))), + result.getDataRows()); } @Test @@ -153,24 +140,18 @@ public void testSelectQueryWithAlias() throws SQLException { when(resultSet.getMetaData()).thenReturn(metaData); DBResult result = conn.select("SELECT * FROM test"); - assertEquals( - Arrays.asList( - new Type("N", "VARCHAR"), - new Type("A", "INT") - ), - result.getSchema() - ); + assertEquals(Arrays.asList(new Type("N", "VARCHAR"), new Type("A", "INT")), result.getSchema()); } @Test public void testSelectQueryWithFloatInResultSet() throws SQLException { ResultSetMetaData metaData = mockMetaData(ImmutableMap.of("name", "VARCHAR", "balance", "FLOAT")); - ResultSet resultSet = mockResultSet( - new Object[] {"John", 25.123}, - new Object[] {"Hank", 30.456}, - new Object[] {"Allen", 15.1} - ); + ResultSet resultSet = + mockResultSet( + new Object[] {"John", 25.123}, + new Object[] {"Hank", 30.456}, + new Object[] {"Allen", 15.1}); when(statement.executeQuery(anyString())).thenReturn(resultSet); when(resultSet.getMetaData()).thenReturn(metaData); @@ -178,18 +159,15 @@ public void testSelectQueryWithFloatInResultSet() 
throws SQLException { assertEquals( Arrays.asList( new Type("NAME", "VARCHAR"), - new Type("BALANCE", "[FLOAT, DOUBLE, REAL, DOUBLE PRECISION, DECFLOAT]") - ), - result.getSchema() - ); + new Type("BALANCE", "[FLOAT, DOUBLE, REAL, DOUBLE PRECISION, DECFLOAT]")), + result.getSchema()); assertEquals( - HashMultiset.create(ImmutableList.of( - Arrays.asList("John", 25.13), - Arrays.asList("Hank", 30.46), - Arrays.asList("Allen", 15.1) - )), - result.getDataRows() - ); + HashMultiset.create( + ImmutableList.of( + Arrays.asList("John", 25.13), + Arrays.asList("Hank", 30.46), + Arrays.asList("Allen", 15.1))), + result.getDataRows()); } private ResultSet mockResultSet(Object[]... rows) throws SQLException { @@ -233,5 +211,4 @@ private ResultSetMetaData mockMetaData(Map nameAndTypes, String. when(metaData.getColumnCount()).thenReturn(nameAndTypes.size()); return metaData; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java b/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java index 86af85727d..e1d071d522 100644 --- a/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java @@ -47,130 +47,155 @@ protected static void deleteDataSourcesCreated() throws IOException { @SneakyThrows @Test public void createDataSourceAPITest() { - //create datasource + // create datasource DataSourceMetadata createDSM = - new DataSourceMetadata("create_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090", - "prometheus.auth.type","basicauth", - "prometheus.auth.username", "username", - "prometheus.auth.password", "password")); + new DataSourceMetadata( + "create_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of( + "prometheus.uri", + "https://localhost:9090", + "prometheus.auth.type", + "basicauth", + "prometheus.auth.username", + 
"username", + "prometheus.auth.password", + "password")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); String createResponseString = getResponseBody(response); Assert.assertEquals("Created DataSource with name create_prometheus", createResponseString); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. Thread.sleep(2000); - //get datasource to validate the creation. + // get datasource to validate the creation. Request getRequest = getFetchDataSourceRequest("create_prometheus"); Response getResponse = client().performRequest(getRequest); Assert.assertEquals(200, getResponse.getStatusLine().getStatusCode()); String getResponseString = getResponseBody(getResponse); DataSourceMetadata dataSourceMetadata = new Gson().fromJson(getResponseString, DataSourceMetadata.class); - Assert.assertEquals("https://localhost:9090", - dataSourceMetadata.getProperties().get("prometheus.uri")); + Assert.assertEquals( + "https://localhost:9090", dataSourceMetadata.getProperties().get("prometheus.uri")); } - @SneakyThrows @Test public void updateDataSourceAPITest() { - //create datasource + // create datasource DataSourceMetadata createDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090")); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. 
Thread.sleep(2000); - //update datasource + // update datasource DataSourceMetadata updateDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://randomtest.com:9090")); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://randomtest.com:9090")); Request updateRequest = getUpdateDataSourceRequest(updateDSM); Response updateResponse = client().performRequest(updateRequest); Assert.assertEquals(200, updateResponse.getStatusLine().getStatusCode()); String updateResponseString = getResponseBody(updateResponse); Assert.assertEquals("Updated DataSource with name update_prometheus", updateResponseString); - //Datasource is not immediately updated. so introducing a sleep of 2s. + // Datasource is not immediately updated. so introducing a sleep of 2s. Thread.sleep(2000); - //update datasource with invalid URI + // update datasource with invalid URI updateDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://randomtest:9090")); - final Request illFormedUpdateRequest - = getUpdateDataSourceRequest(updateDSM); - ResponseException updateResponseException - = Assert.assertThrows(ResponseException.class, () -> client().performRequest(illFormedUpdateRequest)); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://randomtest:9090")); + final Request illFormedUpdateRequest = getUpdateDataSourceRequest(updateDSM); + ResponseException updateResponseException = + Assert.assertThrows( + ResponseException.class, () -> client().performRequest(illFormedUpdateRequest)); Assert.assertEquals(400, updateResponseException.getResponse().getStatusLine().getStatusCode()); updateResponseString = 
getResponseBody(updateResponseException.getResponse()); JsonObject errorMessage = new Gson().fromJson(updateResponseString, JsonObject.class); - Assert.assertEquals("Invalid hostname in the uri: https://randomtest:9090", + Assert.assertEquals( + "Invalid hostname in the uri: https://randomtest:9090", errorMessage.get("error").getAsJsonObject().get("details").getAsString()); Thread.sleep(2000); - //get datasource to validate the modification. - //get datasource + // get datasource to validate the modification. + // get datasource Request getRequest = getFetchDataSourceRequest("update_prometheus"); Response getResponse = client().performRequest(getRequest); Assert.assertEquals(200, getResponse.getStatusLine().getStatusCode()); String getResponseString = getResponseBody(getResponse); DataSourceMetadata dataSourceMetadata = new Gson().fromJson(getResponseString, DataSourceMetadata.class); - Assert.assertEquals("https://randomtest.com:9090", - dataSourceMetadata.getProperties().get("prometheus.uri")); + Assert.assertEquals( + "https://randomtest.com:9090", dataSourceMetadata.getProperties().get("prometheus.uri")); } - @SneakyThrows @Test public void deleteDataSourceTest() { - //create datasource for deletion + // create datasource for deletion DataSourceMetadata createDSM = - new DataSourceMetadata("delete_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090")); + new DataSourceMetadata( + "delete_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. 
Thread.sleep(2000); - //delete datasource + // delete datasource Request deleteRequest = getDeleteDataSourceRequest("delete_prometheus"); Response deleteResponse = client().performRequest(deleteRequest); Assert.assertEquals(204, deleteResponse.getStatusLine().getStatusCode()); - //Datasource is not immediately deleted. so introducing a sleep of 2s. + // Datasource is not immediately deleted. so introducing a sleep of 2s. Thread.sleep(2000); - //get datasources to verify the deletion + // get datasources to verify the deletion final Request prometheusGetRequest = getFetchDataSourceRequest("delete_prometheus"); - ResponseException prometheusGetResponseException - = Assert.assertThrows(ResponseException.class, () -> client().performRequest(prometheusGetRequest)); - Assert.assertEquals( 400, prometheusGetResponseException.getResponse().getStatusLine().getStatusCode()); - String prometheusGetResponseString = getResponseBody(prometheusGetResponseException.getResponse()); + ResponseException prometheusGetResponseException = + Assert.assertThrows( + ResponseException.class, () -> client().performRequest(prometheusGetRequest)); + Assert.assertEquals( + 400, prometheusGetResponseException.getResponse().getStatusLine().getStatusCode()); + String prometheusGetResponseString = + getResponseBody(prometheusGetResponseException.getResponse()); JsonObject errorMessage = new Gson().fromJson(prometheusGetResponseString, JsonObject.class); - Assert.assertEquals("DataSource with name: delete_prometheus doesn't exist.", + Assert.assertEquals( + "DataSource with name: delete_prometheus doesn't exist.", errorMessage.get("error").getAsJsonObject().get("details").getAsString()); - } @SneakyThrows @Test public void getAllDataSourceTest() { -//create datasource for deletion + // create datasource for deletion DataSourceMetadata createDSM = - new DataSourceMetadata("get_all_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", 
"https://localhost:9090")); + new DataSourceMetadata( + "get_all_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. Thread.sleep(2000); Request getRequest = getFetchDataSourceRequest(null); @@ -183,5 +208,4 @@ public void getAllDataSourceTest() { Assert.assertTrue( dataSourceMetadataList.stream().anyMatch(ds -> ds.getName().equals("get_all_prometheus"))); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java b/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java index 8c4959707a..a54d89dabe 100644 --- a/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java @@ -19,25 +19,33 @@ public class DatasourceClusterSettingsIT extends PPLIntegTestCase { private static final Logger LOG = LogManager.getLogger(); + @Test public void testGetDatasourceClusterSettings() throws IOException { JSONObject clusterSettings = getAllClusterSettings(); - assertThat(clusterSettings.query("/defaults/plugins.query.datasources.encryption.masterkey"), + assertThat( + clusterSettings.query("/defaults/plugins.query.datasources.encryption.masterkey"), equalTo(null)); } - @Test public void testPutDatasourceClusterSettings() throws IOException { final ResponseException exception = - expectThrows(ResponseException.class, () -> updateClusterSettings(new ClusterSetting(PERSISTENT, - "plugins.query.datasources.encryption.masterkey", - "masterkey"))); + expectThrows( + ResponseException.class, + () -> + updateClusterSettings( + new ClusterSetting( + PERSISTENT, + 
"plugins.query.datasources.encryption.masterkey", + "masterkey"))); JSONObject resp = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); assertThat(resp.getInt("status"), equalTo(400)); - assertThat(resp.query("/error/root_cause/0/reason"), - equalTo("final persistent setting [plugins.query.datasources.encryption.masterkey], not updateable")); + assertThat( + resp.query("/error/root_cause/0/reason"), + equalTo( + "final persistent setting [plugins.query.datasources.encryption.masterkey], not" + + " updateable")); assertThat(resp.query("/error/type"), equalTo("settings_exception")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java index 959621dbad..81b2aad785 100644 --- a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java @@ -66,12 +66,14 @@ public void init() { public static void initConnection() { var driverFile = System.getProperty("jdbcFile"); if (driverFile != null) { - URLClassLoader loader = new URLClassLoader( - new URL[]{new File(driverFile).toURI().toURL()}, - ClassLoader.getSystemClassLoader() - ); - Driver driver = (Driver) Class.forName("org.opensearch.jdbc.Driver", true, loader) - .getDeclaredConstructor().newInstance(); + URLClassLoader loader = + new URLClassLoader( + new URL[] {new File(driverFile).toURI().toURL()}, ClassLoader.getSystemClassLoader()); + Driver driver = + (Driver) + Class.forName("org.opensearch.jdbc.Driver", true, loader) + .getDeclaredConstructor() + .newInstance(); connection = driver.connect(getConnectionString(), null); } else { connection = DriverManager.getConnection(getConnectionString()); @@ -93,7 +95,8 @@ public static void closeConnection() { @SneakyThrows public void check_driver_version() { var version = System.getProperty("jdbcDriverVersion"); - Assume.assumeTrue("Parameter `jdbcDriverVersion` is not given, test platform uses 
default driver version", + Assume.assumeTrue( + "Parameter `jdbcDriverVersion` is not given, test platform uses default driver version", version != null); assertEquals(version, connection.getMetaData().getDriverVersion()); } @@ -103,11 +106,13 @@ public void check_driver_version() { public void select_all_no_cursor() { Statement stmt = connection.createStatement(); - for (var table : List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { + for (var table : + List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { var query = String.format("SELECT * FROM %s", table); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + while(rs.next()) + rows++; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -119,11 +124,13 @@ public void select_all_no_cursor() { public void select_count_all_no_cursor() { Statement stmt = connection.createStatement(); - for (var table : List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { + for (var table : + List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { var query = String.format("SELECT COUNT(*) FROM %s", table); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -140,7 +147,8 @@ public void select_all_small_table_big_cursor() { stmt.setFetchSize(200); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -157,7 +165,8 @@ public void select_all_small_table_small_cursor() { stmt.setFetchSize(3); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); 
rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -174,7 +183,8 @@ public void select_all_big_table_small_cursor() { stmt.setFetchSize(10); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -191,16 +201,15 @@ public void select_all_big_table_big_cursor() { stmt.setFetchSize(500); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); } } - /** - * Use OpenSearch cluster initialized by OpenSearch Gradle task. - */ + /** Use OpenSearch cluster initialized by OpenSearch Gradle task. */ private static String getConnectionString() { // string like "[::1]:46751,127.0.0.1:34403" var clusterUrls = System.getProperty("tests.rest.cluster").split(","); @@ -211,7 +220,8 @@ private static String getConnectionString() { protected JSONObject executeRestQuery(String query, @Nullable Integer fetch_size) { Request request = new Request("POST", QUERY_API_ENDPOINT); if (fetch_size != null) { - request.setJsonEntity(String.format("{ \"query\": \"%s\", \"fetch_size\": %d }", query, fetch_size)); + request.setJsonEntity( + String.format("{ \"query\": \"%s\", \"fetch_size\": %d }", query, fetch_size)); } else { request.setJsonEntity(String.format("{ \"query\": \"%s\" }", query)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java index e064300e4f..37398220ff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -24,10 +23,9 @@ protected void init() throws Exception { @Test public void noGroupKeySingleFuncOverAggWithoutAliasShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT abs(MAX(age)) " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT abs(MAX(age)) " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("abs(MAX(age))", null, "long")); verifyDataRows(response, rows(40)); @@ -35,10 +33,10 @@ public void noGroupKeySingleFuncOverAggWithoutAliasShouldPass() { @Test public void noGroupKeyMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT MAX(age) + MIN(age) as addValue " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT MAX(age) + MIN(age) as addValue " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("MAX(age) + MIN(age)", "addValue", "long")); verifyDataRows(response, rows(60)); @@ -46,10 +44,9 @@ public void noGroupKeyMaxAddMinShouldPass() { @Test public void noGroupKeyMaxAddLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT MAX(age) + 1 as `add` " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT MAX(age) + 1 as `add` " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("MAX(age) + 1", "add", "long")); verifyDataRows(response, rows(41)); @@ -57,10 +54,9 @@ public void noGroupKeyMaxAddLiteralShouldPass() { @Test public void noGroupKeyAvgOnIntegerShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT AVG(age) as `avg` " + - "FROM %s", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT AVG(age) as `avg` " + "FROM %s", 
Index.BANK.getName())); verifySchema(response, schema("AVG(age)", "avg", "double")); verifyDataRows(response, rows(34D)); @@ -68,58 +64,49 @@ public void noGroupKeyAvgOnIntegerShouldPass() { @Test public void hasGroupKeyAvgOnIntegerShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, AVG(age) as `avg` " + - "FROM %s " + - "GROUP BY gender", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, AVG(age) as `avg` " + "FROM %s " + "GROUP BY gender", + Index.BANK.getName())); - verifySchema(response, - schema("gender", null, "text"), - schema("AVG(age)", "avg", "double")); - verifyDataRows(response, - rows("m", 34.25), - rows("f", 33.666666666666664d)); + verifySchema(response, schema("gender", null, "text"), schema("AVG(age)", "avg", "double")); + verifyDataRows(response, rows("m", 34.25), rows("f", 33.666666666666664d)); } @Test public void hasGroupKeyMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, MAX(age) + MIN(age) as addValue " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, MAX(age) + MIN(age) as addValue " + "FROM %s " + "GROUP BY gender", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("MAX(age) + MIN(age)", "addValue", "long")); - verifyDataRows(response, - rows("m", 60), - rows("f", 60)); + verifyDataRows(response, rows("m", 60), rows("f", 60)); } @Test public void hasGroupKeyMaxAddLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, MAX(age) + 1 as `add` " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, MAX(age) + 1 as `add` " + "FROM %s " + "GROUP BY gender", + Index.ACCOUNT.getName())); - 
verifySchema(response, - schema("gender", null, "text"), - schema("MAX(age) + 1", "add", "long")); - verifyDataRows(response, - rows("m", 41), - rows("f", 41)); + verifySchema(response, schema("gender", null, "text"), schema("MAX(age) + 1", "add", "long")); + verifyDataRows(response, rows("m", 41), rows("f", 41)); } @Test public void noGroupKeyLogMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT Log(MAX(age) + MIN(age)) as `log` " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT Log(MAX(age) + MIN(age)) as `log` " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("Log(MAX(age) + MIN(age))", "log", "double")); verifyDataRows(response, rows(4.0943445622221d)); @@ -127,117 +114,124 @@ public void noGroupKeyLogMaxAddMinShouldPass() { @Test public void hasGroupKeyLogMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(MAX(age) + MIN(age)) as logValue " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(MAX(age) + MIN(age)) as logValue " + + "FROM %s " + + "GROUP BY gender", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(MAX(age) + MIN(age))", "logValue", "double")); - verifyDataRows(response, - rows("m", 4.0943445622221d), - rows("f", 4.0943445622221d)); + verifyDataRows(response, rows("m", 4.0943445622221d), rows("f", 4.0943445622221d)); } @Test public void AddLiteralOnGroupKeyShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, age+10, max(balance) as `max` " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + 
String.format( + "SELECT gender, age+10, max(balance) as `max` " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("age+10", null, "long"), schema("max(balance)", "max", "long")); - verifyDataRows(response, - rows("m", 30, 49568), - rows("m", 31, 49433)); + verifyDataRows(response, rows("m", 30, 49568), rows("m", 31, 49433)); } @Test public void logWithAddLiteralOnGroupKeyShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(age+10) as logAge, max(balance) as max " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(age+10) as logAge, max(balance) as max " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(age+10)", "logAge", "double"), schema("max(balance)", "max", "long")); - verifyDataRows(response, - rows("m", 3.4011973816621555d, 49568), - rows("m", 3.4339872044851463d, 49433)); + verifyDataRows( + response, rows("m", 3.4011973816621555d, 49568), rows("m", 3.4339872044851463d, 49433)); } @Test public void logWithAddLiteralOnGroupKeyAndMaxSubtractLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(age+10) as logAge, max(balance) - 100 as max " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(age+10) as logAge, max(balance) - 100 as max " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + 
"GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(age+10)", "logAge", "double"), schema("max(balance) - 100", "max", "long")); - verifyDataRows(response, - rows("m", 3.4011973816621555d, 49468), - rows("m", 3.4339872044851463d, 49333)); + verifyDataRows( + response, rows("m", 3.4011973816621555d, 49468), rows("m", 3.4339872044851463d, 49333)); } - /** - * The date is in JDBC format. - */ + /** The date is in JDBC format. */ @Test public void groupByDateShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT birthdate, count(*) as `count` " + - "FROM %s " + - "WHERE age < 30 " + - "GROUP BY birthdate ", - Index.BANK.getName())); - - verifySchema(response, - schema("birthdate", null, "timestamp"), - schema("count(*)", "count", "integer")); - verifyDataRows(response, - rows("2018-06-23 00:00:00", 1)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT birthdate, count(*) as `count` " + + "FROM %s " + + "WHERE age < 30 " + + "GROUP BY birthdate ", + Index.BANK.getName())); + + verifySchema( + response, schema("birthdate", null, "timestamp"), schema("count(*)", "count", "integer")); + verifyDataRows(response, rows("2018-06-23 00:00:00", 1)); } @Test public void groupByDateWithAliasShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT birthdate as birth, count(*) as `count` " + - "FROM %s " + - "WHERE age < 30 " + - "GROUP BY birthdate ", - Index.BANK.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT birthdate as birth, count(*) as `count` " + + "FROM %s " + + "WHERE age < 30 " + + "GROUP BY birthdate ", + Index.BANK.getName())); + + verifySchema( + response, schema("birthdate", "birth", "timestamp"), schema("count(*)", "count", "integer")); - verifyDataRows(response, - rows("2018-06-23 00:00:00", 1)); + verifyDataRows(response, 
rows("2018-06-23 00:00:00", 1)); } @Test public void aggregateCastStatementShouldNotReturnZero() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT SUM(CAST(male AS INT)) AS male_sum FROM %s", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT SUM(CAST(male AS INT)) AS male_sum FROM %s", Index.BANK.getName())); verifySchema(response, schema("SUM(CAST(male AS INT))", "male_sum", "integer")); verifyDataRows(response, rows(4)); @@ -245,8 +239,8 @@ public void aggregateCastStatementShouldNotReturnZero() { @Test public void groupByConstantShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "select 1 from %s GROUP BY 1", Index.BANK.getName())); + JSONObject response = + executeJdbcRequest(String.format("select 1 from %s GROUP BY 1", Index.BANK.getName())); verifySchema(response, schema("1", null, "integer")); verifyDataRows(response, rows(1)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java index 9687e43823..e053d3d7cf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -60,8 +59,9 @@ public void countTest() throws IOException { @Ignore("The distinct is not supported in new engine") public void countDistinctTest() { - JSONObject response = executeJdbcRequest( - String.format("SELECT COUNT(distinct gender) FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format("SELECT COUNT(distinct gender) FROM %s", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(DISTINCT gender)", null, "integer")); verifyDataRows(response, rows(2)); @@ -71,8 +71,9 @@ public void countDistinctTest() { public 
void countWithDocsHintTest() throws Exception { JSONObject result = - executeQuery(String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format( + "SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", TEST_INDEX_ACCOUNT)); JSONArray hits = (JSONArray) result.query("/hits/hits"); Assert.assertThat(hits.length(), equalTo(10)); } @@ -83,8 +84,8 @@ public void sumTest() throws IOException { JSONObject result = executeQuery(String.format("SELECT SUM(balance) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert.assertThat(getDoubleAggregationValue(result, "SUM(balance)", "value"), - equalTo(25714837.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "SUM(balance)", "value"), equalTo(25714837.0)); } @Test @@ -127,23 +128,28 @@ public void statsTest() throws IOException { @Test public void extendedStatsTest() throws IOException { - JSONObject result = executeQuery(String.format("SELECT EXTENDED_STATS(age) FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT EXTENDED_STATS(age) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert - .assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "min"), equalTo(20.0)); - Assert - .assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "max"), equalTo(40.0)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "avg"), - equalTo(30.171)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum"), - equalTo(30171.0)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum_of_squares"), + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "min"), equalTo(20.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "max"), equalTo(40.0)); + Assert.assertThat( + 
getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "avg"), equalTo(30.171)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum"), equalTo(30171.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum_of_squares"), equalTo(946393.0)); - Assert.assertEquals(6.008640362012022, - getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "std_deviation"), 0.0001); - Assert.assertEquals(36.10375899999996, - getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "variance"), 0.0001); + Assert.assertEquals( + 6.008640362012022, + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "std_deviation"), + 0.0001); + Assert.assertEquals( + 36.10375899999996, + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "variance"), + 0.0001); } @Test @@ -152,72 +158,73 @@ public void percentileTest() throws IOException { JSONObject result = executeQuery(String.format("SELECT PERCENTILES(age) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert - .assertEquals(20.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "1.0"), - 0.001); - Assert - .assertEquals(21.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "5.0"), - 0.001); - Assert - .assertEquals(25.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "25.0"), - 0.001); - // All percentiles are approximations calculated by t-digest, however, P50 has the widest distribution (not sure why) - Assert - .assertEquals(30.5, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "50.0"), - 0.6); - Assert - .assertEquals(35.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "75.0"), - 0.6); - Assert - .assertEquals(39.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "95.0"), - 0.6); - Assert - .assertEquals(40.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "99.0"), - 0.6); + Assert.assertEquals( + 20.0, 
getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "1.0"), 0.001); + Assert.assertEquals( + 21.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "5.0"), 0.001); + Assert.assertEquals( + 25.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "25.0"), 0.001); + // All percentiles are approximations calculated by t-digest, however, P50 has the widest + // distribution (not sure why) + Assert.assertEquals( + 30.5, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "50.0"), 0.6); + Assert.assertEquals( + 35.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "75.0"), 0.6); + Assert.assertEquals( + 39.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "95.0"), 0.6); + Assert.assertEquals( + 40.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "99.0"), 0.6); } @Test public void percentileTestSpecific() throws IOException { - JSONObject result = executeQuery(String.format("SELECT PERCENTILES(age,25.0,75.0) FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format("SELECT PERCENTILES(age,25.0,75.0) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert.assertEquals(25.0, - getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "25.0"), 0.6); - Assert.assertEquals(35.0, - getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "75.0"), 0.6); + Assert.assertEquals( + 25.0, + getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "25.0"), + 0.6); + Assert.assertEquals( + 35.0, + getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "75.0"), + 0.6); } @Test public void aliasTest() throws IOException { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) AS mycount FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT COUNT(*) AS mycount FROM %s", TEST_INDEX_ACCOUNT)); 
Assert.assertThat(getTotalHits(result), equalTo(1000)); Assert.assertThat(getIntAggregationValue(result, "mycount", "value"), equalTo(1000)); } @Test public void groupByTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @Test public void groupByUsingTableAliasTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s a GROUP BY a.gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format("SELECT COUNT(*) FROM %s a GROUP BY a.gender", TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @Test public void groupByUsingTableNamePrefixTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT COUNT(*) FROM %s GROUP BY opensearch-sql_test_index_account.gender", - TEST_INDEX_ACCOUNT - )); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY opensearch-sql_test_index_account.gender", + TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @@ -241,31 +248,34 @@ private void assertResultForGroupByTest(JSONObject result) { @Test public void groupByHavingTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT gender " + - "FROM %s " + - "GROUP BY gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT gender FROM %s GROUP BY gender HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @Test public void groupByHavingUsingTableAliasTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT a.gender " + - "FROM %s a " + - "GROUP BY a.gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + 
String.format( + "SELECT a.gender FROM %s a GROUP BY a.gender HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @Test public void groupByHavingUsingTableNamePrefixTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT opensearch-sql_test_index_account.gender " + - "FROM %s " + - "GROUP BY opensearch-sql_test_index_account.gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT opensearch-sql_test_index_account.gender " + + "FROM %s " + + "GROUP BY opensearch-sql_test_index_account.gender " + + "HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @@ -287,15 +297,17 @@ private void assertResultForGroupByHavingTest(JSONObject result) { Assert.assertThat(gender.query(femaleBucketPrefix + "/count_0/value"), equalTo(493)); } - @Ignore //todo VerificationException: table alias or field name missing + @Ignore // todo VerificationException: table alias or field name missing @Test public void groupBySubqueryTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT COUNT(*) FROM %s " + - "WHERE firstname IN (SELECT firstname FROM %s) " + - "GROUP BY gender", - TEST_INDEX_ACCOUNT, TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s " + + "WHERE firstname IN (SELECT firstname FROM %s) " + + "GROUP BY gender", + TEST_INDEX_ACCOUNT, TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -316,9 +328,12 @@ public void groupBySubqueryTest() throws Exception { @Test public void postFilterTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT /*! 
POST_FILTER({\\\"term\\\":" + - "{\\\"gender\\\":\\\"m\\\"}}) */ COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT /*! POST_FILTER({\\\"term\\\":" + + "{\\\"gender\\\":\\\"m\\\"}}) */ COUNT(*) FROM %s GROUP BY gender", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(507)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -339,9 +354,12 @@ public void postFilterTest() throws Exception { @Test public void multipleGroupByTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender," + - " terms('field'='age','size'=200,'alias'='age')", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY gender," + + " terms('field'='age','size'=200,'alias'='age')", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -364,9 +382,11 @@ public void multipleGroupByTest() throws Exception { final Set actualAgesM = new HashSet<>(expectedAges.size()); final Set actualAgesF = new HashSet<>(expectedAges.size()); - mAgeBuckets.iterator() + mAgeBuckets + .iterator() .forEachRemaining(json -> actualAgesM.add(((JSONObject) json).getInt("key"))); - fAgeBuckets.iterator() + fAgeBuckets + .iterator() .forEachRemaining(json -> actualAgesF.add(((JSONObject) json).getInt("key"))); Assert.assertThat(actualAgesM, equalTo(expectedAges)); @@ -376,9 +396,12 @@ public void multipleGroupByTest() throws Exception { @Test public void multipleGroupBysWithSize() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender," + - " terms('alias'='ageAgg','field'='age','size'=3)", - TEST_INDEX_ACCOUNT)); + JSONObject result = 
+ executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY gender," + + " terms('alias'='ageAgg','field'='age','size'=3)", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -393,9 +416,12 @@ public void multipleGroupBysWithSize() throws Exception { @Test public void termsWithSize() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY terms" + - "('alias'='ageAgg','field'='age','size'=3)", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY terms" + + "('alias'='ageAgg','field'='age','size'=3)", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "ageAgg"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(3)); @@ -404,9 +430,12 @@ public void termsWithSize() throws Exception { @Test public void termsWithMissing() throws Exception { - JSONObject result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('alias'='nick','field'='nickname','missing'='no_nickname')", - TEST_INDEX_GAME_OF_THRONES)); + JSONObject result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('alias'='nick','field'='nickname','missing'='no_nickname')", + TEST_INDEX_GAME_OF_THRONES)); JSONObject nick = getAggregation(result, "nick"); Optional noNicknameBucket = Optional.empty(); @@ -427,9 +456,12 @@ public void termsWithOrder() throws Exception { final String dog1 = "snoopy"; final String dog2 = "rex"; - JSONObject result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('field'='dog_name', 'alias'='dog_name', 'order'='desc')", - TEST_INDEX_DOG)); + JSONObject result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + 
"('field'='dog_name', 'alias'='dog_name', 'order'='desc')", + TEST_INDEX_DOG)); JSONObject dogName = getAggregation(result, "dog_name"); String firstDog = (String) (dogName.optQuery("/buckets/0/key")); @@ -437,8 +469,12 @@ public void termsWithOrder() throws Exception { Assert.assertThat(firstDog, equalTo(dog1)); Assert.assertThat(secondDog, equalTo(dog2)); - result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('field'='dog_name', 'alias'='dog_name', 'order'='asc')", TEST_INDEX_DOG)); + result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('field'='dog_name', 'alias'='dog_name', 'order'='asc')", + TEST_INDEX_DOG)); dogName = getAggregation(result, "dog_name"); @@ -450,92 +486,96 @@ public void termsWithOrder() throws Exception { @Test public void orderByAscTest() { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY gender ORDER BY COUNT(*)", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY gender ORDER BY COUNT(*)", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRows(response, - rows(493), - rows(507)); + verifyDataRows(response, rows(493), rows(507)); } @Test public void orderByAliasAscTest() { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) as count FROM %s " + - "GROUP BY gender ORDER BY count", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) as count FROM %s " + "GROUP BY gender ORDER BY count", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows(493), - rows(507)); + verifyDataRowsInOrder(response, rows(493), rows(507)); } @Test public void orderByDescTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - 
"GROUP BY gender ORDER BY COUNT(*) DESC", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY gender ORDER BY COUNT(*) DESC", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRowsInOrder(response, - rows(507), - rows(493)); + verifyDataRowsInOrder(response, rows(507), rows(493)); } @Test public void orderByAliasDescTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) as count FROM %s " + - "GROUP BY gender ORDER BY count DESC", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) as count FROM %s " + "GROUP BY gender ORDER BY count DESC", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows(507), - rows(493)); + verifyDataRowsInOrder(response, rows(507), rows(493)); } @Test public void orderByGroupFieldWithAlias() throws IOException { // ORDER BY field name - JSONObject response = executeJdbcRequest(String.format("SELECT gender as g, COUNT(*) as count " - + "FROM %s GROUP BY gender ORDER BY gender", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender as g, COUNT(*) as count " + + "FROM %s GROUP BY gender ORDER BY gender", + TEST_INDEX_ACCOUNT)); - verifySchema(response, - schema("gender", "g", "text"), - schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows("f", 493), - rows("m", 507)); + verifySchema(response, schema("gender", "g", "text"), schema("COUNT(*)", "count", "integer")); + verifyDataRowsInOrder(response, rows("f", 493), rows("m", 507)); // ORDER BY field alias - response = executeJdbcRequest(String.format("SELECT gender as g, COUNT(*) as count " - + "FROM %s GROUP BY gender ORDER BY g", TEST_INDEX_ACCOUNT)); - - verifySchema(response, - schema("gender", "g", "text"), - 
schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows("f", 493), - rows("m", 507)); + response = + executeJdbcRequest( + String.format( + "SELECT gender as g, COUNT(*) as count " + "FROM %s GROUP BY gender ORDER BY g", + TEST_INDEX_ACCOUNT)); + + verifySchema(response, schema("gender", "g", "text"), schema("COUNT(*)", "count", "integer")); + verifyDataRowsInOrder(response, rows("f", 493), rows("m", 507)); } @Test public void limitTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY age ORDER BY COUNT(*) LIMIT 5", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY age ORDER BY COUNT(*) LIMIT 5", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRowsInOrder(response, - rows(35), - rows(39), - rows(39), - rows(42), - rows(42)); + verifyDataRowsInOrder(response, rows(35), rows(39), rows(39), rows(42), rows(42)); } @Test public void countGroupByRange() throws IOException { - JSONObject result = executeQuery(String.format("SELECT COUNT(age) FROM %s" + - " GROUP BY range(age, 20,25,30,35,40)", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(age) FROM %s" + " GROUP BY range(age, 20,25,30,35,40)", + TEST_INDEX_ACCOUNT)); JSONObject ageAgg = getAggregation(result, "range(age,20,25,30,35,40)"); JSONArray buckets = ageAgg.getJSONArray("buckets"); Assert.assertThat(buckets.length(), equalTo(4)); @@ -544,7 +584,8 @@ public void countGroupByRange() throws IOException { for (int i = 0; i < expectedResults.length; ++i) { - Assert.assertThat(buckets.query(String.format(Locale.ROOT, "/%d/COUNT(age)/value", i)), + Assert.assertThat( + buckets.query(String.format(Locale.ROOT, "/%d/COUNT(age)/value", i)), equalTo(expectedResults[i])); } } @@ -556,42 +597,58 @@ public void countGroupByRange() throws IOException { public void 
countGroupByDateTest() throws IOException { String result = - explainQuery(String.format("select insert_time from %s group by date_histogram" + - "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','min_doc_count'=5) ", - TEST_INDEX_ONLINE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - containsString("{\"date_histogram\":{\"field\":\"insert_time\",\"format\":\"yyyy-MM\"," + - "\"fixed_interval\":\"1h\",\"offset\":0,\"order\":{\"_key\":\"asc\"},\"keyed\":false," + - "\"min_doc_count\":5}")); + explainQuery( + String.format( + "select insert_time from %s group by" + + " date_histogram('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','min_doc_count'=5)" + + " ", + TEST_INDEX_ONLINE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + containsString( + "{\"date_histogram\":{\"field\":\"insert_time\",\"format\":\"yyyy-MM\"," + + "\"fixed_interval\":\"1h\",\"offset\":0,\"order\":{\"_key\":\"asc\"},\"keyed\":false," + + "\"min_doc_count\":5}")); } @Test public void countGroupByDateTestWithAlias() throws IOException { String result = - explainQuery(String.format("select insert_time from %s group by date_histogram" + - "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','alias'='myAlias')", - TEST_INDEX_ONLINE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - containsString("myAlias\":{\"date_histogram\":{\"field\":\"insert_time\"," + - "\"format\":\"yyyy-MM\",\"fixed_interval\":\"1h\"")); - } - -// /** -// * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html -// */ -// @Test -// public void countDateRangeTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// String result = explainQuery(String.format("select online from %s group by date_range(field='insert_time'," + -// "'format'='yyyy-MM-dd' ,'2014-08-18','2014-08-17','now-8d','now-7d','now-6d','now')", -// TEST_INDEX_ONLINE)); -// // TODO: fix the query or fix 
the code for the query to work -// } + explainQuery( + String.format( + "select insert_time from %s group by date_histogram" + + "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','alias'='myAlias')", + TEST_INDEX_ONLINE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + containsString( + "myAlias\":{\"date_histogram\":{\"field\":\"insert_time\"," + + "\"format\":\"yyyy-MM\",\"fixed_interval\":\"1h\"")); + } + + /** + * + * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html + */ + @Test + @Ignore + public void countDateRangeTest() throws IOException { + String result = + explainQuery( + String.format( + "select online from %s group by date_range(" + + "field='insert_time', 'format'='yyyy-MM-dd' ,'2014-08-18','2014-08-17', " + + "'now-8d','now-7d','now-6d','now')", + TEST_INDEX_ONLINE)); + // TODO: fix the query or fix the code for the query to work + } @Test public void topHitTest() throws IOException { - String query = String - .format("select topHits('size'=3,age='desc') from %s group by gender", TEST_INDEX_ACCOUNT); + String query = + String.format( + "select topHits('size'=3,age='desc') from %s group by gender", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -604,7 +661,8 @@ public void topHitTest() throws IOException { final String femaleBucketPrefix = String.format(Locale.ROOT, "/buckets/%d", femaleBucketId); Assert.assertThat(gender.query(maleBucketPrefix + "/key"), equalTo("m")); - Assert.assertThat(gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), equalTo(507)); Assert.assertThat( gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/relation"), @@ -614,9 +672,9 @@ public void 
topHitTest() throws IOException { .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); - Assert - .assertThat(gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), - equalTo(493)); + Assert.assertThat( + gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), + equalTo(493)); Assert.assertThat( gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/relation"), equalTo("eq")); @@ -630,7 +688,8 @@ public void topHitTest() throws IOException { public void topHitTest_WithInclude() throws IOException { String query = - String.format("select topHits('size'=3,age='desc','include'=age) from %s group by gender", + String.format( + "select topHits('size'=3,age='desc','include'=age) from %s group by gender", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); @@ -647,28 +706,41 @@ public void topHitTest_WithInclude() throws IOException { Assert.assertThat( gender.query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/value"), equalTo(507)); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")) + .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); Assert.assertThat( gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/value"), equalTo(493)); - Assert.assertThat(gender - .query(femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), + 
Assert.assertThat( + gender.query( + femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")) + .length(), equalTo(3)); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,age=desc,include=age)/hits/hits/%d/_source", i, j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + "/buckets/%d/topHits(size=3,age=desc,include=age)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(1)); Assert.assertTrue(source.has("age")); Assert.assertThat(source.getInt("age"), equalTo(40)); @@ -680,17 +752,24 @@ public void topHitTest_WithInclude() throws IOException { public void topHitTest_WithIncludeTwoFields() throws IOException { String query = - String.format("select topHits('size'=3,'include'='age,firstname',age='desc') from %s " + - "group by gender", TEST_INDEX_ACCOUNT); + String.format( + "select topHits('size'=3,'include'='age,firstname',age='desc') from %s " + + "group by gender", + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,include=age,firstname,age=desc)/hits/hits/%d/_source", i, - j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + "/buckets/%d/topHits(size=3,include=age,firstname,age=desc)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), 
equalTo(2)); Assert.assertTrue(source.has("age")); Assert.assertThat(source.getInt("age"), equalTo(40)); @@ -704,8 +783,10 @@ public void topHitTest_WithIncludeTwoFields() throws IOException { @Test public void topHitTest_WithExclude() throws IOException { - String query = String.format("select topHits('size'=3,'exclude'='lastname',age='desc') from " + - "%s group by gender", TEST_INDEX_ACCOUNT); + String query = + String.format( + "select topHits('size'=3,'exclude'='lastname',age='desc') from " + "%s group by gender", + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -718,44 +799,61 @@ public void topHitTest_WithExclude() throws IOException { final String femaleBucketPrefix = String.format(Locale.ROOT, "/buckets/%d", femaleBucketId); Assert.assertThat(gender.query(maleBucketPrefix + "/key"), equalTo("m")); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), equalTo(507)); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")) + .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); - Assert.assertThat(gender - .query(femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), + Assert.assertThat( + 
gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), equalTo(493)); - Assert.assertThat(gender.query( - femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), + Assert.assertThat( + gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")) + .length(), equalTo(3)); - final Set expectedFields = new HashSet<>(Arrays.asList( - "account_number", - "firstname", - "address", - "balance", - "gender", - "city", - "employer", - "state", - "age", - "email" - )); + final Set expectedFields = + new HashSet<>( + Arrays.asList( + "account_number", + "firstname", + "address", + "balance", + "gender", + "city", + "employer", + "state", + "age", + "email")); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,exclude=lastname,age=desc)/hits/hits/%d/_source", i, j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + "/buckets/%d/topHits(size=3,exclude=lastname,age=desc)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(expectedFields.size())); Assert.assertFalse(source.has("lastname")); Assert.assertThat(source.keySet().containsAll(expectedFields), equalTo(true)); @@ -763,254 +861,300 @@ public void topHitTest_WithExclude() throws IOException { } } - //region not migrated - - // script on metric aggregation tests. 
uncomment if your elastic has scripts enable (disabled by default) -// @Test -// public void sumWithScriptTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(script('','doc[\\'balance\\'].value + doc[\\'balance\\'].value')) as doubleSum FROM %s", TEST_INDEX)); -// Sum sum = result.get("doubleSum"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void sumWithImplicitScriptTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(balance + balance) as doubleSum FROM %s", TEST_INDEX)); -// Sum sum = result.get("doubleSum"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void sumWithScriptTestNoAlias() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(balance + balance) FROM %s", TEST_INDEX)); -// Sum sum = result.get("SUM(script=script(balance + balance,doc('balance').value + doc('balance').value))"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void scriptedMetricAggregation() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query ("select scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value); } " + -// "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value); }}'," + -// "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ sumThem += a.get(\\'ages\\');} }; return sumThem;') as wierdSum from " + TEST_INDEX + ""); -// ScriptedMetric metric = result.get("wierdSum"); -// Assert.assertEquals(136L,metric.aggregation()); -// } -// -// @Test -// public void scriptedMetricConcatWithStringParamAndReduceParamAggregation() throws 
SQLFeatureNotSupportedException, SqlParseException { -// String query = "select scripted_metric(\n" + -// " 'init_script' = '_agg[\"concat\"]=[] ',\n" + -// " 'map_script'='_agg.concat.add(doc[field].value)' ,\n" + -// " 'combine_script'='return _agg.concat.join(delim);',\t\t\t\t\n" + -// " 'reduce_script'='_aggs.removeAll(\"\"); return _aggs.join(delim)'," + -// "'@field' = 'name.firstname' , '@delim'=';',@reduce_delim =';' ) as all_characters \n" + -// "from "+TEST_INDEX+""; -// Aggregations result = query (query); -// ScriptedMetric metric = result.get("all_characters"); -// List names = Arrays.asList(metric.aggregation().toString().split(";")); -// -// -// Assert.assertEquals(4,names.size()); -// String[] expectedNames = new String[]{"brandon","daenerys","eddard","jaime"}; -// for(String name : expectedNames){ -// Assert.assertTrue("not contains:" + name,names.contains(name)); -// } -// } -// -// @Test -// public void scriptedMetricAggregationWithNumberParams() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query ("select scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value+x); } " + -// "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value+x); }}'," + -// "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ sumThem += a.get(\\'ages\\');} }; return sumThem;'" + -// ",'@x'=3) as wierdSum from " + TEST_INDEX + ""); -// ScriptedMetric metric = result.get("wierdSum"); -// Assert.assertEquals(148L,metric.aggregation()); -// } -// - -// @Test -// public void topHitTest_WithIncludeAndExclude() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("select topHits('size'=3,'exclude'='lastname','include'='firstname,lastname',age='desc') from %s group by gender ", TEST_INDEX_ACCOUNT)); -// List buckets = ((Terms) 
(result.asList().get(0))).getBuckets(); -// for (Terms.Bucket bucket : buckets) { -// SearchHits hits = ((InternalTopHits) bucket.getAggregations().asList().get(0)).getHits(); -// for (SearchHit hit : hits) { -// Set fields = hit.getSourceAsMap().keySet(); -// Assert.assertEquals(1, fields.size()); -// Assert.assertTrue(fields.contains("firstname")); -// } -// } -// } -// -// private Aggregations query(String query) throws SqlParseException, SQLFeatureNotSupportedException { -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// return ((SearchResponse)select.get()).getAggregations(); -// } -// -// private SqlElasticSearchRequestBuilder getSearchRequestBuilder(String query) throws SqlParseException, SQLFeatureNotSupportedException { -// SearchDao searchDao = MainTestSuite.getSearchDao(); -// return (SqlElasticSearchRequestBuilder) searchDao.explain(query).explain(); -// } -// -// @Test -// public void testFromSizeWithAggregations() throws Exception { -// final String query1 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(0,1) */" + -// " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); -// SearchResponse response1 = (SearchResponse) getSearchRequestBuilder(query1).get(); -// -// Assert.assertEquals(1, response1.getHits().getHits().length); -// Terms gender1 = response1.getAggregations().get("gender"); -// Assert.assertEquals(2, gender1.getBuckets().size()); -// Object account1 = response1.getHits().getHits()[0].getSourceAsMap().get("account_number"); -// -// final String query2 = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(1,1) */" + -// " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); -// SearchResponse response2 = (SearchResponse) getSearchRequestBuilder(query2).get(); -// -// Assert.assertEquals(1, response2.getHits().getHits().length); -// Terms gender2 = response2.getAggregations().get("gender"); -// Assert.assertEquals(2, gender2.getBuckets().size()); -// Object account2 = response2.getHits().getHits()[0].getSourceAsMap().get("account_number"); -// -// Assert.assertEquals(response1.getHits().getTotalHits(), response2.getHits().getTotalHits()); -// Assert.assertNotEquals(account1, account2); -// } -// -// @Test -// public void testSubAggregations() throws Exception { -// Set expectedAges = new HashSet<>(ContiguousSet.create(Range.closed(20, 40), DiscreteDomain.integers())); -// final String query = String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */" + -// " * FROM %s GROUP BY (gender, terms('field'='age','size'=200,'alias'='age')), (state) LIMIT 200,200", TEST_INDEX_ACCOUNT); -// -// Map> buckets = new HashMap<>(); -// -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// SearchResponse response = (SearchResponse) select.get(); -// Aggregations result = response.getAggregations(); -// -// Terms gender = result.get("gender"); -// for(Terms.Bucket genderBucket : gender.getBuckets()) { -// String genderKey = genderBucket.getKey().toString(); -// buckets.put(genderKey, new HashSet()); -// Terms ageBuckets = (Terms) genderBucket.getAggregations().get("age"); -// for(Terms.Bucket ageBucket : ageBuckets.getBuckets()) { -// buckets.get(genderKey).add(Integer.parseInt(ageBucket.getKey().toString())); -// } -// } -// -// Assert.assertEquals(2, buckets.keySet().size()); -// Assert.assertEquals(expectedAges, buckets.get("m")); -// Assert.assertEquals(expectedAges, buckets.get("f")); -// -// Terms state = result.get("state.keyword"); -// for(Terms.Bucket stateBucket : state.getBuckets()) { -// 
if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { -// Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); -// } -// } -// -// Assert.assertEquals(response.getHits().getTotalHits(), 1000); -// Assert.assertEquals(response.getHits().getHits().length, 10); -// } -// -// @Test -// public void testSimpleSubAggregations() throws Exception { -// final String query = String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ * FROM %s GROUP BY (gender), (state) ", TEST_INDEX_ACCOUNT); -// -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// SearchResponse response = (SearchResponse) select.get(); -// Aggregations result = response.getAggregations(); -// -// Terms gender = result.get("gender"); -// for(Terms.Bucket genderBucket : gender.getBuckets()) { -// String genderKey = genderBucket.getKey().toString(); -// Assert.assertTrue("Gender should be m or f", genderKey.equals("m") || genderKey.equals("f")); -// } -// -// Assert.assertEquals(2, gender.getBuckets().size()); -// -// Terms state = result.get("state.keyword"); -// for(Terms.Bucket stateBucket : state.getBuckets()) { -// if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { -// Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); -// } -// } -// -// Assert.assertEquals(response.getHits().getTotalHits(), 1000); -// Assert.assertEquals(response.getHits().getHits().length, 10); -// } -// -// @Test -// public void geoHashGrid() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s/location GROUP BY geohash_grid(field='center',precision=5) ", TEST_INDEX_LOCATION)); -// InternalGeoHashGrid grid = result.get("geohash_grid(field=center,precision=5)"); -// Collection buckets = grid.getBuckets(); -// for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { -// Assert.assertTrue(bucket.getKeyAsString().equals("w2fsm") || 
bucket.getKeyAsString().equals("w0p6y") ); -// Assert.assertEquals(1,bucket.getDocCount()); -// } -// } -// -// @Test -// public void geoBounds() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query(String.format("SELECT * FROM %s/location GROUP BY geo_bounds(field='center',alias='bounds') ", TEST_INDEX_LOCATION)); -// InternalGeoBounds bounds = result.get("bounds"); -// Assert.assertEquals(0.5,bounds.bottomRight().getLat(),0.001); -// Assert.assertEquals(105.0,bounds.bottomRight().getLon(),0.001); -// Assert.assertEquals(5.0,bounds.topLeft().getLat(),0.001); -// Assert.assertEquals(100.5,bounds.topLeft().getLon(),0.001); -// } -// -// @Test -// public void groupByOnNestedFieldTest() throws Exception { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)", TEST_INDEX_NESTED_TYPE)); -// InternalNested nested = result.get("message.info@NESTED"); -// Terms infos = nested.getAggregations().get("message.info"); -// Assert.assertEquals(3,infos.getBuckets().size()); -// for(Terms.Bucket bucket : infos.getBuckets()) { -// String key = bucket.getKey().toString(); -// long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); -// if(key.equalsIgnoreCase("a")) { -// Assert.assertEquals(2, count); -// } -// else if(key.equalsIgnoreCase("c")) { -// Assert.assertEquals(2, count); -// } -// else if(key.equalsIgnoreCase("b")) { -// Assert.assertEquals(1, count); -// } -// else { -// throw new Exception(String.format("Unexpected key. expected: a OR b OR c . 
found: %s", key)); -// } -// } -// } -// -// @Test -// public void groupByTestWithFilter() throws Exception { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY filter(gender='m'),gender", TEST_INDEX_ACCOUNT)); -// InternalFilter filter = result.get("filter(gender = 'm')@FILTER"); -// Terms gender = filter.getAggregations().get("gender"); -// -// for(Terms.Bucket bucket : gender.getBuckets()) { -// String key = bucket.getKey().toString(); -// long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); -// if(key.equalsIgnoreCase("m")) { -// Assert.assertEquals(507, count); -// } -// else { -// throw new Exception(String.format("Unexpected key. expected: only m. found: %s", key)); -// } -// } -// } -// -// - //endregion not migrated + // region not migrated + + // script on metric aggregation tests. uncomment if your elastic has scripts enable (disabled by + // default) + // @Test + // public void sumWithScriptTest() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT + // SUM(script('','doc[\\'balance\\'].value + doc[\\'balance\\'].value')) as doubleSum FROM %s", + // TEST_INDEX)); + // Sum sum = result.get("doubleSum"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void sumWithImplicitScriptTest() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT SUM(balance + balance) as doubleSum + // FROM %s", TEST_INDEX)); + // Sum sum = result.get("doubleSum"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void sumWithScriptTestNoAlias() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT SUM(balance + balance) FROM %s", + // TEST_INDEX)); + // Sum sum = result.get("SUM(script=script(balance + 
balance,doc('balance').value + + // doc('balance').value))"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void scriptedMetricAggregation() throws SQLFeatureNotSupportedException, + // SqlParseException { + // Aggregations result = query ("select + // scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ + // if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value); } " + + // "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value); }}'," + + // "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ + // sumThem += a.get(\\'ages\\');} }; return sumThem;') as wierdSum from " + TEST_INDEX + ""); + // ScriptedMetric metric = result.get("wierdSum"); + // Assert.assertEquals(136L,metric.aggregation()); + // } + // + // @Test + // public void scriptedMetricConcatWithStringParamAndReduceParamAggregation() throws + // SQLFeatureNotSupportedException, SqlParseException { + // String query = "select scripted_metric(\n" + + // " 'init_script' = '_agg[\"concat\"]=[] ',\n" + + // " 'map_script'='_agg.concat.add(doc[field].value)' ,\n" + + // " 'combine_script'='return _agg.concat.join(delim);',\t\t\t\t\n" + + // " 'reduce_script'='_aggs.removeAll(\"\"); return _aggs.join(delim)'," + + // "'@field' = 'name.firstname' , '@delim'=';',@reduce_delim =';' ) as + // all_characters \n" + + // "from "+TEST_INDEX+""; + // Aggregations result = query (query); + // ScriptedMetric metric = result.get("all_characters"); + // List names = Arrays.asList(metric.aggregation().toString().split(";")); + // + // + // Assert.assertEquals(4,names.size()); + // String[] expectedNames = new String[]{"brandon","daenerys","eddard","jaime"}; + // for(String name : expectedNames){ + // Assert.assertTrue("not contains:" + name,names.contains(name)); + // } + // } + // + // @Test + // public void scriptedMetricAggregationWithNumberParams() throws + // SQLFeatureNotSupportedException, 
SqlParseException { + // Aggregations result = query ("select + // scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ + // if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value+x); } " + + // "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value+x); }}'," + // + + // "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ + // sumThem += a.get(\\'ages\\');} }; return sumThem;'" + + // ",'@x'=3) as wierdSum from " + TEST_INDEX + ""); + // ScriptedMetric metric = result.get("wierdSum"); + // Assert.assertEquals(148L,metric.aggregation()); + // } + // + + // @Test + // public void topHitTest_WithIncludeAndExclude() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("select + // topHits('size'=3,'exclude'='lastname','include'='firstname,lastname',age='desc') from %s group + // by gender ", TEST_INDEX_ACCOUNT)); + // List buckets = ((Terms) (result.asList().get(0))).getBuckets(); + // for (Terms.Bucket bucket : buckets) { + // SearchHits hits = ((InternalTopHits) + // bucket.getAggregations().asList().get(0)).getHits(); + // for (SearchHit hit : hits) { + // Set fields = hit.getSourceAsMap().keySet(); + // Assert.assertEquals(1, fields.size()); + // Assert.assertTrue(fields.contains("firstname")); + // } + // } + // } + // + // private Aggregations query(String query) throws SqlParseException, + // SQLFeatureNotSupportedException { + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // return ((SearchResponse)select.get()).getAggregations(); + // } + // + // private SqlElasticSearchRequestBuilder getSearchRequestBuilder(String query) throws + // SqlParseException, SQLFeatureNotSupportedException { + // SearchDao searchDao = MainTestSuite.getSearchDao(); + // return (SqlElasticSearchRequestBuilder) searchDao.explain(query).explain(); + // } + // + // @Test + // public void 
testFromSizeWithAggregations() throws Exception { + // final String query1 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(0,1) */" + + // " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); + // SearchResponse response1 = (SearchResponse) getSearchRequestBuilder(query1).get(); + // + // Assert.assertEquals(1, response1.getHits().getHits().length); + // Terms gender1 = response1.getAggregations().get("gender"); + // Assert.assertEquals(2, gender1.getBuckets().size()); + // Object account1 = + // response1.getHits().getHits()[0].getSourceAsMap().get("account_number"); + // + // final String query2 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(1,1) */" + + // " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); + // SearchResponse response2 = (SearchResponse) getSearchRequestBuilder(query2).get(); + // + // Assert.assertEquals(1, response2.getHits().getHits().length); + // Terms gender2 = response2.getAggregations().get("gender"); + // Assert.assertEquals(2, gender2.getBuckets().size()); + // Object account2 = + // response2.getHits().getHits()[0].getSourceAsMap().get("account_number"); + // + // Assert.assertEquals(response1.getHits().getTotalHits(), + // response2.getHits().getTotalHits()); + // Assert.assertNotEquals(account1, account2); + // } + // + // @Test + // public void testSubAggregations() throws Exception { + // Set expectedAges = new HashSet<>(ContiguousSet.create(Range.closed(20, 40), + // DiscreteDomain.integers())); + // final String query = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(10) */" + + // " * FROM %s GROUP BY (gender, terms('field'='age','size'=200,'alias'='age')), + // (state) LIMIT 200,200", TEST_INDEX_ACCOUNT); + // + // Map> buckets = new HashMap<>(); + // + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // SearchResponse response = (SearchResponse) select.get(); + // Aggregations result = response.getAggregations(); + // + // Terms gender = result.get("gender"); + // for(Terms.Bucket genderBucket : gender.getBuckets()) { + // String genderKey = genderBucket.getKey().toString(); + // buckets.put(genderKey, new HashSet()); + // Terms ageBuckets = (Terms) genderBucket.getAggregations().get("age"); + // for(Terms.Bucket ageBucket : ageBuckets.getBuckets()) { + // buckets.get(genderKey).add(Integer.parseInt(ageBucket.getKey().toString())); + // } + // } + // + // Assert.assertEquals(2, buckets.keySet().size()); + // Assert.assertEquals(expectedAges, buckets.get("m")); + // Assert.assertEquals(expectedAges, buckets.get("f")); + // + // Terms state = result.get("state.keyword"); + // for(Terms.Bucket stateBucket : state.getBuckets()) { + // if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { + // Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() + // == 22); + // } + // } + // + // Assert.assertEquals(response.getHits().getTotalHits(), 1000); + // Assert.assertEquals(response.getHits().getHits().length, 10); + // } + // + // @Test + // public void testSimpleSubAggregations() throws Exception { + // final String query = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(10) */ * FROM %s + // GROUP BY (gender), (state) ", TEST_INDEX_ACCOUNT); + // + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // SearchResponse response = (SearchResponse) select.get(); + // Aggregations result = response.getAggregations(); + // + // Terms gender = result.get("gender"); + // for(Terms.Bucket genderBucket : gender.getBuckets()) { + // String genderKey = genderBucket.getKey().toString(); + // Assert.assertTrue("Gender should be m or f", genderKey.equals("m") || + // genderKey.equals("f")); + // } + // + // Assert.assertEquals(2, gender.getBuckets().size()); + // + // Terms state = result.get("state.keyword"); + // for(Terms.Bucket stateBucket : state.getBuckets()) { + // if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { + // Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() + // == 22); + // } + // } + // + // Assert.assertEquals(response.getHits().getTotalHits(), 1000); + // Assert.assertEquals(response.getHits().getHits().length, 10); + // } + // + // @Test + // public void geoHashGrid() throws SQLFeatureNotSupportedException, SqlParseException { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s/location GROUP BY + // geohash_grid(field='center',precision=5) ", TEST_INDEX_LOCATION)); + // InternalGeoHashGrid grid = result.get("geohash_grid(field=center,precision=5)"); + // Collection buckets = + // grid.getBuckets(); + // for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { + // Assert.assertTrue(bucket.getKeyAsString().equals("w2fsm") || + // bucket.getKeyAsString().equals("w0p6y") ); + // Assert.assertEquals(1,bucket.getDocCount()); + // } + // } + // + // @Test + // public void geoBounds() throws SQLFeatureNotSupportedException, SqlParseException { + // Aggregations result = query(String.format("SELECT * FROM %s/location GROUP BY + // geo_bounds(field='center',alias='bounds') ", TEST_INDEX_LOCATION)); + // 
InternalGeoBounds bounds = result.get("bounds"); + // Assert.assertEquals(0.5,bounds.bottomRight().getLat(),0.001); + // Assert.assertEquals(105.0,bounds.bottomRight().getLon(),0.001); + // Assert.assertEquals(5.0,bounds.topLeft().getLat(),0.001); + // Assert.assertEquals(100.5,bounds.topLeft().getLon(),0.001); + // } + // + // @Test + // public void groupByOnNestedFieldTest() throws Exception { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY + // nested(message.info)", TEST_INDEX_NESTED_TYPE)); + // InternalNested nested = result.get("message.info@NESTED"); + // Terms infos = nested.getAggregations().get("message.info"); + // Assert.assertEquals(3,infos.getBuckets().size()); + // for(Terms.Bucket bucket : infos.getBuckets()) { + // String key = bucket.getKey().toString(); + // long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); + // if(key.equalsIgnoreCase("a")) { + // Assert.assertEquals(2, count); + // } + // else if(key.equalsIgnoreCase("c")) { + // Assert.assertEquals(2, count); + // } + // else if(key.equalsIgnoreCase("b")) { + // Assert.assertEquals(1, count); + // } + // else { + // throw new Exception(String.format("Unexpected key. expected: a OR b OR c . + // found: %s", key)); + // } + // } + // } + // + // @Test + // public void groupByTestWithFilter() throws Exception { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY + // filter(gender='m'),gender", TEST_INDEX_ACCOUNT)); + // InternalFilter filter = result.get("filter(gender = 'm')@FILTER"); + // Terms gender = filter.getAggregations().get("gender"); + // + // for(Terms.Bucket bucket : gender.getBuckets()) { + // String key = bucket.getKey().toString(); + // long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); + // if(key.equalsIgnoreCase("m")) { + // Assert.assertEquals(507, count); + // } + // else { + // throw new Exception(String.format("Unexpected key. expected: only m. 
found: %s", + // key)); + // } + // } + // } + // + // + // endregion not migrated @Test public void groupByOnNestedFieldWithFilterTest() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1026,29 +1170,36 @@ public void groupByOnNestedFieldWithFilterTest() throws Exception { @Test public void minOnNestedField() throws Exception { - String query = String.format("SELECT min(nested(message.dayOfWeek)) as minDays FROM %s", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT min(nested(message.dayOfWeek)) as minDays FROM %s", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertEquals(1.0, ((BigDecimal) aggregation.query("/minDays/value")).doubleValue(), 0.0001); + Assert.assertEquals( + 1.0, ((BigDecimal) aggregation.query("/minDays/value")).doubleValue(), 0.0001); } @Test public void sumOnNestedField() throws Exception { - String query = String.format("SELECT sum(nested(message.dayOfWeek)) as sumDays FROM %s", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT sum(nested(message.dayOfWeek)) as sumDays FROM %s", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertEquals(19.0, ((BigDecimal) aggregation.query("/sumDays/value")).doubleValue(), 0.0001); + Assert.assertEquals( + 19.0, ((BigDecimal) aggregation.query("/sumDays/value")).doubleValue(), 0.0001); } @Test public void histogramOnNestedField() throws Exception { - 
String query = String.format("select count(*) from %s group by histogram" + - "('field'='message.dayOfWeek','nested'='message','interval'='2' , 'alias' = 'someAlias' )", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "select count(*) from %s group by" + + " histogram('field'='message.dayOfWeek','nested'='message','interval'='2' ," + + " 'alias' = 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message@NESTED"); @@ -1061,22 +1212,26 @@ public void histogramOnNestedField() throws Exception { JSONArray buckets = (JSONArray) aggregation.query("/someAlias/buckets"); Assert.assertThat(buckets.length(), equalTo(4)); - buckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + buckets.forEach( + obj -> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedAndEmptyPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a'),reverse_nested(someField,'')", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a'),reverse_nested(someField,'')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1097,8 +1252,11 @@ public void 
reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedAndE public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + - "('myFilter',message.info = 'a'),reverse_nested(someField)", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + + "('myFilter',message.info = 'a'),reverse_nested(someField)", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1119,9 +1277,12 @@ public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPa public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2', " + - "'alias' = 'someAlias' )", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter('myFilter',message.info" + + " = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2', 'alias' =" + + " 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1140,21 +1301,26 @@ public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHi expectedCountsByKey.put(2.0, 0); expectedCountsByKey.put(4.0, 1); - someAliasBuckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + someAliasBuckets.forEach( + obj 
-> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test public void reverseToRootGroupByOnNestedFieldWithFilterAndSumOnReverseNestedField() throws Exception { - String query = String.format("SELECT sum(reverse_nested(myNum)) bla FROM %s GROUP BY " + - "nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT sum(reverse_nested(myNum)) bla FROM %s GROUP BY " + + "nested(message.info),filter('myFilter',message.info = 'a')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1172,9 +1338,11 @@ public void reverseToRootGroupByOnNestedFieldWithFilterAndSumOnReverseNestedFiel public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a'),reverse_nested(comment.data,'~comment')", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a'),reverse_nested(comment.data,'~comment')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1184,8 +1352,9 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); JSONArray commentDataBuckets = - (JSONArray) msgInfoBuckets.optQuery("/0/comment.data@NESTED_REVERSED" + - "/comment.data@NESTED/comment.data/buckets"); + (JSONArray) + msgInfoBuckets.optQuery( + "/0/comment.data@NESTED_REVERSED" + 
"/comment.data@NESTED/comment.data/buckets"); Assert.assertNotNull(commentDataBuckets); Assert.assertThat(commentDataBuckets.length(), equalTo(1)); Assert.assertThat(commentDataBuckets.query("/0/key"), equalTo("ab")); @@ -1196,9 +1365,12 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + - "('myFilter',message.info = 'a'),histogram('field'='comment.likes','reverse_nested'='~comment'," + - "'interval'='2' , 'alias' = 'someAlias' )", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter('myFilter',message.info" + + " = 'a'),histogram('field'='comment.likes','reverse_nested'='~comment','interval'='2'" + + " , 'alias' = 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1207,8 +1379,10 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); - JSONArray someAliasBuckets = (JSONArray) msgInfoBuckets.optQuery( - "/0/~comment@NESTED_REVERSED/~comment@NESTED/someAlias/buckets"); + JSONArray someAliasBuckets = + (JSONArray) + msgInfoBuckets.optQuery( + "/0/~comment@NESTED_REVERSED/~comment@NESTED/someAlias/buckets"); Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(someAliasBuckets.length(), equalTo(2)); @@ -1216,13 +1390,15 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes expectedCountsByKey.put(0.0, 1); expectedCountsByKey.put(2.0, 1); - someAliasBuckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - 
Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + someAliasBuckets.forEach( + obj -> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test @@ -1230,8 +1406,9 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterAndSumOnReverseNes throws Exception { String query = - String.format("SELECT sum(reverse_nested(comment.likes,'~comment')) bla FROM %s " + - "GROUP BY nested(message.info),filter('myFilter',message.info = 'a')", + String.format( + "SELECT sum(reverse_nested(comment.likes,'~comment')) bla FROM %s " + + "GROUP BY nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1241,10 +1418,11 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterAndSumOnReverseNes Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); - Assert.assertNotNull(msgInfoBuckets.optQuery( - "/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla/value")); - JSONObject bla = (JSONObject) msgInfoBuckets - .query("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla"); + Assert.assertNotNull( + msgInfoBuckets.optQuery("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla/value")); + JSONObject bla = + (JSONObject) + msgInfoBuckets.query("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla"); Assert.assertEquals(4.0, bla.getDouble("value"), 0.000001); } @@ -1257,8 +1435,9 @@ public void docsReturnedTestWithoutDocsHint() throws Exception { @Test public void docsReturnedTestWithDocsHint() throws Exception { - String query = 
String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", - TEST_INDEX_ACCOUNT); + String query = + String.format( + "SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); Assert.assertThat(getHits(result).length(), equalTo(10)); } @@ -1267,9 +1446,11 @@ public void docsReturnedTestWithDocsHint() throws Exception { @Test public void termsWithScript() throws Exception { String query = - String.format("select count(*), avg(all_client) from %s group by terms('alias'='asdf'," + - " substring(field, 0, 1)), date_histogram('alias'='time', 'field'='timestamp', " + - "'interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", TEST_INDEX_ONLINE); + String.format( + "select count(*), avg(all_client) from %s group by terms('alias'='asdf'," + + " substring(field, 0, 1)), date_histogram('alias'='time', 'field'='timestamp', " + + "'interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", + TEST_INDEX_ONLINE); String result = explainQuery(query); Assert.assertThat(result, containsString("\"script\":{\"source\"")); @@ -1278,9 +1459,10 @@ public void termsWithScript() throws Exception { @Test public void groupByScriptedDateHistogram() throws Exception { - String query = String - .format("select count(*), avg(all_client) from %s group by date_histogram('alias'='time'," + - " ceil(all_client), 'fixed_interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", + String query = + String.format( + "select count(*), avg(all_client) from %s group by date_histogram('alias'='time'," + + " ceil(all_client), 'fixed_interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", TEST_INDEX_ONLINE); String result = explainQuery(query); @@ -1290,9 +1472,10 @@ public void groupByScriptedDateHistogram() throws Exception { @Test public void groupByScriptedHistogram() throws Exception { - String query = String.format( - "select count(*) from %s group by histogram('alias'='all_field', pow(all_client,1))", - TEST_INDEX_ONLINE); + String query 
= + String.format( + "select count(*) from %s group by histogram('alias'='all_field', pow(all_client,1))", + TEST_INDEX_ONLINE); String result = explainQuery(query); Assert.assertThat(result, containsString("Math.pow(doc['all_client'].value, 1)")); @@ -1303,18 +1486,17 @@ public void groupByScriptedHistogram() throws Exception { public void distinctWithOneField() { Assert.assertEquals( executeQuery("SELECT DISTINCT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES, "jdbc"), - executeQuery("SELECT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES - + " GROUP BY name.lastname", "jdbc") - ); + executeQuery( + "SELECT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES + " GROUP BY name.lastname", + "jdbc")); } @Test public void distinctWithMultipleFields() { Assert.assertEquals( executeQuery("SELECT DISTINCT age, gender FROM " + TEST_INDEX_ACCOUNT, "jdbc"), - executeQuery("SELECT age, gender FROM " + TEST_INDEX_ACCOUNT - + " GROUP BY age, gender", "jdbc") - ); + executeQuery( + "SELECT age, gender FROM " + TEST_INDEX_ACCOUNT + " GROUP BY age, gender", "jdbc")); } private JSONObject getAggregation(final JSONObject queryResult, final String aggregationName) { @@ -1326,26 +1508,27 @@ private JSONObject getAggregation(final JSONObject queryResult, final String agg return aggregations.getJSONObject(aggregationName); } - private int getIntAggregationValue(final JSONObject queryResult, final String aggregationName, - final String fieldName) { + private int getIntAggregationValue( + final JSONObject queryResult, final String aggregationName, final String fieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); return targetAggregation.getInt(fieldName); } - private double getDoubleAggregationValue(final JSONObject queryResult, - final String aggregationName, - final String fieldName) { + private double getDoubleAggregationValue( + final JSONObject queryResult, final String 
aggregationName, final String fieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); return targetAggregation.getDouble(fieldName); } - private double getDoubleAggregationValue(final JSONObject queryResult, - final String aggregationName, - final String fieldName, final String subFieldName) { + private double getDoubleAggregationValue( + final JSONObject queryResult, + final String aggregationName, + final String fieldName, + final String subFieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java index 9952b0c68a..9a416c9683 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -40,9 +39,7 @@ import org.opensearch.client.Response; import org.opensearch.sql.legacy.executor.csv.CSVResult; -/** - * Tests to cover requests with "?format=csv" parameter - */ +/** Tests to cover requests with "?format=csv" parameter */ public class CsvFormatResponseIT extends SQLIntegTestCase { private boolean flatOption = false; @@ -75,16 +72,16 @@ public void allPercentilesByDefault() throws IOException { final String result = executeQueryWithStringOutput(query); final String expectedHeaders = - "PERCENTILES(age).1.0,PERCENTILES(age).5.0,PERCENTILES(age).25.0," + - "PERCENTILES(age).50.0,PERCENTILES(age).75.0,PERCENTILES(age).95.0,PERCENTILES(age).99.0"; + "PERCENTILES(age).1.0,PERCENTILES(age).5.0,PERCENTILES(age).25.0," + + 
"PERCENTILES(age).50.0,PERCENTILES(age).75.0,PERCENTILES(age).95.0,PERCENTILES(age).99.0"; Assert.assertThat(result, containsString(expectedHeaders)); } @Test public void specificPercentilesIntAndDouble() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT PERCENTILES(age,10,49.0) FROM %s", - TEST_INDEX_ACCOUNT); + final String query = + String.format(Locale.ROOT, "SELECT PERCENTILES(age,10,49.0) FROM %s", TEST_INDEX_ACCOUNT); final String result = executeQueryWithStringOutput(query); final String[] unexpectedPercentiles = {"1.0", "5.0", "25.0", "50.0", "75.0", "95.0", "99.0"}; @@ -92,14 +89,14 @@ public void specificPercentilesIntAndDouble() throws IOException { "\"PERCENTILES(age,10,49.0).10.0\",\"PERCENTILES(age,10,49.0).49.0\""; Assert.assertThat(result, containsString(expectedHeaders)); for (final String unexpectedPercentile : unexpectedPercentiles) { - Assert.assertThat(result, - not(containsString("PERCENTILES(age,10,49.0)." + unexpectedPercentile))); + Assert.assertThat( + result, not(containsString("PERCENTILES(age,10,49.0)." 
+ unexpectedPercentile))); } } public void nestedObjectsAndArraysAreQuoted() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", - TEST_INDEX_NESTED_TYPE); + final String query = + String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", TEST_INDEX_NESTED_TYPE); final String result = executeQueryWithStringOutput(query); final String expectedMyNum = "\"[3, 4]\""; @@ -114,8 +111,8 @@ public void nestedObjectsAndArraysAreQuoted() throws IOException { public void arraysAreQuotedInFlatMode() throws IOException { setFlatOption(true); - final String query = String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", - TEST_INDEX_NESTED_TYPE); + final String query = + String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", TEST_INDEX_NESTED_TYPE); final String result = executeQueryWithStringOutput(query); final String expectedMyNum = "\"[3, 4]\""; @@ -166,15 +163,19 @@ public void fieldOrderOther() throws IOException { public void fieldOrderWithScriptFields() throws IOException { final String[] expectedFields = {"email", "script1", "script2", "gender", "address"}; - final String query = String.format(Locale.ROOT, "SELECT email, " + - "script(script1, \"doc['balance'].value * 2\"), " + - "script(script2, painless, \"doc['balance'].value + 10\"), gender, address " + - "FROM %s WHERE email='amberduke@pyrami.com'", TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT email, " + + "script(script1, \"doc['balance'].value * 2\"), " + + "script(script2, painless, \"doc['balance'].value + 10\"), gender, address " + + "FROM %s WHERE email='amberduke@pyrami.com'", + TEST_INDEX_ACCOUNT); verifyFieldOrder(expectedFields, query); } - //region Tests migrated from CSVResultsExtractorTests + // region Tests migrated from CSVResultsExtractorTests @Test public void simpleSearchResultNotNestedNotFlatNoAggs() throws Exception { @@ -195,8 +196,8 @@ public void 
simpleSearchResultNotNestedNotFlatNoAggs() throws Exception { @Test public void simpleSearchResultWithNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format(Locale.ROOT, "select name,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -207,21 +208,42 @@ public void simpleSearchResultWithNestedNotFlatNoAggs() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(7, lines.size()); - Assert.assertThat(lines, hasRow(null, "Targaryen", - Arrays.asList("firstname=Daenerys", "lastname=Targaryen", "ofHerName=1"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Eddard", "lastname=Stark", "ofHisName=1"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Brandon", "lastname=Stark", "ofHisName=4"), true)); - Assert.assertThat(lines, hasRow(null, "Lannister", - Arrays.asList("firstname=Jaime", "lastname=Lannister", "ofHisName=1"), true)); + Assert.assertThat( + lines, + hasRow( + null, + "Targaryen", + Arrays.asList("firstname=Daenerys", "lastname=Targaryen", "ofHerName=1"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Stark", + Arrays.asList("firstname=Eddard", "lastname=Stark", "ofHisName=1"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Stark", + Arrays.asList("firstname=Brandon", "lastname=Stark", "ofHisName=4"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Lannister", + Arrays.asList("firstname=Jaime", "lastname=Lannister", "ofHisName=1"), + true)); } @Ignore("headers incorrect in case of nested fields") @Test public void simpleSearchResultWithNestedOneFieldNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + 
String.format( + Locale.ROOT, "select name.firstname,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -235,14 +257,16 @@ public void simpleSearchResultWithNestedOneFieldNotFlatNoAggs() throws Exception Assert.assertThat(lines, hasItem("{firstname=Eddard},Stark")); Assert.assertThat(lines, hasItem("{firstname=Brandon},Stark")); Assert.assertThat(lines, hasItem("{firstname=Jaime},Lannister")); - } @Ignore("headers incorrect in case of nested fields") @Test public void simpleSearchResultWithNestedTwoFieldsFromSameNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,name.lastname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "select name.firstname,name.lastname,house from %s", + TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -253,20 +277,23 @@ public void simpleSearchResultWithNestedTwoFieldsFromSameNestedNotFlatNoAggs() t List lines = csvResult.getLines(); Assert.assertEquals(7, lines.size()); - Assert.assertThat(lines, hasRow(null, "Targaryen", - Arrays.asList("firstname=Daenerys", "lastname=Targaryen"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Eddard", "lastname=Stark"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Brandon", "lastname=Stark"), true)); - Assert.assertThat(lines, hasRow(null, "Lannister", - Arrays.asList("firstname=Jaime", "lastname=Lannister"), true)); + Assert.assertThat( + lines, + hasRow(null, "Targaryen", Arrays.asList("firstname=Daenerys", "lastname=Targaryen"), true)); + Assert.assertThat( + lines, hasRow(null, "Stark", Arrays.asList("firstname=Eddard", "lastname=Stark"), true)); + Assert.assertThat( + lines, hasRow(null, "Stark", Arrays.asList("firstname=Brandon", "lastname=Stark"), 
true)); + Assert.assertThat( + lines, + hasRow(null, "Lannister", Arrays.asList("firstname=Jaime", "lastname=Lannister"), true)); } @Test public void simpleSearchResultWithNestedWithFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, "select name.firstname,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, true); List headers = csvResult.getHeaders(); @@ -284,9 +311,12 @@ public void simpleSearchResultWithNestedWithFlatNoAggs() throws Exception { @Test public void joinSearchResultNotNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select c.gender , h.hname,h.words from %s c " + - "JOIN %s h " + - "on h.hname = c.house ", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "select c.gender , h.hname,h.words from %s c " + "JOIN %s h " + "on h.hname = c.house ", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -298,8 +328,8 @@ public void joinSearchResultNotNestedNotFlatNoAggs() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - Assert.assertThat(lines, - hasRow(null, null, Arrays.asList("F", "fireAndBlood", "Targaryen"), false)); + Assert.assertThat( + lines, hasRow(null, null, Arrays.asList("F", "fireAndBlood", "Targaryen"), false)); } @Test @@ -311,7 +341,6 @@ public void simpleNumericValueAgg() throws Exception { Assert.assertEquals(1, headers.size()); Assert.assertEquals("count(*)", headers.get(0)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("2", lines.get(0)); @@ -327,18 +356,16 @@ public void simpleNumericValueAggWithAlias() throws Exception { Assert.assertEquals(1, headers.size()); 
Assert.assertEquals("myAlias", headers.get(0)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("3.0", lines.get(0)); - } @Ignore("only work for legacy engine") public void twoNumericAggWithAlias() throws Exception { String query = - String.format(Locale.ROOT, "select count(*) as count, avg(age) as myAlias from %s ", - TEST_INDEX_DOG); + String.format( + Locale.ROOT, "select count(*) as count, avg(age) as myAlias from %s ", TEST_INDEX_DOG); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -347,7 +374,6 @@ public void twoNumericAggWithAlias() throws Exception { Assert.assertTrue(headers.contains("count")); Assert.assertTrue(headers.contains("myAlias")); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("2,3.0", lines.get(0)); @@ -355,8 +381,8 @@ public void twoNumericAggWithAlias() throws Exception { @Test public void aggAfterTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT); + String query = + String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -369,9 +395,11 @@ public void aggAfterTermsGroupBy() throws Exception { @Test public void aggAfterTwoTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, - "SELECT COUNT(*) FROM %s where age in (35,36) GROUP BY gender,age", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT COUNT(*) FROM %s where age in (35,36) GROUP BY gender,age", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -379,18 +407,17 @@ public void aggAfterTwoTermsGroupBy() throws Exception { List 
lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - assertThat(lines, containsInAnyOrder( - equalTo("31"), - equalTo("28"), - equalTo("21"), - equalTo("24"))); + assertThat( + lines, containsInAnyOrder(equalTo("31"), equalTo("28"), equalTo("21"), equalTo("24"))); } @Test public void multipleAggAfterTwoTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, - "SELECT COUNT(*) , sum(balance) FROM %s where age in (35,36) GROUP BY gender,age", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT COUNT(*) , sum(balance) FROM %s where age in (35,36) GROUP BY gender,age", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(2, headers.size()); @@ -398,18 +425,23 @@ public void multipleAggAfterTwoTermsGroupBy() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - assertThat(lines, containsInAnyOrder( - equalTo("31,647425"), - equalTo("28,678337"), - equalTo("21,505660"), - equalTo("24,472771"))); + assertThat( + lines, + containsInAnyOrder( + equalTo("31,647425"), + equalTo("28,678337"), + equalTo("21,505660"), + equalTo("24,472771"))); } @Test public void dateHistogramTest() throws Exception { - String query = String.format(Locale.ROOT, "select count(*) from %s" + - " group by date_histogram('field'='insert_time','fixed_interval'='4d','alias'='days')", - TEST_INDEX_ONLINE); + String query = + String.format( + Locale.ROOT, + "select count(*) from %s group by" + + " date_histogram('field'='insert_time','fixed_interval'='4d','alias'='days')", + TEST_INDEX_ONLINE); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -417,10 +449,7 @@ public void dateHistogramTest() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(3, lines.size()); - assertThat(lines, containsInAnyOrder( - 
equalTo("477.0"), - equalTo("5664.0"), - equalTo("3795.0"))); + assertThat(lines, containsInAnyOrder(equalTo("477.0"), equalTo("5664.0"), equalTo("3795.0"))); } @Test @@ -447,10 +476,16 @@ public void extendedStatsAggregationTest() throws Exception { CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); - final String[] expectedHeaders = {"EXTENDED_STATS(age).count", "EXTENDED_STATS(age).sum", - "EXTENDED_STATS(age).avg", "EXTENDED_STATS(age).min", "EXTENDED_STATS(age).max", - "EXTENDED_STATS(age).sumOfSquares", "EXTENDED_STATS(age).variance", - "EXTENDED_STATS(age).stdDeviation"}; + final String[] expectedHeaders = { + "EXTENDED_STATS(age).count", + "EXTENDED_STATS(age).sum", + "EXTENDED_STATS(age).avg", + "EXTENDED_STATS(age).min", + "EXTENDED_STATS(age).max", + "EXTENDED_STATS(age).sumOfSquares", + "EXTENDED_STATS(age).variance", + "EXTENDED_STATS(age).stdDeviation" + }; Assert.assertEquals(expectedHeaders.length, headers.size()); Assert.assertThat(headers, contains(expectedHeaders)); @@ -466,7 +501,9 @@ public void extendedStatsAggregationTest() throws Exception { @Test public void percentileAggregationTest() throws Exception { String query = - String.format(Locale.ROOT, "select percentiles(age) as per from %s where age > 31", + String.format( + Locale.ROOT, + "select percentiles(age) as per from %s where age > 31", TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -479,7 +516,6 @@ public void percentileAggregationTest() throws Exception { Assert.assertEquals("per.95.0", headers.get(5)); Assert.assertEquals("per.99.0", headers.get(6)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); @@ -516,9 +552,11 @@ private void assertEquals(String expected, String actual, Double delta) { @Test public void includeScore() throws Exception { - String query = String.format(Locale.ROOT, - "select age, firstname, _score from %s where age > 31 
order by _score desc limit 2 ", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age, firstname, _score from %s where age > 31 order by _score desc limit 2 ", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false, true, false); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -538,9 +576,11 @@ public void includeScore() throws Exception { @Test public void scriptedField() throws Exception { - String query = String.format(Locale.ROOT, - "select age+1 as agePlusOne ,age , firstname from %s where age = 31 limit 1", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age+1 as agePlusOne ,age , firstname from %s where age = 31 limit 1", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -548,8 +588,11 @@ public void scriptedField() throws Exception { Assert.assertTrue(headers.contains("age")); Assert.assertTrue(headers.contains("firstname")); List lines = csvResult.getLines(); - Assert.assertTrue(lines.get(0).contains("32,31") || lines.get(0).contains("32.0,31.0") || - lines.get(0).contains("31,32") || lines.get(0).contains("31.0,32.0")); + Assert.assertTrue( + lines.get(0).contains("32,31") + || lines.get(0).contains("32.0,31.0") + || lines.get(0).contains("31,32") + || lines.get(0).contains("31.0,32.0")); } @Ignore("separator not exposed") @@ -568,13 +611,15 @@ public void twoCharsSeperator() throws Exception { Assert.assertEquals(2, lines.size()); Assert.assertTrue("rex||2".equals(lines.get(0)) || "2||rex".equals(lines.get(0))); Assert.assertTrue("snoopy||4".equals(lines.get(1)) || "4||snoopy".equals(lines.get(1))); - } @Ignore("tested in @see: org.opensearch.sql.sql.IdentifierIT.testMetafieldIdentifierTest") public void includeIdAndNotTypeOrScore() throws Exception { - String query = String.format(Locale.ROOT, - "select age, firstname, _id 
from %s where lastname = 'Marquez' ", TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age, firstname, _id from %s where lastname = 'Marquez' ", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false, false, true); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -584,15 +629,16 @@ public void includeIdAndNotTypeOrScore() throws Exception { List lines = csvResult.getLines(); Assert.assertTrue(lines.get(0).contains(",437") || lines.get(0).contains("437,")); } - //endregion Tests migrated from CSVResultsExtractorTests + + // endregion Tests migrated from CSVResultsExtractorTests @Ignore("only work for legacy engine") public void sensitiveCharacterSanitizeTest() throws IOException { String requestBody = - "{" + - " \"=cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad'!_xlbgnm.A1\",\n" + - " \"-cmd|' /C notepad'!_xlbgnm.A1\": \"@cmd|' /C notepad'!_xlbgnm.A1\"\n" + - "}"; + "{" + + " \"=cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad'!_xlbgnm.A1\",\n" + + " \"-cmd|' /C notepad'!_xlbgnm.A1\": \"@cmd|' /C notepad'!_xlbgnm.A1\"\n" + + "}"; Request request = new Request("PUT", "/userdata/_doc/1?refresh=true"); request.setJsonEntity(requestBody); @@ -613,11 +659,11 @@ public void sensitiveCharacterSanitizeTest() throws IOException { @Ignore("only work for legacy engine") public void sensitiveCharacterSanitizeAndQuotedTest() throws IOException { String requestBody = - "{" + - " \"=cmd|' /C notepad'!_xlbgnm.A1,,\": \",+cmd|' /C notepad'!_xlbgnm.A1\",\n" + - " \",@cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad,,'!_xlbgnm.A1\",\n" + - " \"-cmd|' /C notepad,,'!_xlbgnm.A1\": \",,,@cmd|' /C notepad'!_xlbgnm.A1\"\n" + - "}"; + "{" + + " \"=cmd|' /C notepad'!_xlbgnm.A1,,\": \",+cmd|' /C notepad'!_xlbgnm.A1\",\n" + + " \",@cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad,,'!_xlbgnm.A1\",\n" + + " \"-cmd|' /C notepad,,'!_xlbgnm.A1\": \",,,@cmd|' /C notepad'!_xlbgnm.A1\"\n" + + "}"; 
Request request = new Request("PUT", "/userdata2/_doc/1?refresh=true"); request.setJsonEntity(requestBody); @@ -638,8 +684,11 @@ public void sensitiveCharacterSanitizeAndQuotedTest() throws IOException { @Test public void sanitizeTest() throws IOException { - CSVResult csvResult = executeCsvRequest( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), false); + CSVResult csvResult = + executeCsvRequest( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + false); List lines = csvResult.getLines(); assertEquals(5, lines.size()); assertEquals(lines.get(0), "'+Amber JOHnny,Duke Willmington+"); @@ -660,8 +709,12 @@ public void selectFunctionAsFieldTest() throws IOException { private void verifyFieldOrder(final String[] expectedFields) throws IOException { final String fields = String.join(", ", expectedFields); - final String query = String.format(Locale.ROOT, "SELECT %s FROM %s " + - "WHERE email='amberduke@pyrami.com'", fields, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT %s FROM %s " + "WHERE email='amberduke@pyrami.com'", + fields, + TEST_INDEX_ACCOUNT); verifyFieldOrder(expectedFields, query); } @@ -685,13 +738,18 @@ private CSVResult executeCsvRequest(final String query, boolean flat) throws IOE return executeCsvRequest(query, flat, false, false); } - private CSVResult executeCsvRequest(final String query, boolean flat, boolean includeScore, - boolean includeId) throws IOException { + private CSVResult executeCsvRequest( + final String query, boolean flat, boolean includeScore, boolean includeId) + throws IOException { final String requestBody = super.makeRequest(query); - final String endpoint = String.format(Locale.ROOT, - "/_plugins/_sql?format=csv&flat=%b&_id=%b&_score=%b", - flat, includeId, includeScore); + final String endpoint = + String.format( + Locale.ROOT, + "/_plugins/_sql?format=csv&flat=%b&_id=%b&_score=%b", + 
flat, + includeId, + includeScore); final Request sqlRequest = new Request("POST", endpoint); sqlRequest.setJsonEntity(requestBody); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -729,22 +787,32 @@ private CSVResult csvResultFromStringResponse(final String response) { return new CSVResult(headers, rows); } - private static AnyOf> hasRow(final String prefix, final String suffix, - final List items, - final boolean areItemsNested) { + private static AnyOf> hasRow( + final String prefix, + final String suffix, + final List items, + final boolean areItemsNested) { final Collection> permutations = TestUtils.getPermutations(items); - final List>> matchers = permutations.stream().map(permutation -> { - - final String delimiter = areItemsNested ? ", " : ","; - final String objectField = String.join(delimiter, permutation); - final String row = String.format(Locale.ROOT, "%s%s%s%s%s", - printablePrefix(prefix), areItemsNested ? "\"{" : "", - objectField, areItemsNested ? "}\"" : "", printableSuffix(suffix)); - return hasItem(row); - - }).collect(Collectors.toCollection(LinkedList::new)); + final List>> matchers = + permutations.stream() + .map( + permutation -> { + final String delimiter = areItemsNested ? ", " : ","; + final String objectField = String.join(delimiter, permutation); + final String row = + String.format( + Locale.ROOT, + "%s%s%s%s%s", + printablePrefix(prefix), + areItemsNested ? "\"{" : "", + objectField, + areItemsNested ? 
"}\"" : "", + printableSuffix(suffix)); + return hasItem(row); + }) + .collect(Collectors.toCollection(LinkedList::new)); return anyOf(matchers); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java index b246bb6224..abd2bbbcc2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -43,9 +42,8 @@ protected void init() throws Exception { } /** - * Acceptable fetch_size are positive numbers. - * For example 0, 24, 53.0, "110" (parsable string) , "786.23" - * Negative values should throw 400 + * Acceptable fetch_size are positive numbers. For example 0, 24, 53.0, "110" (parsable string) , + * "786.23". Negative values should throw 400. */ @Test public void invalidNegativeFetchSize() throws IOException { @@ -65,9 +63,7 @@ public void invalidNegativeFetchSize() throws IOException { assertThat(resp.query("/error/type"), equalTo("IllegalArgumentException")); } - /** - * Non-numeric fetch_size value should throw 400 - */ + /** Non-numeric fetch_size value should throw 400 */ @Test public void invalidNonNumericFetchSize() throws IOException { String query = @@ -105,19 +101,22 @@ public void testExceptionOnCursorExplain() throws IOException { } /** - * For fetch_size = 0, default to non-pagination behaviour for simple queries - * This can be verified by checking that cursor is not present, and old default limit applies + * For fetch_size = 0, default to non-pagination behaviour for simple queries This can be verified + * by checking that cursor is not present, and old default limit applies */ @Test public void noPaginationWhenFetchSizeZero() throws IOException { String selectQuery = StringUtils.format("SELECT firstname, state FROM %s", 
TEST_INDEX_ACCOUNT); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 0, JDBC)); assertFalse(response.has(CURSOR)); - assertThat(response.getJSONArray(DATAROWS).length(), equalTo(1000)); // Default limit is 1000 in new engine + assertThat( + response.getJSONArray(DATAROWS).length(), + equalTo(1000)); // Default limit is 1000 in new engine } /** - * The index has 1000 records, with fetch size of 50 we should get 20 pages with no cursor on last page + * The index has 1000 records, with fetch size of 50 we should get 20 pages with no cursor on last + * page */ @Test public void validNumberOfPages() throws IOException { @@ -128,7 +127,7 @@ public void validNumberOfPages() throws IOException { int pageCount = 1; - while (!cursor.isEmpty()) { //this condition also checks that there is no cursor on last page + while (!cursor.isEmpty()) { // this condition also checks that there is no cursor on last page response = executeCursorQuery(cursor); cursor = response.optString(CURSOR); if (!cursor.isEmpty()) { @@ -162,7 +161,6 @@ public void validNumberOfPages() throws IOException { assertThat(pageCount, equalTo(36)); } - @Test public void validTotalResultWithAndWithoutPagination() throws IOException { // simple query - accounts index has 1000 docs, using higher limit to get all docs @@ -172,72 +170,78 @@ public void validTotalResultWithAndWithoutPagination() throws IOException { @Test public void validTotalResultWithAndWithoutPaginationWhereClause() throws IOException { - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance < 25000 AND age > 32", TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance < 25000 AND age > 32", + TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 17, false); } @Test public void validTotalResultWithAndWithoutPaginationOrderBy() throws IOException { - String selectQuery = 
StringUtils.format( - "SELECT firstname, state FROM %s ORDER BY balance DESC ", TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s ORDER BY balance DESC ", TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 26, false); } @Test public void validTotalResultWithAndWithoutPaginationWhereAndOrderBy() throws IOException { - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance < 25000 ORDER BY balance ASC ", - TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance < 25000 ORDER BY balance ASC ", + TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 80, false); } @Test public void validTotalResultWithAndWithoutPaginationNested() throws IOException { loadIndex(Index.NESTED_SIMPLE); - String selectQuery = StringUtils.format( - "SELECT name, a.city, a.state FROM %s m , m.address as a ", TEST_INDEX_NESTED_SIMPLE - ); + String selectQuery = + StringUtils.format( + "SELECT name, a.city, a.state FROM %s m , m.address as a ", TEST_INDEX_NESTED_SIMPLE); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 1, true); } @Test public void noCursorWhenResultsLessThanFetchSize() throws IOException { // fetch_size is 100, but actual number of rows returned from OpenSearch is 97 - // a scroll context will be opened but will be closed after first page as all records are fetched - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE balance < 25000 AND age > 36 LIMIT 2000", TEST_INDEX_ACCOUNT - ); + // a scroll context will be opened but will be closed after first page as all records are + // fetched + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE balance < 25000 AND age > 36 LIMIT 2000", TEST_INDEX_ACCOUNT); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 
100, JDBC)); assertFalse(response.has(CURSOR)); } @Ignore("Temporary deactivate the test until parameter substitution implemented in V2") - // Test was passing before, because such paging query was executed in V1, but now it is executed in V2 + // Test was passing before, because such paging query was executed in V1, but now it is executed + // in V2 @Test public void testCursorWithPreparedStatement() throws IOException { - JSONObject response = executeJDBCRequest(String.format("{" + - " \"fetch_size\": 200," + - " \"query\": \" SELECT age, state FROM %s WHERE age > ? OR state IN (?, ?)\"," + - " \"parameters\": [" + - " {" + - " \"type\": \"integer\"," + - " \"value\": 25" + - " }," + - " {" + - " \"type\": \"string\"," + - " \"value\": \"WA\"" + - " }," + - " {" + - " \"type\": \"string\"," + - " \"value\": \"UT\"" + - " }" + - " ]" + - "}", TestsConstants.TEST_INDEX_ACCOUNT)); - + JSONObject response = + executeJDBCRequest( + String.format( + "{" + + "\"fetch_size\": 200," + + "\"query\": \" SELECT age, state FROM %s WHERE age > ? 
OR state IN (?, ?)\"," + + "\"parameters\": [" + + " {" + + " \"type\": \"integer\"," + + " \"value\": 25" + + " }," + + " {" + + " \"type\": \"string\"," + + " \"value\": \"WA\"" + + " }," + + " {" + + " \"type\": \"string\"," + + " \"value\": \"UT\"" + + " }" + + "]" + + "}" + + TestsConstants.TEST_INDEX_ACCOUNT)); assertTrue(response.has(CURSOR)); verifyIsV1Cursor(response.getString(CURSOR)); } @@ -247,15 +251,16 @@ public void testRegressionOnDateFormatChange() throws IOException { loadIndex(Index.DATETIME); /** * With pagination, the field should be date formatted to MySQL format as in - * @see PR #367PR #367 * TEST_INDEX_DATE_TIME has three docs with login_time as date field with following values * 1.2015-01-01 * 2.2015-01-01T12:10:30Z * 3.1585882955 * 4.2020-04-08T11:10:30+05:00 + * */ - List actualDateList = new ArrayList<>(); String selectQuery = StringUtils.format("SELECT login_time FROM %s LIMIT 500", TEST_INDEX_DATE_TIME); @@ -271,16 +276,16 @@ public void testRegressionOnDateFormatChange() throws IOException { actualDateList.add(response.getJSONArray(DATAROWS).getJSONArray(0).getString(0)); } - List expectedDateList = Arrays.asList( - "2015-01-01 00:00:00.000", - "2015-01-01 12:10:30.000", - "1585882955", // by existing design, this is not formatted in MySQL standard format - "2020-04-08 06:10:30.000"); + List expectedDateList = + Arrays.asList( + "2015-01-01 00:00:00.000", + "2015-01-01 12:10:30.000", + "1585882955", // by existing design, this is not formatted in MySQL standard format + "2020-04-08 06:10:30.000"); assertThat(actualDateList, equalTo(expectedDateList)); } - @Ignore("Breaking change for OpenSearch: deprecate and enable cursor always") @Test public void defaultBehaviorWhenCursorSettingIsDisabled() throws IOException { @@ -296,7 +301,6 @@ public void defaultBehaviorWhenCursorSettingIsDisabled() throws IOException { wipeAllClusterSettings(); } - @Test public void testCursorSettings() throws IOException { // Assert default cursor settings 
@@ -307,13 +311,11 @@ public void testCursorSettings() throws IOException { new ClusterSetting(PERSISTENT, Settings.Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), "200s")); clusterSettings = getAllClusterSettings(); - assertThat(clusterSettings.query("/persistent/plugins.sql.cursor.keep_alive"), - equalTo("200s")); + assertThat(clusterSettings.query("/persistent/plugins.sql.cursor.keep_alive"), equalTo("200s")); wipeAllClusterSettings(); } - @Ignore("Breaking change for OpenSearch: no pagination if fetch_size field absent in request") @Test public void testDefaultFetchSizeFromClusterSettings() throws IOException { @@ -339,8 +341,9 @@ public void testDefaultFetchSizeFromClusterSettings() throws IOException { public void testCursorCloseAPI() throws IOException { // multiple invocation of closing cursor should return success // fetch page using old cursor should throw error - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance > 100 and age < 40", TEST_INDEX_ACCOUNT); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance > 100 and age < 40", TEST_INDEX_ACCOUNT); JSONObject result = new JSONObject(executeFetchQuery(selectQuery, 50, JDBC)); String cursor = result.getString(CURSOR); verifyIsV2Cursor(result); @@ -350,11 +353,11 @@ public void testCursorCloseAPI() throws IOException { cursor = result.optString(CURSOR); verifyIsV2Cursor(result); } - //Closing the cursor + // Closing the cursor JSONObject closeResp = executeCursorCloseQuery(cursor); assertThat(closeResp.getBoolean("succeeded"), equalTo(true)); - //Closing the cursor multiple times is idempotent + // Closing the cursor multiple times is idempotent for (int i = 0; i < 5; i++) { closeResp = executeCursorCloseQuery(cursor); assertThat(closeResp.getBoolean("succeeded"), equalTo(true)); @@ -371,8 +374,7 @@ public void testCursorCloseAPI() throws IOException { JSONObject resp = new JSONObject(TestUtils.getResponseBody(response)); 
assertThat(resp.getInt("status"), equalTo(404)); assertThat(resp.query("/error/reason").toString(), containsString("all shards failed")); - assertThat(resp.query("/error/details").toString(), - containsString("No search context found")); + assertThat(resp.query("/error/details").toString(), containsString("No search context found")); assertThat(resp.query("/error/type"), equalTo("SearchPhaseExecutionException")); } @@ -395,9 +397,9 @@ public void invalidCursorIdNotDecodable() throws IOException { } /** - * The index has 1000 records, with fetch size of 50 and LIMIT in place - * we should get Math.ceil(limit/fetchSize) pages and LIMIT number of rows. - * Basically it should not retrieve all records in presence of a smaller LIMIT value. + * The index has 1000 records, with fetch size of 50 and LIMIT in place we should get + * Math.ceil(limit/fetchSize) pages and LIMIT number of rows. Basically it should not retrieve all + * records in presence of a smaller LIMIT value. */ @Test public void respectLimitPassedInSelectClause() throws IOException { @@ -422,7 +424,6 @@ public void respectLimitPassedInSelectClause() throws IOException { assertThat(actualDataRowCount, equalTo(limit)); } - @Test public void noPaginationWithNonJDBCFormat() throws IOException { // checking for CSV, RAW format @@ -439,10 +440,9 @@ public void noPaginationWithNonJDBCFormat() throws IOException { assertThat(rows.length, equalTo(1000)); } - - public void verifyWithAndWithoutPaginationResponse(String sqlQuery, String cursorQuery, - int fetch_size, boolean shouldFallBackToV1) - throws IOException { + public void verifyWithAndWithoutPaginationResponse( + String sqlQuery, String cursorQuery, int fetch_size, boolean shouldFallBackToV1) + throws IOException { // we are only checking here for schema and datarows JSONObject withoutCursorResponse = new JSONObject(executeFetchQuery(sqlQuery, 0, JDBC)); @@ -473,10 +473,10 @@ public void verifyWithAndWithoutPaginationResponse(String sqlQuery, String curso } } 
- verifySchema(withoutCursorResponse.optJSONArray(SCHEMA), - withCursorResponse.optJSONArray(SCHEMA)); - verifyDataRows(withoutCursorResponse.optJSONArray(DATAROWS), - withCursorResponse.optJSONArray(DATAROWS)); + verifySchema( + withoutCursorResponse.optJSONArray(SCHEMA), withCursorResponse.optJSONArray(SCHEMA)); + verifyDataRows( + withoutCursorResponse.optJSONArray(DATAROWS), withCursorResponse.optJSONArray(DATAROWS)); } public void verifySchema(JSONArray schemaOne, JSONArray schemaTwo) { @@ -504,14 +504,14 @@ private void verifyIsV1Cursor(String cursor) { if (cursor.isEmpty()) { return; } - assertTrue("The cursor '" + cursor.substring(0, 50) + "...' is not from v1 engine.", cursor.startsWith("d:")); + assertTrue( + "The cursor '" + cursor.substring(0, 50) + "...' is not from v1 engine.", + cursor.startsWith("d:")); } private String makeRequest(String query, String fetch_size) { - return String.format("{" + - " \"fetch_size\": \"%s\"," + - " \"query\": \"%s\"" + - "}", fetch_size, query); + return String.format( + "{" + " \"fetch_size\": \"%s\"," + " \"query\": \"%s\"" + "}", fetch_size, query); } private JSONObject executeJDBCRequest(String requestBody) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java index a0b4b19898..63d37dbad1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.contains; @@ -33,8 +32,7 @@ public class DateFormatIT extends SQLIntegTestCase { private static final String SELECT_FROM = - "SELECT insert_time " + - "FROM " + TestsConstants.TEST_INDEX_ONLINE + " "; + "SELECT insert_time " + "FROM " + TestsConstants.TEST_INDEX_ONLINE + " "; @Override protected void init() throws Exception { @@ 
-42,21 +40,20 @@ protected void init() throws Exception { } /** - * All of the following tests use UTC as their date_format timezone as this is the same timezone of the data - * being queried. This is to prevent discrepancies in the OpenSearch query and the actual field data that is - * being checked for the integration tests. - *

- * Large LIMIT values were given for some of these queries since the default result size of the query is 200 and - * this ends up excluding some of the expected values causing the assertion to fail. LIMIT overrides this. + * All the following tests use UTC as their date_format timezone as this is the same timezone + * of the data being queried. This is to prevent discrepancies in the OpenSearch query and the + * actual field data that is being checked for the integration tests. + * + *

Large LIMIT values were given for some of these queries since the default result size of the + * query is 200 and this ends up excluding some of the expected values causing the assertion to + * fail. LIMIT overrides this. */ - @Test public void equalTo() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') = '2014-08-17'"), - contains("2014-08-17") - ); + contains("2014-08-17")); } @Test @@ -64,19 +61,18 @@ public void lessThan() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18'"), - contains("2014-08-17") - ); + contains("2014-08-17")); } @Test public void lessThanOrEqualTo() throws SqlParseException { assertThat( dateQuery( - SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-18' " + - "ORDER BY insert_time " + - "LIMIT 1000"), - contains("2014-08-17", "2014-08-18") - ); + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-18' " + + "ORDER BY insert_time " + + "LIMIT 1000"), + contains("2014-08-17", "2014-08-18")); } @Test @@ -84,92 +80,101 @@ public void greaterThan() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23'"), - contains("2014-08-24") - ); + contains("2014-08-24")); } @Test public void greaterThanOrEqualTo() throws SqlParseException { assertThat( dateQuery( - SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 2000"), - contains("2014-08-23", "2014-08-24") - ); + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 2000"), + contains("2014-08-23", "2014-08-24")); } @Test public void and() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-21' " + - 
"AND date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 3000"), - contains("2014-08-21", "2014-08-22", "2014-08-23") - ); + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-21' " + + "AND date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 3000"), + contains("2014-08-21", "2014-08-22", "2014-08-23")); } @Test public void andWithDefaultTimeZone() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + - "AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'", + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + "AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'", "yyyy-MM-dd HH:mm:ss"), - contains("2014-08-17 16:13:12") - ); + contains("2014-08-17 16:13:12")); } @Test public void or() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18' " + - "OR date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 1000"), - contains("2014-08-17", "2014-08-24") - ); + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18' " + + "OR date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 1000"), + contains("2014-08-17", "2014-08-24")); } - @Test public void sortByDateFormat() throws IOException { - // Sort by expression in descending order, but sort inside in ascending order, so we increase our confidence + // Sort by expression in descending order, but sort inside in ascending order, so we increase + // our confidence // that successful test isn't just random chance. 
JSONArray hits = - getHits(executeQuery("SELECT all_client, insert_time " + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " ORDER BY date_format(insert_time, 'dd-MM-YYYY', 'UTC') DESC, insert_time " + - " LIMIT 10")); + getHits( + executeQuery( + "SELECT all_client, insert_time " + + " FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " ORDER BY date_format(insert_time, 'dd-MM-YYYY', 'UTC') DESC, insert_time " + + " LIMIT 10")); - assertThat(new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), + assertThat( + new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), is(new DateTime("2014-08-24T00:00:41.221Z", DateTimeZone.UTC))); } @Test public void sortByAliasedDateFormat() throws IOException { JSONArray hits = - getHits(executeQuery( - "SELECT all_client, insert_time, date_format(insert_time, 'dd-MM-YYYY', 'UTC') date" + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " ORDER BY date DESC, insert_time " + - " LIMIT 10")); + getHits( + executeQuery( + "SELECT all_client, insert_time, date_format(insert_time, 'dd-MM-YYYY', 'UTC')" + + " date FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " ORDER BY date DESC, insert_time " + + " LIMIT 10")); - assertThat(new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), + assertThat( + new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), is(new DateTime("2014-08-24T00:00:41.221Z", DateTimeZone.UTC))); } @Ignore("skip this test due to inconsistency in type in new engine") @Test public void selectDateTimeWithDefaultTimeZone() throws SqlParseException { - JSONObject response = executeJdbcRequest("SELECT date_format(insert_time, 'yyyy-MM-dd') as date " + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + - " AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'"); + JSONObject response = + 
executeJdbcRequest( + "SELECT date_format(insert_time, 'yyyy-MM-dd') as date " + + " FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + " AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'"); verifySchema(response, schema("date", "", "text")); verifyDataRows(response, rows("2014-08-17")); @@ -177,52 +182,57 @@ public void selectDateTimeWithDefaultTimeZone() throws SqlParseException { @Test public void groupByAndSort() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date_format(insert_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(insert_time, 'dd-MM-YYYY') DESC") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date_format(insert_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') DESC") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date_format", Ordering.natural().reverse()); } @Test public void groupByAndSortAliasedReversed() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date " + - "ORDER BY date DESC") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date " + + "ORDER BY date DESC") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date", Ordering.natural().reverse()); } @Test public void groupByAndSortAliased() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + - "FROM 
opensearch-sql_test_index_online " + - "GROUP BY date " + - "ORDER BY date ") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date " + + "ORDER BY date ") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date", Ordering.natural()); } - private void checkAggregations(JSONObject aggregations, String key, - Ordering ordering) { + private void checkAggregations( + JSONObject aggregations, String key, Ordering ordering) { String date = getScriptAggregationKey(aggregations, key); JSONArray buckets = aggregations.getJSONObject(date).getJSONArray("buckets"); assertThat(buckets.length(), is(8)); - List aggregationSortKeys = IntStream.range(0, 8) - .mapToObj(index -> buckets.getJSONObject(index).getString("key")) - .collect(Collectors.toList()); + List aggregationSortKeys = + IntStream.range(0, 8) + .mapToObj(index -> buckets.getJSONObject(index).getString("key")) + .collect(Collectors.toList()); - assertTrue("The query result must be sorted by date in descending order", + assertTrue( + "The query result must be sorted by date in descending order", ordering.isOrdered(aggregationSortKeys)); } @@ -239,7 +249,8 @@ private Set dateQuery(String sql, String format) throws SqlParseExceptio } } - private Set getResult(JSONObject response, String fieldName, DateTimeFormatter formatter) { + private Set getResult( + JSONObject response, String fieldName, DateTimeFormatter formatter) { JSONArray hits = getHits(response); Set result = new TreeSet<>(); // Using TreeSet so order is maintained for (int i = 0; i < hits.length(); i++) { @@ -255,11 +266,11 @@ private Set getResult(JSONObject response, String fieldName, DateTimeFor } public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { - return aggregation.keySet() - .stream() + return aggregation.keySet().stream() .filter(x -> x.startsWith(prefix)) 
.findFirst() - .orElseThrow(() -> new RuntimeException( - "Can't find key" + prefix + " in aggregation " + aggregation)); + .orElseThrow( + () -> + new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java index 369984d0a3..d9a6849fc8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -31,13 +30,13 @@ public class DateFunctionsIT extends SQLIntegTestCase { private static final String FROM = "FROM " + TestsConstants.TEST_INDEX_ONLINE; /** - * Some of the first few SQL functions are tested in both SELECT and WHERE cases for flexibility and the remainder - * are merely tested in SELECT for simplicity. - *

- * There is a limitation in all date SQL functions in that they expect a date field as input. In the future this - * can be expanded on by supporting CAST and casting dates given as Strings to TIMESTAMP (SQL's date type). + * Some of the first few SQL functions are tested in both SELECT and WHERE cases for flexibility + * and the remainder are merely tested in SELECT for simplicity. + * + *

There is a limitation in all date SQL functions in that they expect a date field as input. + * In the future this can be expanded on by supporting CAST and casting dates given as Strings to + * TIMESTAMP (SQL's date type). */ - @Override protected void init() throws Exception { loadIndex(Index.ONLINE); @@ -45,9 +44,7 @@ protected void init() throws Exception { @Test public void year() throws IOException { - SearchHit[] hits = query( - "SELECT YEAR(insert_time) as year" - ); + SearchHit[] hits = query("SELECT YEAR(insert_time) as year"); for (SearchHit hit : hits) { int year = (int) getField(hit, "year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -57,9 +54,7 @@ public void year() throws IOException { @Test public void monthOfYear() throws IOException { - SearchHit[] hits = query( - "SELECT MONTH_OF_YEAR(insert_time) as month_of_year" - ); + SearchHit[] hits = query("SELECT MONTH_OF_YEAR(insert_time) as month_of_year"); for (SearchHit hit : hits) { int monthOfYear = (int) getField(hit, "month_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -69,9 +64,7 @@ public void monthOfYear() throws IOException { @Test public void weekOfYearInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT WEEK_OF_YEAR(insert_time) as week_of_year" - ); + SearchHit[] hits = query("SELECT WEEK_OF_YEAR(insert_time) as week_of_year"); for (SearchHit hit : hits) { int weekOfYear = (int) getField(hit, "week_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -81,12 +74,12 @@ public void weekOfYearInSelect() throws IOException { @Test public void weekOfYearInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", - "WHERE DATE_FORMAT(insert_time, 'YYYY-MM-dd') < '2014-08-19' AND " + - "WEEK_OF_YEAR(insert_time) > 33", - "LIMIT 2000" - ); + SearchHit[] hits = + query( + "SELECT insert_time", + "WHERE DATE_FORMAT(insert_time, 'YYYY-MM-dd') < '2014-08-19' AND " + + 
"WEEK_OF_YEAR(insert_time) > 33", + "LIMIT 2000"); for (SearchHit hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.weekOfWeekyear().get(), greaterThan(33)); @@ -95,9 +88,7 @@ public void weekOfYearInWhere() throws IOException { @Test public void dayOfYearInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_YEAR(insert_time) as day_of_year", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_YEAR(insert_time) as day_of_year", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfYear = (int) getField(hit, "day_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -107,9 +98,8 @@ public void dayOfYearInSelect() throws IOException { @Test public void dayOfYearInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", "WHERE DAY_OF_YEAR(insert_time) < 233", "LIMIT 10000" - ); + SearchHit[] hits = + query("SELECT insert_time", "WHERE DAY_OF_YEAR(insert_time) < 233", "LIMIT 10000"); for (SearchHit hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.dayOfYear().get(), lessThan(233)); @@ -118,9 +108,7 @@ public void dayOfYearInWhere() throws IOException { @Test public void dayOfMonthInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_MONTH(insert_time) as day_of_month", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_MONTH(insert_time) as day_of_month", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfMonth = (int) getField(hit, "day_of_month"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -130,9 +118,8 @@ public void dayOfMonthInSelect() throws IOException { @Test public void dayOfMonthInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", "WHERE DAY_OF_MONTH(insert_time) < 21", "LIMIT 10000" - ); + SearchHit[] hits = + query("SELECT insert_time", "WHERE DAY_OF_MONTH(insert_time) < 21", "LIMIT 10000"); for (SearchHit 
hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.dayOfMonth().get(), lessThan(21)); @@ -141,9 +128,7 @@ public void dayOfMonthInWhere() throws IOException { @Test public void dayOfWeek() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_WEEK(insert_time) as day_of_week", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_WEEK(insert_time) as day_of_week", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfWeek = (int) getField(hit, "day_of_week"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -153,9 +138,7 @@ public void dayOfWeek() throws IOException { @Test public void hourOfDay() throws IOException { - SearchHit[] hits = query( - "SELECT HOUR_OF_DAY(insert_time) as hour_of_day", "LIMIT 1000" - ); + SearchHit[] hits = query("SELECT HOUR_OF_DAY(insert_time) as hour_of_day", "LIMIT 1000"); for (SearchHit hit : hits) { int hourOfDay = (int) getField(hit, "hour_of_day"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -165,9 +148,7 @@ public void hourOfDay() throws IOException { @Test public void minuteOfDay() throws IOException { - SearchHit[] hits = query( - "SELECT MINUTE_OF_DAY(insert_time) as minute_of_day", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MINUTE_OF_DAY(insert_time) as minute_of_day", "LIMIT 500"); for (SearchHit hit : hits) { int minuteOfDay = (int) getField(hit, "minute_of_day"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -177,9 +158,7 @@ public void minuteOfDay() throws IOException { @Test public void minuteOfHour() throws IOException { - SearchHit[] hits = query( - "SELECT MINUTE_OF_HOUR(insert_time) as minute_of_hour", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MINUTE_OF_HOUR(insert_time) as minute_of_hour", "LIMIT 500"); for (SearchHit hit : hits) { int minuteOfHour = (int) getField(hit, "minute_of_hour"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -189,9 +168,8 @@ public void 
minuteOfHour() throws IOException { @Test public void secondOfMinute() throws IOException { - SearchHit[] hits = query( - "SELECT SECOND_OF_MINUTE(insert_time) as second_of_minute", "LIMIT 500" - ); + SearchHit[] hits = + query("SELECT SECOND_OF_MINUTE(insert_time) as second_of_minute", "LIMIT 500"); for (SearchHit hit : hits) { int secondOfMinute = (int) getField(hit, "second_of_minute"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -201,9 +179,7 @@ public void secondOfMinute() throws IOException { @Test public void month() throws IOException { - SearchHit[] hits = query( - "SELECT MONTH(insert_time) AS month", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MONTH(insert_time) AS month", "LIMIT 500"); for (SearchHit hit : hits) { int month = (int) getField(hit, "month"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -213,9 +189,7 @@ public void month() throws IOException { @Test public void dayofmonth() throws IOException { - SearchHit[] hits = query( - "SELECT DAYOFMONTH(insert_time) AS dayofmonth", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT DAYOFMONTH(insert_time) AS dayofmonth", "LIMIT 500"); for (SearchHit hit : hits) { int dayofmonth = (int) getField(hit, "dayofmonth"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -225,9 +199,7 @@ public void dayofmonth() throws IOException { @Test public void date() throws IOException { - SearchHit[] hits = query( - "SELECT DATE(insert_time) AS date", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT DATE(insert_time) AS date", "LIMIT 500"); for (SearchHit hit : hits) { String date = (String) getField(hit, "date"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -237,9 +209,7 @@ public void date() throws IOException { @Test public void monthname() throws IOException { - SearchHit[] hits = query( - "SELECT MONTHNAME(insert_time) AS monthname", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MONTHNAME(insert_time) AS monthname", "LIMIT 500"); 
for (SearchHit hit : hits) { String monthname = (String) getField(hit, "monthname"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -249,9 +219,7 @@ public void monthname() throws IOException { @Test public void timestamp() throws IOException { - SearchHit[] hits = query( - "SELECT TIMESTAMP(insert_time) AS timestamp", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT TIMESTAMP(insert_time) AS timestamp", "LIMIT 500"); for (SearchHit hit : hits) { String timastamp = (String) getField(hit, "timestamp"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -284,14 +252,16 @@ private SearchHit[] query(String select, String... statements) throws IOExceptio return execute(select + " " + FROM + " " + String.join(" ", statements)); } - // TODO: I think this code is now re-used in multiple classes, would be good to move to the base class. + // TODO: I think this code is now re-used in multiple classes, would be good to move to the base + // class. private SearchHit[] execute(String sqlRequest) throws IOException { final JSONObject jsonObject = executeRequest(makeRequest(sqlRequest)); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits().getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java index 4fad5a23b7..24895b5b69 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static 
org.hamcrest.core.IsEqual.equalTo; @@ -20,8 +19,8 @@ public class DeleteIT extends SQLIntegTestCase { protected void init() throws Exception { loadIndex(Index.ACCOUNT); loadIndex(Index.PHRASE); - updateClusterSettings(new ClusterSetting(PERSISTENT, - Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "true")); + updateClusterSettings( + new ClusterSetting(PERSISTENT, Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "true")); } @Test @@ -34,7 +33,8 @@ public void deleteAllTest() throws IOException, InterruptedException { response = executeRequest(makeRequest(deleteQuery)); assertThat(response.getInt("deleted"), equalTo(totalHits)); - // The documents are not deleted immediately, causing the next search call to return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -44,20 +44,21 @@ public void deleteAllTest() throws IOException, InterruptedException { @Test public void deleteWithConditionTest() throws IOException, InterruptedException { - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); JSONObject response = executeRequest(makeRequest(selectQuery)); int totalHits = getTotalHits(response); - String deleteQuery = StringUtils.format( - "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String deleteQuery = + StringUtils.format( + "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); response = executeRequest(makeRequest(deleteQuery)); assertThat(response.getInt("deleted"), equalTo(totalHits)); - // The documents are not deleted immediately, causing the next search call to 
return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -84,7 +85,8 @@ public void deleteAllWithJdbcFormat() throws IOException, InterruptedException { assertThat(response.query("/status"), equalTo(200)); assertThat(response.query("/size"), equalTo(1)); - // The documents are not deleted immediately, causing the next search call to return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -98,18 +100,18 @@ public void deleteAllWithJdbcFormat() throws IOException, InterruptedException { @Test public void deleteWithConditionTestJdbcFormat() throws IOException, InterruptedException { - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); JSONObject response = executeRequest(makeRequest(selectQuery)); int totalHits = getTotalHits(response); - String deleteQuery = StringUtils.format( - "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String deleteQuery = + StringUtils.format( + "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); response = new JSONObject(executeQuery(deleteQuery, "jdbc")); System.out.println(response); @@ -120,7 +122,8 @@ public void deleteWithConditionTestJdbcFormat() throws IOException, InterruptedE assertThat(response.query("/status"), equalTo(200)); assertThat(response.query("/size"), equalTo(1)); - // The documents are not deleted immediately, causing the next search call to return all results. 
+ // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java index 4ecabdbf01..b42e9f84f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -40,16 +39,20 @@ protected void init() throws Exception { @Test public void searchSanity() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/search_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT * FROM %s WHERE firstname LIKE 'A%%' AND age > 20 " + - "GROUP BY gender order by _score", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT * FROM %s WHERE firstname LIKE 'A%%' AND age > 20 " + + "GROUP BY gender order by _score", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } // This test was ignored because group by case function is not supported @@ -57,118 +60,153 @@ public void searchSanity() throws IOException { @Test public void aggregationQuery() throws 
IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/aggregation_query_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - String result = explainQuery( - String.format("SELECT address, CASE WHEN gender='0' then 'aaa' else 'bbb'end a2345," + - "count(age) FROM %s GROUP BY terms('field'='address','execution_hint'='global_ordinals'),a2345", - TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/aggregation_query_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String result = + explainQuery( + String.format( + "SELECT address, CASE WHEN gender='0' then 'aaa' else 'bbb'end a2345,count(age)" + + " FROM %s GROUP BY" + + " terms('field'='address','execution_hint'='global_ordinals'),a2345", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void explainScriptValue() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/script_value.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/script_value.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); - String result = explainQuery(String.format("SELECT case when gender is null then 'aaa' " + - "else gender end test , account_number FROM %s", TEST_INDEX_ACCOUNT)); - Assert - 
.assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT case when gender is null then 'aaa' " + + "else gender end test , account_number FROM %s", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void betweenScriptValue() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/between_query.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/between_query.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT case when balance between 100 and 200 then 'aaa' " + - "else balance end test, account_number FROM %s", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT case when balance between 100 and 200 then 'aaa' " + + "else balance end test, account_number FROM %s", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void searchSanityFilter() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_explain_filter.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/search_explain_filter.json"); + String expectedOutput = + Files.toString(new 
File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); - String result = explainQuery(String.format("SELECT * FROM %s WHERE firstname LIKE 'A%%' " + - "AND age > 20 GROUP BY gender", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT * FROM %s WHERE firstname LIKE 'A%%' " + "AND age > 20 GROUP BY gender", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void deleteSanity() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/delete_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/delete_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); ; String result = - explainQuery(String.format("DELETE FROM %s WHERE firstname LIKE 'A%%' AND age > 20", - TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "DELETE FROM %s WHERE firstname LIKE 'A%%' AND age > 20", TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void spatialFilterExplainTest() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_spatial_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + 
"src/test/resources/expectedOutput/search_spatial_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); ; - String result = explainQuery(String.format("SELECT * FROM %s WHERE GEO_INTERSECTS" + - "(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", TEST_INDEX_LOCATION)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT * FROM %s WHERE GEO_INTERSECTS" + + "(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", + TEST_INDEX_LOCATION)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void orderByOnNestedFieldTest() throws Exception { String result = - explainQuery(String.format("SELECT * FROM %s ORDER BY NESTED('message.info','message')", - TEST_INDEX_NESTED_TYPE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - equalTo("{\"from\":0,\"size\":200,\"sort\":[{\"message.info\":" + - "{\"order\":\"asc\",\"nested\":{\"path\":\"message\"}}}]}")); + explainQuery( + String.format( + "SELECT * FROM %s ORDER BY NESTED('message.info','message')", + TEST_INDEX_NESTED_TYPE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + equalTo( + "{\"from\":0,\"size\":200,\"sort\":[{\"message.info\":" + + "{\"order\":\"asc\",\"nested\":{\"path\":\"message\"}}}]}")); } @Test public void multiMatchQuery() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/multi_match_query.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/multi_match_query.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + 
.replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT * FROM %s WHERE multimatch('query'='this is a test'," + - "'fields'='subject^3,message','analyzer'='standard','type'='best_fields','boost'=1.0," + - "'slop'=0,'tie_breaker'=0.3,'operator'='and')", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT * FROM %s WHERE multimatch('query'='this is a test'," + + "'fields'='subject^3,message','analyzer'='standard','type'='best_fields','boost'=1.0," + + "'slop'=0,'tie_breaker'=0.3,'operator'='and')", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test @@ -180,36 +218,49 @@ public void termsIncludeExcludeExplainTest() throws IOException { final String expected3 = "\"include\":{\"partition\":0,\"num_partitions\":20}"; String result = - explainQuery(queryPrefix + " terms('field'='correspond_brand_name','size'='10'," + - "'alias'='correspond_brand_name','include'='\\\".*sport.*\\\"','exclude'='\\\"water_.*\\\"')"); + explainQuery( + queryPrefix + + " terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='\\\".*sport.*\\\"','exclude'='\\\"water_.*\\\"')"); Assert.assertThat(result, containsString(expected1)); - result = explainQuery(queryPrefix + "terms('field'='correspond_brand_name','size'='10'," + - "'alias'='correspond_brand_name','include'='[\\\"mazda\\\", \\\"honda\\\"]'," + - "'exclude'='[\\\"rover\\\", \\\"jensen\\\"]')"); + result = + explainQuery( + queryPrefix + + "terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='[\\\"mazda\\\", \\\"honda\\\"]'," + + "'exclude'='[\\\"rover\\\", \\\"jensen\\\"]')"); Assert.assertThat(result, containsString(expected2)); - result = explainQuery(queryPrefix + "terms('field'='correspond_brand_name','size'='10'," + - 
"'alias'='correspond_brand_name','include'='{\\\"partition\\\":0,\\\"num_partitions\\\":20}')"); + result = + explainQuery( + queryPrefix + + "terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='{\\\"partition\\\":0,\\\"num_partitions\\\":20}')"); Assert.assertThat(result, containsString(expected3)); } @Test public void explainNLJoin() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/nested_loop_join_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - String query = "SELECT /*! USE_NL*/ a.firstname ,a.lastname , a.gender ,d.dog_name FROM " + - TEST_INDEX_PEOPLE + "/people a JOIN " + TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname" + - " WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1"; + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/nested_loop_join_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String query = + "SELECT /*! 
USE_NL*/ a.firstname ,a.lastname , a.gender ,d.dog_name FROM " + + TEST_INDEX_PEOPLE + + "/people a JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname" + + " WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1"; String result = explainQuery(query); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } public void testContentTypeOfExplainRequestShouldBeJson() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java index e23753bbd2..81edb54556 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -14,13 +13,10 @@ import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; -/** - * Tests to cover requests with "?format=csv" parameter - */ +/** Tests to cover requests with "?format=csv" parameter */ public class GetEndpointQueryIT extends SQLIntegTestCase { - @Rule - public ExpectedException rule = ExpectedException.none(); + @Rule public ExpectedException rule = ExpectedException.none(); @Override protected void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java index f796010bbe..02c55d8eb8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; 
import static org.hamcrest.Matchers.equalTo; @@ -22,35 +21,23 @@ import org.junit.Assert; import org.junit.Test; -/** - * Test new hash join algorithm by comparison with old implementation. - */ +/** Test new hash join algorithm by comparison with old implementation. */ public class HashJoinIT extends SQLIntegTestCase { - /** - * Hint to use old join algorithm - */ + /** Hint to use old join algorithm */ private static final String USE_OLD_JOIN_ALGORITHM = "/*! USE_NL*/"; - /** - * Set limit to 100% to bypass circuit break check - */ + /** Set limit to 100% to bypass circuit break check */ private static final String BYPASS_CIRCUIT_BREAK = "/*! JOIN_CIRCUIT_BREAK_LIMIT(100)*/"; - /** - * Enable term filter optimization - */ + /** Enable term filter optimization */ private static final String ENABLE_TERMS_FILTER = "/*! HASH_WITH_TERMS_FILTER*/"; - /** - * Default page size is greater than block size - */ + /** Default page size is greater than block size */ private static final String PAGE_SIZE_GREATER_THAN_BLOCK_SIZE = "/*! JOIN_ALGORITHM_BLOCK_SIZE(5)*/"; - /** - * Page size is smaller than block size - */ + /** Page size is smaller than block size */ private static final String PAGE_SIZE_LESS_THAN_BLOCK_SIZE = "/*! JOIN_ALGORITHM_BLOCK_SIZE(5)*/ /*! 
JOIN_SCROLL_PAGE_SIZE(2)*/"; @@ -75,14 +62,16 @@ public void leftJoin() throws IOException { @Test public void innerJoinUnexpandedObjectField() { - String query = String.format(Locale.ROOT, - "SELECT " + - "a.id.serial, b.id.serial " + - "FROM %1$s AS a " + - "JOIN %1$s AS b " + - "ON a.id.serial = b.attributes.hardware.correlate_id " + - "WHERE b.attributes.hardware.platform = 'Linux' ", - TEST_INDEX_UNEXPANDED_OBJECT); + String query = + String.format( + Locale.ROOT, + "SELECT " + + "a.id.serial, b.id.serial " + + "FROM %1$s AS a " + + "JOIN %1$s AS b " + + "ON a.id.serial = b.attributes.hardware.correlate_id " + + "WHERE b.attributes.hardware.platform = 'Linux' ", + TEST_INDEX_UNEXPANDED_OBJECT); JSONObject response = executeJdbcRequest(query); verifyDataRows(response, rows(3, 1), rows(3, 3)); @@ -135,8 +124,8 @@ private void testJoin(final String join) throws IOException { // TODO: reduce the balance threshold to 10000 when the memory circuit breaker issue // (https://github.com/opendistro-for-elasticsearch/sql/issues/73) is fixed. final String querySuffixTemplate = - "a.firstname, a.lastname, b.city, b.state FROM %1$s a %2$s %1$s b " + - "ON b.age = a.age WHERE a.balance > 45000 AND b.age > 25 LIMIT 1000000"; + "a.firstname, a.lastname, b.city, b.state FROM %1$s a %2$s %1$s b " + + "ON b.age = a.age WHERE a.balance > 45000 AND b.age > 25 LIMIT 1000000"; final String querySuffix = String.format(Locale.ROOT, querySuffixTemplate, TEST_INDEX_ACCOUNT, join); @@ -152,10 +141,11 @@ private void testJoinWithObjectField(final String join, final String hint) throw // TODO: reduce the balance threshold to 10000 when the memory circuit breaker issue // (https://github.com/opendistro-for-elasticsearch/sql/issues/73) is fixed. 
- final String querySuffixTemplate = "c.name.firstname, c.name.lastname, f.hname, f.seat " + - "FROM %1$s c %2$s %1$s f ON f.gender.keyword = c.gender.keyword " + - "AND f.house.keyword = c.house.keyword " + - "WHERE c.gender = 'M' LIMIT 1000000"; + final String querySuffixTemplate = + "c.name.firstname, c.name.lastname, f.hname, f.seat " + + "FROM %1$s c %2$s %1$s f ON f.gender.keyword = c.gender.keyword " + + "AND f.house.keyword = c.house.keyword " + + "WHERE c.gender = 'M' LIMIT 1000000"; final String querySuffix = String.format(Locale.ROOT, querySuffixTemplate, TEST_INDEX_GAME_OF_THRONES, join); @@ -180,14 +170,16 @@ private void executeAndCompareOldAndNewJoins(final String oldQuery, final String Set idsOld = new HashSet<>(); - hitsOld.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - idsOld.add(hit.getString("_id")); - }); - - hitsNew.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - Assert.assertTrue(idsOld.contains(hit.getString("_id"))); - }); + hitsOld.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + idsOld.add(hit.getString("_id")); + }); + + hitsNew.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + Assert.assertTrue(idsOld.contains(hit.getString("_id"))); + }); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java index 34e6af02b4..3bd2195a89 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.arrayContaining; @@ -26,15 +25,19 @@ public class HavingIT extends SQLIntegTestCase { private static final String SELECT_FROM_WHERE_GROUP_BY = - "SELECT state, COUNT(*) cnt " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " " + - "WHERE age = 30 " + - "GROUP BY state "; - - private static 
final Set> states1 = rowSet(1, Arrays.asList( - "AK", "AR", "CT", "DE", "HI", "IA", "IL", "IN", "LA", "MA", "MD", "MN", - "MO", "MT", "NC", "ND", "NE", "NH", "NJ", "NV", "SD", "VT", "WV", "WY" - )); + "SELECT state, COUNT(*) cnt " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " " + + "WHERE age = 30 " + + "GROUP BY state "; + + private static final Set> states1 = + rowSet( + 1, + Arrays.asList( + "AK", "AR", "CT", "DE", "HI", "IA", "IL", "IN", "LA", "MA", "MD", "MN", "MO", "MT", + "NC", "ND", "NE", "NH", "NJ", "NV", "SD", "VT", "WV", "WY")); private static final Set> states2 = rowSet(2, Arrays.asList("AZ", "DC", "KS", "ME")); private static final Set> states3 = @@ -47,118 +50,67 @@ protected void init() throws Exception { @Test public void equalsTo() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 2"), - resultSet( - states2 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 2"), resultSet(states2)); } @Test public void lessThanOrEqual() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <= 2"), - resultSet( - states1, - states2 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <= 2"), resultSet(states1, states2)); } @Test public void notEqualsTo() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <> 2"), - resultSet( - states1, - states3 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <> 2"), resultSet(states1, states3)); } @Test public void between() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt BETWEEN 1 AND 2"), - resultSet( - states1, - states2 - ) - ); + resultSet(states1, states2)); } @Test public void notBetween() throws IOException { assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT BETWEEN 1 AND 2"), - resultSet( - states3 - ) - ); + query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT BETWEEN 1 AND 2"), resultSet(states3)); } @Test public 
void in() throws IOException { assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt IN (2, 3)"), - resultSet( - states2, - states3 - ) - ); + query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt IN (2, 3)"), resultSet(states2, states3)); } @Test public void notIn() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT IN (2, 3)"), - resultSet( - states1 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT IN (2, 3)"), resultSet(states1)); } @Test public void and() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt >= 1 AND cnt < 3"), - resultSet( - states1, - states2 - ) - ); + resultSet(states1, states2)); } @Test public void or() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 1 OR cnt = 3"), - resultSet( - states1, - states3 - ) - ); + resultSet(states1, states3)); } @Test public void not() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT cnt >= 2"), - resultSet( - states1 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT cnt >= 2"), resultSet(states1)); } @Test public void notAndOr() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT (cnt > 0 AND cnt <= 2)"), - resultSet( - states3 - ) - ); + resultSet(states3)); } private Set query(String query) throws IOException { @@ -174,10 +126,8 @@ private Set getResult(JSONObject response, String aggName, String aggF Set result = new HashSet<>(); for (int i = 0; i < buckets.length(); i++) { JSONObject bucket = buckets.getJSONObject(i); - result.add(new Object[] { - bucket.get("key"), - ((JSONObject) bucket.get(aggFunc)).getLong("value") - }); + result.add( + new Object[] {bucket.get("key"), ((JSONObject) bucket.get(aggFunc)).getLong("value")}); } return result; @@ -185,15 +135,12 @@ private Set getResult(JSONObject response, String aggName, String aggF @SafeVarargs private final Matcher> resultSet(Set>... 
rowSets) { - return containsInAnyOrder(Arrays.stream(rowSets) - .flatMap(Collection::stream) - .collect(Collectors.toList())); + return containsInAnyOrder( + Arrays.stream(rowSets).flatMap(Collection::stream).collect(Collectors.toList())); } private static Set> rowSet(long count, List states) { - return states.stream() - .map(state -> row(state, count)) - .collect(Collectors.toSet()); + return states.stream().map(state -> row(state, count)).collect(Collectors.toSet()); } private static Matcher row(String state, long count) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java index dcc90a9acf..b6c0942ba4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.CoreMatchers.anyOf; @@ -35,11 +34,11 @@ protected void init() throws Exception { @Test public void search() throws IOException { int ageToCompare = 25; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > %s " + - "LIMIT 1000\"}", TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"SELECT * FROM %s WHERE age > %s LIMIT 1000\"}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -50,7 +49,7 @@ public void search() throws IOException { @Test public void searchWithFilterAndNoWhere() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM accounts LIMIT 1000", * "filter": { @@ -63,11 +62,14 @@ public void searchWithFilterAndNoWhere() throws 
IOException { * } */ int ageToCompare = 25; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "LIMIT 1000\",\"filter\":{\"range\":{\"age\":{\"gt\":%s}}}}", - TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "LIMIT 1000\",\"filter\":{\"range\":{\"age\":{\"gt\":%s}}}}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -78,7 +80,7 @@ public void searchWithFilterAndNoWhere() throws IOException { @Test public void searchWithRangeFilter() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM accounts WHERE age > 25 LIMIT 1000", * "filter": { @@ -92,12 +94,15 @@ public void searchWithRangeFilter() throws IOException { */ int ageToCompare = 25; int balanceToCompare = 35000; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > %s " + - "LIMIT 1000\",\"filter\":{\"range\":{\"balance\":{\"lt\":%s}}}}", - TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare, balanceToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE age > %s " + + "LIMIT 1000\",\"filter\":{\"range\":{\"balance\":{\"lt\":%s}}}}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare, balanceToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -109,12 +114,12 @@ public void searchWithRangeFilter() throws IOException { @Test /** - * Using TEST_INDEX_NESTED_TYPE here since term filter does not work properly on analyzed fields like text. - * The field 'someField' in TEST_INDEX_NESTED_TYPE is of type keyword. 
+ * Using TEST_INDEX_NESTED_TYPE here since term filter does not work properly on analyzed fields + * like text. The field 'someField' in TEST_INDEX_NESTED_TYPE is of type keyword. */ public void searchWithTermFilter() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM nested_objects WHERE nested(comment.likes) < 3", * "filter": { @@ -126,12 +131,15 @@ public void searchWithTermFilter() throws IOException { */ int likesToCompare = 3; String fieldToCompare = "a"; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE nested(comment.likes) < %s\"," + - "\"filter\":{\"term\":{\"someField\":\"%s\"}}}", - TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, fieldToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE nested(comment.likes) < %s\"," + + "\"filter\":{\"term\":{\"someField\":\"%s\"}}}", + TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, fieldToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int likes = (int) ((Map) hit.getSourceAsMap().get("comment")).get("likes"); @@ -144,7 +152,7 @@ public void searchWithTermFilter() throws IOException { @Test public void searchWithNestedFilter() throws IOException { /* - * Human readable format of the request defined below: + * Human-readable format of the request defined below: * { * "query": "SELECT * FROM nested_objects WHERE nested(comment.likes) > 1", * "filter": { @@ -165,13 +173,16 @@ public void searchWithNestedFilter() throws IOException { */ int likesToCompare = 1; String dataToCompare = "aa"; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE nested(comment.likes) > %s\"," + - "\"filter\":{\"nested\":{\"path\":\"comment\"," + - 
"\"query\":{\"bool\":{\"must\":{\"term\":{\"comment.data\":\"%s\"}}}}}}}", - TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, dataToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE nested(comment.likes) > %s\"," + + "\"filter\":{\"nested\":{\"path\":\"comment\"," + + "\"query\":{\"bool\":{\"must\":{\"term\":{\"comment.data\":\"%s\"}}}}}}}", + TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, dataToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int likes = (int) ((Map) hit.getSourceAsMap().get("comment")).get("likes"); @@ -184,10 +195,11 @@ public void searchWithNestedFilter() throws IOException { private SearchHits query(String request) throws IOException { final JSONObject jsonObject = executeRequest(request); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits(); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java index bd72877e1c..74acad4f52 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -27,9 +26,10 @@ protected void init() throws Exception { } public void testPercentilesQuery() { - JSONObject response = executeJdbcRequest( - "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + - "FROM opensearch-sql_test_index_people"); + 
JSONObject response = + executeJdbcRequest( + "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + + "FROM opensearch-sql_test_index_people"); assertThat(response.getJSONArray("datarows").length(), equalTo(1)); @@ -47,9 +47,10 @@ public void testSlowQuery() throws IOException { // set slow log threshold = 0s updateClusterSettings(new ClusterSetting(PERSISTENT, "plugins.sql.slowlog", "0")); - JSONObject response = executeJdbcRequest( - "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + - "FROM opensearch-sql_test_index_people"); + JSONObject response = + executeJdbcRequest( + "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + + "FROM opensearch-sql_test_index_people"); assertThat(response.getJSONArray("datarows").length(), equalTo(1)); JSONObject percentileRow = (JSONObject) response.query("/datarows/0/0"); @@ -61,42 +62,39 @@ public void testSlowQuery() throws IOException { wipeAllClusterSettings(); } - @Ignore("flaky test, trigger resource not enough exception. " - + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') can't be pushed down ") + @Ignore( + "flaky test, trigger resource not enough exception. " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') can't be pushed down ") public void testDateTimeInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT date_format(insert_time, 'dd-MM-YYYY') " + - "FROM opensearch-sql_test_index_online " + - "ORDER BY date_format(insert_time, 'dd-MM-YYYY') " + - "LIMIT 1" - ); + JSONObject response = + executeJdbcRequest( + "SELECT date_format(insert_time, 'dd-MM-YYYY') " + + "FROM opensearch-sql_test_index_online " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') " + + "LIMIT 1"); assertThat( - response.getJSONArray("datarows") - .getJSONArray(0) - .getString(0), - equalTo("17-08-2014")); + response.getJSONArray("datarows").getJSONArray(0).getString(0), equalTo("17-08-2014")); } - @Ignore("flaky test, trigger resource not enough exception. 
" - + "ORDER BY all_client/10 can't be pushed down ") + @Ignore( + "flaky test, trigger resource not enough exception. " + + "ORDER BY all_client/10 can't be pushed down ") public void testDivisionInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT all_client/10 from opensearch-sql_test_index_online ORDER BY all_client/10 desc limit 1"); + JSONObject response = + executeJdbcRequest( + "SELECT all_client/10 from opensearch-sql_test_index_online ORDER BY all_client/10 desc" + + " limit 1"); - assertThat( - response.getJSONArray("datarows") - .getJSONArray(0) - .getDouble(0), - equalTo(16827.0)); + assertThat(response.getJSONArray("datarows").getJSONArray(0).getDouble(0), equalTo(16827.0)); } public void testGroupByInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT date_format(insert_time, 'YYYY-MM-dd'), COUNT(*) " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date_format(insert_time, 'YYYY-MM-dd')" - ); + JSONObject response = + executeJdbcRequest( + "SELECT date_format(insert_time, 'YYYY-MM-dd'), COUNT(*) " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date_format(insert_time, 'YYYY-MM-dd')"); assertThat(response.getJSONArray("schema").length(), equalTo(2)); assertThat(response.getJSONArray("datarows").length(), equalTo(8)); @@ -105,28 +103,31 @@ public void testGroupByInQuery() { @Test public void numberOperatorNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT ABS(age) FROM opensearch-sql_test_index_account " + - "WHERE age IS NOT NULL ORDER BY age LIMIT 5", "jdbc"), - "ABS(age)" - ); + executeQuery( + "SELECT ABS(age) FROM opensearch-sql_test_index_account " + + "WHERE age IS NOT NULL ORDER BY age LIMIT 5", + "jdbc"), + "ABS(age)"); } @Test public void trigFunctionNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT Cos(age) FROM opensearch-sql_test_index_account " + - "WHERE age is NOT NULL ORDER BY age LIMIT 5", "jdbc"), - "Cos(age)" - ); + executeQuery( + "SELECT 
Cos(age) FROM opensearch-sql_test_index_account " + + "WHERE age is NOT NULL ORDER BY age LIMIT 5", + "jdbc"), + "Cos(age)"); } @Test public void stringOperatorNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT SubStrinG(lastname, 0, 2) FROM opensearch-sql_test_index_account " + - "ORDER BY age LIMIT 5", "jdbc"), - "SubStrinG(lastname, 0, 2)" - ); + executeQuery( + "SELECT SubStrinG(lastname, 0, 2) FROM opensearch-sql_test_index_account " + + "ORDER BY age LIMIT 5", + "jdbc"), + "SubStrinG(lastname, 0, 2)"); } @Ignore("DATE_FORMAT function signature changed in new engine") @@ -134,45 +135,52 @@ public void stringOperatorNameCaseInsensitiveTest() { public void dateFunctionNameCaseInsensitiveTest() { assertTrue( executeQuery( - "SELECT DATE_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') FROM opensearch-sql_test_index_online " + - "WHERE date_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-01-01' " + - "GROUP BY DAte_format(insert_time, 'yyyy-MM-dd', 'UTC') " + - "ORDER BY date_forMAT(insert_time, 'yyyy-MM-dd', 'UTC')", "jdbc").equalsIgnoreCase( - executeQuery( - "SELECT date_format(insert_time, 'yyyy-MM-dd', 'UTC') FROM opensearch-sql_test_index_online " + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-01-01' " + - "GROUP BY date_format(insert_time, 'yyyy-MM-dd', 'UTC') " + - "ORDER BY date_format(insert_time, 'yyyy-MM-dd', 'UTC')", "jdbc") - ) - ); + "SELECT DATE_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') FROM" + + " opensearch-sql_test_index_online WHERE date_FORMAT(insert_time," + + " 'yyyy-MM-dd', 'UTC') > '2014-01-01' GROUP BY DAte_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') ORDER BY date_forMAT(insert_time, 'yyyy-MM-dd'," + + " 'UTC')", + "jdbc") + .equalsIgnoreCase( + executeQuery( + "SELECT date_format(insert_time, 'yyyy-MM-dd', 'UTC') FROM" + + " opensearch-sql_test_index_online WHERE date_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') > '2014-01-01' GROUP BY date_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') ORDER BY 
date_format(insert_time, 'yyyy-MM-dd'," + + " 'UTC')", + "jdbc"))); } @Test public void ipTypeShouldPassJdbcFormatter() { assertThat( - executeQuery("SELECT host AS hostIP FROM " + TestsConstants.TEST_INDEX_WEBLOG - + " ORDER BY hostIP", "jdbc"), - containsString("\"type\": \"ip\"") - ); + executeQuery( + "SELECT host AS hostIP FROM " + TestsConstants.TEST_INDEX_WEBLOG + " ORDER BY hostIP", + "jdbc"), + containsString("\"type\": \"ip\"")); } @Test public void functionWithoutAliasShouldHaveEntireFunctionAsNameInSchema() { assertThat( - executeQuery("SELECT substring(lastname, 1, 2) FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " ORDER BY substring(lastname, 1, 2)", "jdbc"), - containsString("\"name\": \"substring(lastname, 1, 2)\"") - ); + executeQuery( + "SELECT substring(lastname, 1, 2) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY substring(lastname, 1, 2)", + "jdbc"), + containsString("\"name\": \"substring(lastname, 1, 2)\"")); } @Ignore("Handled by v2 engine which returns 'name': 'substring(lastname, 1, 2)' instead") @Test public void functionWithAliasShouldHaveAliasAsNameInSchema() { assertThat( - executeQuery("SELECT substring(lastname, 1, 2) AS substring FROM " - + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY substring", "jdbc"), - containsString("\"name\": \"substring\"") - ); + executeQuery( + "SELECT substring(lastname, 1, 2) AS substring FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY substring", + "jdbc"), + containsString("\"name\": \"substring\"")); } private void assertSchemaContains(String actualResponse, String expected) { @@ -183,7 +191,10 @@ private void assertSchemaContains(String actualResponse, String expected) { return; } } - Assert.fail("Expected field name [" + expected + "] is not found in response schema: " + - actualResponse); + Assert.fail( + "Expected field name [" + + expected + + "] is not found in response schema: " + + actualResponse); } } diff --git 
a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java index 31c77fa7c0..75b2b45df6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -15,18 +14,15 @@ import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; -/** - * Test cases for writing missing join table aliases. - */ +/** Test cases for writing missing join table aliases. */ public class JoinAliasWriterRuleIT extends SQLIntegTestCase { - @Rule - public ExpectedException exception = ExpectedException.none(); + @Rule public ExpectedException exception = ExpectedException.none(); protected void init() throws Exception { - loadIndex(Index.ORDER); // opensearch-sql_test_index_order - loadIndex(Index.BANK); // opensearch-sql_test_index_bank - loadIndex(Index.BANK_TWO); // opensearch-sql_test_index_bank_two + loadIndex(Index.ORDER); // opensearch-sql_test_index_order + loadIndex(Index.BANK); // opensearch-sql_test_index_bank + loadIndex(Index.BANK_TWO); // opensearch-sql_test_index_bank_two } @Test @@ -38,12 +34,14 @@ public void noTableAliasNoCommonColumns() throws IOException { "INNER JOIN opensearch-sql_test_index_bank ", "ON name = firstname WHERE state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_1 ", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - 
"WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -59,8 +57,7 @@ public void oneTableAliasNoCommonColumns() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0 ", "ON a.name = opensearch-sql_test_index_bank_0.firstname ", - "WHERE opensearch-sql_test_index_bank_0.state = 'WA' OR a.id < 7") - ); + "WHERE opensearch-sql_test_index_bank_0.state = 'WA' OR a.id < 7")); } @Test @@ -76,8 +73,7 @@ public void bothTableAliasNoCommonColumns() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7 ") - ); + "WHERE b.state = 'WA' OR a.id < 7 ")); } @Test @@ -90,12 +86,14 @@ public void tableNamesWithTypeName() throws IOException { "INNER JOIN opensearch-sql_test_index_bank/account ", "ON name = firstname WHERE state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order/_doc opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank/_account opensearch-sql_test_index_bank_1 ", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } 
@Ignore @@ -112,8 +110,7 @@ public void tableNamesWithTypeNameExplicitTableAlias() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7") - ); + "WHERE b.state = 'WA' OR a.id < 7")); } @Test @@ -129,8 +126,7 @@ public void actualTableNameAsAliasOnColumnFields() throws IOException { "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON opensearch-sql_test_index_order_0.name = b.firstname ", - "WHERE b.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "WHERE b.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -143,12 +139,14 @@ public void actualTableNameAsAliasOnColumnFieldsTwo() throws IOException { "ON opensearch-sql_test_index_order.name = firstname ", "WHERE opensearch-sql_test_index_bank.state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_1", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -164,44 +162,47 @@ public void columnsWithTableAliasNotAffected() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7") - ); + "WHERE 
b.state = 'WA' OR a.id < 7")); } @Test public void commonColumnWithoutTableAliasDifferentTables() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Field name [firstname] is ambiguous"); - String explain = explainQuery(query( - "SELECT firstname, lastname ", - "FROM opensearch-sql_test_index_bank ", - "LEFT JOIN opensearch-sql_test_index_bank_two ", - "ON firstname = lastname WHERE state = 'VA' " - )); + String explain = + explainQuery( + query( + "SELECT firstname, lastname ", + "FROM opensearch-sql_test_index_bank ", + "LEFT JOIN opensearch-sql_test_index_bank_two ", + "ON firstname = lastname WHERE state = 'VA' ")); } @Test public void sameTablesNoAliasAndNoAliasOnColumns() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Not unique table/alias: [opensearch-sql_test_index_bank]"); - String explain = explainQuery(query( - "SELECT firstname, lastname ", - "FROM opensearch-sql_test_index_bank ", - "LEFT JOIN opensearch-sql_test_index_bank ", - "ON firstname = lastname WHERE state = 'VA' " - )); + String explain = + explainQuery( + query( + "SELECT firstname, lastname ", + "FROM opensearch-sql_test_index_bank ", + "LEFT JOIN opensearch-sql_test_index_bank ", + "ON firstname = lastname WHERE state = 'VA' ")); } @Test public void sameTablesNoAliasWithTableNameAsAliasOnColumns() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Not unique table/alias: [opensearch-sql_test_index_bank]"); - String explain = explainQuery(query( - "SELECT opensearch-sql_test_index_bank.firstname", - "FROM opensearch-sql_test_index_bank ", - "JOIN opensearch-sql_test_index_bank ", - "ON opensearch-sql_test_index_bank.firstname = opensearch-sql_test_index_bank.lastname" - )); + String explain = + explainQuery( + query( + "SELECT opensearch-sql_test_index_bank.firstname", + "FROM opensearch-sql_test_index_bank ", + "JOIN opensearch-sql_test_index_bank ", + "ON 
opensearch-sql_test_index_bank.firstname =" + + " opensearch-sql_test_index_bank.lastname")); } @Test @@ -211,16 +212,12 @@ public void sameTablesWithExplicitAliasOnFirst() throws IOException { "SELECT opensearch-sql_test_index_bank.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank a", "JOIN opensearch-sql_test_index_bank ", - "ON opensearch-sql_test_index_bank.firstname = a.lastname " - ), + "ON opensearch-sql_test_index_bank.firstname = a.lastname "), query( "SELECT opensearch-sql_test_index_bank_0.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank a", "JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0", - "ON opensearch-sql_test_index_bank_0.firstname = a.lastname " - ) - - ); + "ON opensearch-sql_test_index_bank_0.firstname = a.lastname ")); } @Test @@ -230,16 +227,12 @@ public void sameTablesWithExplicitAliasOnSecond() throws IOException { "SELECT opensearch-sql_test_index_bank.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank ", "JOIN opensearch-sql_test_index_bank a", - "ON opensearch-sql_test_index_bank.firstname = a.lastname " - ), + "ON opensearch-sql_test_index_bank.firstname = a.lastname "), query( "SELECT opensearch-sql_test_index_bank_0.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0", "JOIN opensearch-sql_test_index_bank a", - "ON opensearch-sql_test_index_bank_0.firstname = a.lastname " - ) - - ); + "ON opensearch-sql_test_index_bank_0.firstname = a.lastname ")); } private void sameExplain(String actualQuery, String expectedQuery) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java index 46515be134..8019454b77 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -62,10 +61,14 @@ public void joinParseCheckSelectedFieldsSplitNL() throws IOException { @Test public void joinParseWithHintsCheckSelectedFieldsSplitHASH() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + - "a.firstname ,a.lastname, a.gender ,d.dog_name FROM %s a JOIN %s d " + - "ON d.holdersName = a.firstname WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", - TEST_INDEX_PEOPLE, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT /*! HASH_WITH_TERMS_FILTER*/ a.firstname ,a.lastname, a.gender ,d.dog_name FROM" + + " %s a JOIN %s d ON d.holdersName = a.firstname WHERE (a.age > 10 OR a.balance >" + + " 2000) AND d.age > 1", + TEST_INDEX_PEOPLE, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); verifyJoinParseCheckSelectedFieldsSplitResult(result, false); @@ -75,9 +78,9 @@ public void joinParseWithHintsCheckSelectedFieldsSplitHASH() throws IOException // TODO: figure out why explain does not show results from first query in term filter and // fix either the test or the code. 
- //Arrays.asList("daenerys","nanette","virginia","aurelia","mcgee","hattie","elinor","burton").forEach(name -> { + // Arrays.asList("daenerys","nanette","virginia","aurelia","mcgee","hattie","elinor","burton").forEach(name -> { // Assert.assertThat(explanation, containsString(name)); - //}); + // }); } @Test @@ -95,8 +98,11 @@ public void joinWithNoWhereButWithConditionNL() throws IOException { @Test public void joinWithStarHASH() throws IOException { - String query = String.format(Locale.ROOT, "SELECT * FROM %1$s c " + - "JOIN %1$s h ON h.hname = c.house ", TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT * FROM %1$s c " + "JOIN %1$s h ON h.hname = c.house ", + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -269,9 +275,13 @@ public void testLeftJoinWithLimitNL() throws IOException { @Test public void hintMultiSearchCanRunFewTimesNL() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ /*! NL_MULTISEARCH_SIZE(2)*/ " + - "c.name.firstname,c.parents.father,h.hname,h.words FROM %1$s c " + - "JOIN %1$s h", TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT /*! USE_NL*/ /*! 
NL_MULTISEARCH_SIZE(2)*/ " + + "c.name.firstname,c.parents.father,h.hname,h.words FROM %1$s c " + + "JOIN %1$s h", + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -281,9 +291,13 @@ public void hintMultiSearchCanRunFewTimesNL() throws IOException { @Test public void joinWithGeoIntersectNL() throws IOException { - String query = String.format(Locale.ROOT, "SELECT p1.description,p2.description " + - "FROM %s p1 JOIN %s p2 ON GEO_INTERSECTS(p2.place,p1.place)", - TEST_INDEX_LOCATION, TEST_INDEX_LOCATION2); + String query = + String.format( + Locale.ROOT, + "SELECT p1.description,p2.description " + + "FROM %s p1 JOIN %s p2 ON GEO_INTERSECTS(p2.place,p1.place)", + TEST_INDEX_LOCATION, + TEST_INDEX_LOCATION2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -299,11 +313,15 @@ public void joinWithGeoIntersectNL() throws IOException { @Test public void joinWithInQuery() throws IOException { - //TODO: Either change the ON condition field to keyword or create a different subquery - String query = String.format(Locale.ROOT, "SELECT c.gender,c.name.firstname,h.hname,h.words " + - "FROM %1$s c JOIN %1$s h ON h.hname = c.house " + - "WHERE c.name.firstname IN (SELECT holdersName FROM %2$s)", - TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + // TODO: Either change the ON condition field to keyword or create a different subquery + String query = + String.format( + Locale.ROOT, + "SELECT c.gender,c.name.firstname,h.hname,h.words " + + "FROM %1$s c JOIN %1$s h ON h.hname = c.house " + + "WHERE c.name.firstname IN (SELECT holdersName FROM %2$s)", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -327,10 +345,14 @@ public void joinWithOrNL() throws IOException { @Test public void joinWithOrWithTermsFilterOpt() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! 
HASH_WITH_TERMS_FILTER*/ " + - "d.dog_name,c.name.firstname FROM %s c " + - "JOIN %s d ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", - TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + + "d.dog_name,c.name.firstname FROM %s c " + + "JOIN %s d ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); executeQuery(query); String explanation = explainQuery(query); @@ -338,9 +360,8 @@ public void joinWithOrWithTermsFilterOpt() throws IOException { Assert.assertTrue(containsTerm(explanation, "holdersName")); Assert.assertTrue(containsTerm(explanation, "age")); - Arrays.asList("daenerys", "brandon", "eddard", "jaime").forEach( - name -> Assert.assertTrue(explanation.contains(name)) - ); + Arrays.asList("daenerys", "brandon", "eddard", "jaime") + .forEach(name -> Assert.assertTrue(explanation.contains(name))); } @Test @@ -394,26 +415,32 @@ public void leftJoinWithAllFromSecondTableNL() throws IOException { @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderEQ() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.dog_name FROM %s a JOIN %s d " + - "ON a.firstname = d.holdersName WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", - TEST_INDEX_PEOPLE2, TEST_INDEX_DOG2); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! 
USE_NL*/ a.firstname, a.lastname, a.gender, d.dog_name FROM %s a JOIN %s d" + + " ON a.firstname = d.holdersName WHERE (a.age > 10 OR a.balance > 2000) AND d.age" + + " > 1", + TEST_INDEX_PEOPLE2, + TEST_INDEX_DOG2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map match1 = ImmutableMap.of( - "a.firstname", "Daenerys", - "a.lastname", "Targaryen", - "a.gender", "M", - "d.dog_name", "rex"); - Map match2 = ImmutableMap.of( - "a.firstname", "Hattie", - "a.lastname", "Bond", - "a.gender", "M", - "d.dog_name", "snoopy"); + Map match1 = + ImmutableMap.of( + "a.firstname", "Daenerys", + "a.lastname", "Targaryen", + "a.gender", "M", + "d.dog_name", "rex"); + Map match2 = + ImmutableMap.of( + "a.firstname", "Hattie", + "a.lastname", "Bond", + "a.gender", "M", + "d.dog_name", "snoopy"); Assert.assertTrue(hitsInclude(hits, match1)); Assert.assertTrue(hitsInclude(hits, match2)); @@ -422,21 +449,44 @@ public void joinParseCheckSelectedFieldsSplitNLConditionOrderEQ() throws IOExcep @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderGT() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.firstname, d.age FROM " + - "%s a JOIN %s d on a.age < d.age " + - "WHERE (d.firstname = 'Lynn' OR d.firstname = 'Obrien') AND a.firstname = 'Mcgee'", - TEST_INDEX_PEOPLE, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! 
USE_NL*/ a.firstname, a.lastname, a.gender, d.firstname, d.age FROM %s a" + + " JOIN %s d on a.age < d.age WHERE (d.firstname = 'Lynn' OR d.firstname =" + + " 'Obrien') AND a.firstname = 'Mcgee'", + TEST_INDEX_PEOPLE, + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map oneMatch = ImmutableMap.of("a.firstname", "Mcgee", "a.lastname", "Mooney", - "a.gender", "M", "d.firstname", "Obrien", "d.age", 40); - Map secondMatch = ImmutableMap.of("a.firstname", "Mcgee", "a.lastname", "Mooney", - "a.gender", "M", "d.firstname", "Lynn", "d.age", 40); + Map oneMatch = + ImmutableMap.of( + "a.firstname", + "Mcgee", + "a.lastname", + "Mooney", + "a.gender", + "M", + "d.firstname", + "Obrien", + "d.age", + 40); + Map secondMatch = + ImmutableMap.of( + "a.firstname", + "Mcgee", + "a.lastname", + "Mooney", + "a.gender", + "M", + "d.firstname", + "Lynn", + "d.age", + 40); Assert.assertTrue(hitsInclude(hits, oneMatch)); Assert.assertTrue(hitsInclude(hits, secondMatch)); @@ -445,21 +495,44 @@ public void joinParseCheckSelectedFieldsSplitNLConditionOrderGT() throws IOExcep @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderLT() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.firstname, d.age FROM " + - "%s a JOIN %s d on a.age > d.age " + - "WHERE (d.firstname = 'Sandoval' OR d.firstname = 'Hewitt') AND a.firstname = 'Fulton'", - TEST_INDEX_PEOPLE, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! 
USE_NL*/ a.firstname, a.lastname, a.gender, d.firstname, d.age FROM %s a" + + " JOIN %s d on a.age > d.age WHERE (d.firstname = 'Sandoval' OR d.firstname =" + + " 'Hewitt') AND a.firstname = 'Fulton'", + TEST_INDEX_PEOPLE, + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map oneMatch = ImmutableMap.of("a.firstname", "Fulton", "a.lastname", "Holt", - "a.gender", "F", "d.firstname", "Sandoval", "d.age", 22); - Map secondMatch = ImmutableMap.of("a.firstname", "Fulton", "a.lastname", "Holt", - "a.gender", "F", "d.firstname", "Hewitt", "d.age", 22); + Map oneMatch = + ImmutableMap.of( + "a.firstname", + "Fulton", + "a.lastname", + "Holt", + "a.gender", + "F", + "d.firstname", + "Sandoval", + "d.age", + 22); + Map secondMatch = + ImmutableMap.of( + "a.firstname", + "Fulton", + "a.lastname", + "Holt", + "a.gender", + "F", + "d.firstname", + "Hewitt", + "d.age", + 22); Assert.assertTrue(hitsInclude(hits, oneMatch)); Assert.assertTrue(hitsInclude(hits, secondMatch)); @@ -516,9 +589,12 @@ public void innerJoinNLWithNullInCondition3() throws IOException { private void joinWithAllFromSecondTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname, d.* " + - "FROM %2$s c JOIN %2$s d ON d.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname, d.* " + "FROM %2$s c JOIN %2$s d ON d.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -534,9 +610,12 @@ private void joinWithAllFromSecondTable(boolean useNestedLoops) throws IOExcepti private void joinWithAllFromFirstTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname " + - "FROM %2$s d JOIN %2$s c ON c.house = d.hname", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname " + "FROM %2$s d JOIN %2$s c ON c.house = d.hname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -552,30 +631,40 @@ private void joinWithAllFromFirstTable(boolean useNestedLoops) throws IOExceptio private void leftJoinWithAllFromSecondTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname, d.* " + - "FROM %2$s c LEFT JOIN %2$s d ON d.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname, d.* " + + "FROM %2$s c LEFT JOIN %2$s d ON d.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(7)); - hits.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; + hits.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; - Assert.assertThat(hit.getJSONObject("_source").length(), - equalTo(hit.getString("_id").endsWith("0") ? 1 : 5)); - }); + Assert.assertThat( + hit.getJSONObject("_source").length(), + equalTo(hit.getString("_id").endsWith("0") ? 1 : 5)); + }); } private void joinParseCheckSelectedFieldsSplit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s a.firstname ,a.lastname,a.gender,d.dog_name " + - "FROM %s a JOIN %s d ON d.holdersName = a.firstname " + - "WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", hint, TEST_INDEX_PEOPLE, + String.format( + Locale.ROOT, + "SELECT%s a.firstname ,a.lastname,a.gender,d.dog_name " + + "FROM %s a JOIN %s d ON d.holdersName = a.firstname " + + "WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", + hint, + TEST_INDEX_PEOPLE, TEST_INDEX_DOG); JSONObject result = executeQuery(query); @@ -585,9 +674,13 @@ private void joinParseCheckSelectedFieldsSplit(boolean useNestedLoops) throws IO private void joinNoConditionButWithWhere(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.gender,h.hname,h.words FROM %2$s c " + - "JOIN %2$s h WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.gender,h.hname,h.words FROM %2$s c " + + "JOIN %2$s h WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -598,9 +691,12 @@ private void joinNoConditionAndNoWhere(boolean useNestedLoops) throws IOExceptio final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + + "FROM %2$s c JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -610,17 +706,21 @@ private void joinNoConditionAndNoWhere(boolean useNestedLoops) throws IOExceptio private void joinWithNoWhereButWithCondition(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.gender,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h ON h.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.gender,h.hname,h.words " + "FROM %2$s c JOIN %2$s h ON h.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - Map someMatch = ImmutableMap.of( - "c.gender", "F", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + Map someMatch = + ImmutableMap.of( + "c.gender", "F", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { // TODO: should the NL result be different? 
@@ -631,24 +731,26 @@ private void joinWithNoWhereButWithCondition(boolean useNestedLoops) throws IOEx } } - private void verifyJoinParseCheckSelectedFieldsSplitResult(JSONObject result, - boolean useNestedLoops) { + private void verifyJoinParseCheckSelectedFieldsSplitResult( + JSONObject result, boolean useNestedLoops) { - Map match1 = ImmutableMap.of( - "a.firstname", "Daenerys", - "a.lastname", "Targaryen", - "a.gender", "M", - "d.dog_name", "rex"); - Map match2 = ImmutableMap.of( - "a.firstname", "Hattie", - "a.lastname", "Bond", - "a.gender", "M", - "d.dog_name", "snoopy"); + Map match1 = + ImmutableMap.of( + "a.firstname", "Daenerys", + "a.lastname", "Targaryen", + "a.gender", "M", + "d.dog_name", "rex"); + Map match2 = + ImmutableMap.of( + "a.firstname", "Hattie", + "a.lastname", "Bond", + "a.gender", "M", + "d.dog_name", "snoopy"); JSONArray hits = getHits(result); if (useNestedLoops) { - //TODO: change field mapping in ON condition to keyword or change query to get result + // TODO: change field mapping in ON condition to keyword or change query to get result // TODO: why does NL query return no results? Assert.assertThat(hits.length(), equalTo(0)); } else { @@ -662,9 +764,12 @@ private void joinNoConditionAndNoWhereWithTotalLimit(boolean useNestedLoops) thr final String hint = useNestedLoops ? USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words" + - " FROM %2$s c JOIN %2$s h LIMIT 9", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words" + + " FROM %2$s c JOIN %2$s h LIMIT 9", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -675,18 +780,22 @@ private void joinWithNestedFieldsOnReturn(boolean useNestedLoops) throws IOExcep final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + + "FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "c.parents.father", "Aerys", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "c.parents.father", "Aerys", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); } else { @@ -699,17 +808,21 @@ private void joinWithAllAliasOnReturn(boolean useNestedLoops) throws IOException final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname name,c.parents.father father," + - "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname name,c.parents.father father," + + "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "name", "Daenerys", - "father", "Aerys", - "house", "Targaryen"); + final Map expectedMatch = + ImmutableMap.of( + "name", "Daenerys", + "father", "Aerys", + "house", "Targaryen"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); @@ -723,20 +836,24 @@ private void joinWithSomeAliasOnReturn(boolean useNestedLoops) throws IOExceptio final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname ,c.parents.father father, " + - "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname ,c.parents.father father, " + + "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "father", "Aerys", - "house", "Targaryen"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "father", "Aerys", + "house", "Targaryen"); if (useNestedLoops) { - //TODO: Either change the ON condition field to keyword or create a different subquery + // TODO: Either change the ON condition field to keyword or create a different subquery Assert.assertThat(hits.length(), equalTo(0)); } else { Assert.assertThat(hits.length(), equalTo(1)); @@ -749,18 +866,22 @@ private void joinWithNestedFieldsOnComparisonAndOnReturn(boolean useNestedLoops) final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father, h.hname,h.words " + - " FROM %2$s c JOIN %2$s h ON h.hname = c.name.lastname " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father, h.hname,h.words " + + " FROM %2$s c JOIN %2$s h ON h.hname = c.name.lastname " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "c.parents.father", "Aerys", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "c.parents.father", "Aerys", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); @@ -773,10 +894,12 @@ private void joinWithNestedFieldsOnComparisonAndOnReturn(boolean useNestedLoops) private void testLeftJoin(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format("SELECT%s c.name.firstname, f.name.firstname,f.name.lastname " + - "FROM %2$s c LEFT JOIN %2$s f " + - "ON f.name.firstname = c.parents.father", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + "SELECT%s c.name.firstname, f.name.firstname,f.name.lastname " + + "FROM %2$s c LEFT JOIN %2$s f " + + "ON f.name.firstname = c.parents.father", + hint, TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -805,10 +928,14 @@ private void testLeftJoin(boolean useNestedLoops) throws IOException { private void hintLimits_firstLimitSecondNull(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(2,null) */ " + - "c.name.firstname,c.parents.father, h.hname,h.words " + - "FROM %2$s c JOIN %2$s h", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(2,null) */ " + + "c.name.firstname,c.parents.father, h.hname,h.words " + + "FROM %2$s c JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -818,9 +945,14 @@ private void hintLimits_firstLimitSecondNull(boolean useNestedLoops) throws IOEx private void hintLimits_firstLimitSecondLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(2,2) */ " + - "c.name.firstname,c.parents.father, h.hname,h.words FROM %2$s c " + - "JOIN %2$s h", hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(2,2) */ " + + "c.name.firstname,c.parents.father, h.hname,h.words FROM %2$s c " + + "JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -830,10 +962,14 @@ private void hintLimits_firstLimitSecondLimit(boolean useNestedLoops) throws IOE private void hintLimits_firstLimitSecondLimitOnlyOne(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(3,1) */ " + - "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s h " + - "JOIN %2$s c ON c.name.lastname = h.hname", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! 
JOIN_TABLES_LIMIT(3,1) */ " + + "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s h " + + "JOIN %2$s c ON c.name.lastname = h.hname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -843,9 +979,14 @@ private void hintLimits_firstLimitSecondLimitOnlyOne(boolean useNestedLoops) thr private void hintLimits_firstNullSecondLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(null,2) */ " + - "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s c " + - "JOIN %2$s h", hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(null,2) */ " + + "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s c " + + "JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -855,10 +996,14 @@ private void hintLimits_firstNullSecondLimit(boolean useNestedLoops) throws IOEx private void testLeftJoinWithLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(3,null) */ " + - "c.name.firstname, f.name.firstname,f.name.lastname FROM %2$s c " + - "LEFT JOIN %2$s f ON f.name.firstname = c.parents.father", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! 
JOIN_TABLES_LIMIT(3,null) */ " + + "c.name.firstname, f.name.firstname,f.name.lastname FROM %2$s c " + + "LEFT JOIN %2$s f ON f.name.firstname = c.parents.father", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -868,20 +1013,27 @@ private void testLeftJoinWithLimit(boolean useNestedLoops) throws IOException { private void joinWithOr(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s d.dog_name,c.name.firstname " + - "FROM %s c JOIN %s d " + - "ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", - hint, TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT%s d.dog_name,c.name.firstname " + + "FROM %s c JOIN %s d " + + "ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", + hint, + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map firstMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "d.dog_name", "rex"); - final Map secondMatch = ImmutableMap.of( - "c.name.firstname", "Brandon", - "d.dog_name", "snoopy"); + final Map firstMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "d.dog_name", "rex"); + final Map secondMatch = + ImmutableMap.of( + "c.name.firstname", "Brandon", + "d.dog_name", "snoopy"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(1)); @@ -896,10 +1048,14 @@ private void joinWithOr(boolean useNestedLoops) throws IOException { private void joinWithOrderFirstTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.name.firstname,d.words " + - "FROM %2$s c JOIN %2$s d ON d.hname = c.house " + - "ORDER BY c.name.firstname", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,d.words " + + "FROM %2$s c JOIN %2$s d ON d.hname = c.house " + + "ORDER BY c.name.firstname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -912,35 +1068,42 @@ private void joinWithOrderFirstTable(boolean useNestedLoops) throws IOException String[] expectedNames = {"Brandon", "Daenerys", "Eddard", "Jaime"}; - IntStream.rangeClosed(0, 3).forEach(i -> { - String firstnamePath = String.format(Locale.ROOT, "/%d/_source/c.name.firstname", i); - Assert.assertThat(hits.query(firstnamePath), equalTo(expectedNames[i])); - }); + IntStream.rangeClosed(0, 3) + .forEach( + i -> { + String firstnamePath = + String.format(Locale.ROOT, "/%d/_source/c.name.firstname", i); + Assert.assertThat(hits.query(firstnamePath), equalTo(expectedNames[i])); + }); } } private boolean containsTerm(final String explainedQuery, final String termName) { return Pattern.compile( - Pattern.quote("\"terms\":{") - + ".*" - + Pattern.quote("\"" + termName + "\":[") - ) + Pattern.quote("\"terms\":{") + ".*" + Pattern.quote("\"" + termName + "\":[")) .matcher(explainedQuery.replaceAll("\\s+", "")) .find(); } - private void joinWithNullInCondition(boolean useNestedLoops, String left, - String oper1, String oper2, int expectedNum) + private void joinWithNullInCondition( + boolean useNestedLoops, String left, String oper1, String oper2, int expectedNum) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,c.hname," + - "f.name.firstname,f.house,f.hname FROM %s c " + - "%s JOIN %s f ON f.name.firstname = c.parents.father " + - "%s f.house = c.hname %s f.house = c.name.firstname", - hint, TEST_INDEX_GAME_OF_THRONES, left, TEST_INDEX_GAME_OF_THRONES, oper1, oper2); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,c.hname," + + "f.name.firstname,f.house,f.hname FROM %s c " + + "%s JOIN %s f ON f.name.firstname = c.parents.father " + + "%s f.house = c.hname %s f.house = c.name.firstname", + hint, + TEST_INDEX_GAME_OF_THRONES, + left, + TEST_INDEX_GAME_OF_THRONES, + oper1, + oper2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -968,20 +1131,22 @@ private boolean hitsInclude(final JSONArray actualHits, Map expectedS return false; } - private void assertHitMatches(final JSONObject actualHit, - final Map expectedSourceValues) { + private void assertHitMatches( + final JSONObject actualHit, final Map expectedSourceValues) { final JSONObject src = actualHit.getJSONObject("_source"); Assert.assertThat(src.length(), equalTo(expectedSourceValues.size())); - src.keySet().forEach(key -> { - Assert.assertTrue(expectedSourceValues.containsKey(key)); - Object value = src.get(key); - Assert.assertThat(value, equalTo(expectedSourceValues.get(key))); - }); + src.keySet() + .forEach( + key -> { + Assert.assertTrue(expectedSourceValues.containsKey(key)); + Object value = src.get(key); + Assert.assertThat(value, equalTo(expectedSourceValues.get(key))); + }); } - private boolean hitMatches(final Map actualHit, - final Map expectedSourceValues) { + private boolean hitMatches( + final Map actualHit, final Map expectedSourceValues) { final Map src = uncheckedGetMap(actualHit.get("_source")); @@ -997,8 +1162,8 @@ private boolean hitMatches(final Map actualHit, Object actualValue = src.get(key); Object expectedValue = 
expectedSourceValues.get(key); - if ((actualValue == null && expectedValue != null) || - (actualValue != null && expectedValue == null)) { + if ((actualValue == null && expectedValue != null) + || (actualValue != null && expectedValue == null)) { return false; } else if (actualValue != null && !actualValue.equals(expectedValue)) { return false; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java index b42819bdf7..fcf1edf3e0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.closeTo; @@ -32,9 +31,7 @@ protected void init() throws Exception { @Test public void lowerCaseFunctionCall() throws IOException { - SearchHit[] hits = query( - "SELECT abs(age - 100) AS abs" - ); + SearchHit[] hits = query("SELECT abs(age - 100) AS abs"); for (SearchHit hit : hits) { double abs = (double) getField(hit, "abs"); assertThat(abs, greaterThanOrEqualTo(0.0)); @@ -43,9 +40,7 @@ public void lowerCaseFunctionCall() throws IOException { @Test public void upperCaseFunctionCall() throws IOException { - SearchHit[] hits = query( - "SELECT ABS(age - 100) AS abs" - ); + SearchHit[] hits = query("SELECT ABS(age - 100) AS abs"); for (SearchHit hit : hits) { double abs = (double) getField(hit, "abs"); assertThat(abs, greaterThanOrEqualTo(0.0)); @@ -54,36 +49,28 @@ public void upperCaseFunctionCall() throws IOException { @Test public void eulersNumber() throws IOException { - SearchHit[] hits = query( - "SELECT E() AS e" - ); + SearchHit[] hits = query("SELECT E() AS e"); double e = (double) getField(hits[0], "e"); assertThat(e, equalTo(Math.E)); } @Test public void pi() throws IOException { - SearchHit[] hits = query( - "SELECT PI() AS pi" - ); + 
SearchHit[] hits = query("SELECT PI() AS pi"); double pi = (double) getField(hits[0], "pi"); assertThat(pi, equalTo(Math.PI)); } @Test public void expm1Function() throws IOException { - SearchHit[] hits = query( - "SELECT EXPM1(2) AS expm1" - ); + SearchHit[] hits = query("SELECT EXPM1(2) AS expm1"); double expm1 = (double) getField(hits[0], "expm1"); assertThat(expm1, equalTo(Math.expm1(2))); } @Test public void degreesFunction() throws IOException { - SearchHit[] hits = query( - "SELECT age, DEGREES(age) AS degrees" - ); + SearchHit[] hits = query("SELECT age, DEGREES(age) AS degrees"); for (SearchHit hit : hits) { int age = (int) getFieldFromSource(hit, "age"); double degrees = (double) getField(hit, "degrees"); @@ -93,9 +80,7 @@ public void degreesFunction() throws IOException { @Test public void radiansFunction() throws IOException { - SearchHit[] hits = query( - "SELECT age, RADIANS(age) as radians" - ); + SearchHit[] hits = query("SELECT age, RADIANS(age) as radians"); for (SearchHit hit : hits) { int age = (int) getFieldFromSource(hit, "age"); double radians = (double) getField(hit, "radians"); @@ -105,65 +90,54 @@ public void radiansFunction() throws IOException { @Test public void sin() throws IOException { - SearchHit[] hits = query( - "SELECT SIN(PI()) as sin" - ); + SearchHit[] hits = query("SELECT SIN(PI()) as sin"); double sin = (double) getField(hits[0], "sin"); assertThat(sin, equalTo(Math.sin(Math.PI))); } @Test public void asin() throws IOException { - SearchHit[] hits = query( - "SELECT ASIN(PI()) as asin" - ); + SearchHit[] hits = query("SELECT ASIN(PI()) as asin"); double asin = Double.valueOf((String) getField(hits[0], "asin")); assertThat(asin, equalTo(Math.asin(Math.PI))); } @Test public void sinh() throws IOException { - SearchHit[] hits = query( - "SELECT SINH(PI()) as sinh" - ); + SearchHit[] hits = query("SELECT SINH(PI()) as sinh"); double sinh = (double) getField(hits[0], "sinh"); assertThat(sinh, equalTo(Math.sinh(Math.PI))); } @Test 
public void power() throws IOException { - SearchHit[] hits = query( - "SELECT POWER(age, 2) AS power", - "WHERE (age IS NOT NULL) AND (balance IS NOT NULL) and (POWER(balance, 3) > 0)" - ); + SearchHit[] hits = + query( + "SELECT POWER(age, 2) AS power", + "WHERE (age IS NOT NULL) AND (balance IS NOT NULL) and (POWER(balance, 3) > 0)"); double power = (double) getField(hits[0], "power"); assertTrue(power >= 0); } @Test public void atan2() throws IOException { - SearchHit[] hits = query( - "SELECT ATAN2(age, age) AS atan2", - "WHERE (age IS NOT NULL) AND (ATAN2(age, age) > 0)" - ); + SearchHit[] hits = + query( + "SELECT ATAN2(age, age) AS atan2", "WHERE (age IS NOT NULL) AND (ATAN2(age, age) > 0)"); double atan2 = (double) getField(hits[0], "atan2"); assertThat(atan2, equalTo(Math.atan2(1, 1))); } @Test public void cot() throws IOException { - SearchHit[] hits = query( - "SELECT COT(PI()) AS cot" - ); + SearchHit[] hits = query("SELECT COT(PI()) AS cot"); double cot = (double) getField(hits[0], "cot"); assertThat(cot, closeTo(1 / Math.tan(Math.PI), 0.001)); } @Test public void sign() throws IOException { - SearchHit[] hits = query( - "SELECT SIGN(E()) AS sign" - ); + SearchHit[] hits = query("SELECT SIGN(E()) AS sign"); double sign = (double) getField(hits[0], "sign"); assertThat(sign, equalTo(Math.signum(Math.E))); } @@ -186,18 +160,18 @@ public void logWithTwoParams() throws IOException { public void logInAggregationShouldPass() { assertThat( executeQuery( - "SELECT LOG(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " WHERE age IS NOT NULL GROUP BY LOG(age) ORDER BY LOG(age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LOG(age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LOG(age) ORDER BY LOG(age)", + "jdbc"), + containsString("\"type\": \"double\"")); assertThat( executeQuery( - "SELECT LOG(2, age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " WHERE age IS NOT NULL GROUP BY LOG(2, age) ORDER 
BY LOG(2, age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LOG(2, age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LOG(2, age) ORDER BY LOG(2, age)", + "jdbc"), + containsString("\"type\": \"double\"")); } @Test @@ -218,11 +192,11 @@ public void ln() throws IOException { public void lnInAggregationShouldPass() { assertThat( executeQuery( - "SELECT LN(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " WHERE age IS NOT NULL GROUP BY LN(age) ORDER BY LN(age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LN(age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LN(age) ORDER BY LN(age)", + "jdbc"), + containsString("\"type\": \"double\"")); } @Test @@ -238,10 +212,11 @@ private SearchHit[] query(String select, String... statements) throws IOExceptio final String response = executeQueryWithStringOutput(select + " " + FROM + " " + String.join(" ", statements)); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(response)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(response)); return SearchResponse.fromXContent(parser).getHits().getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java index 9f0fca68d5..ba4519f607 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -26,8 +25,8 @@ import org.opensearch.client.Request; import 
org.opensearch.sql.legacy.utils.StringUtils; - /** + *

  * The following are tests for SHOW/DESCRIBE query support under Pretty Format Response protocol using JDBC format.
  * 

* Unlike SELECT queries, the JDBC format response of SHOW and DESCRIBE queries has determined "schema" fields. @@ -182,6 +181,7 @@ * "type": "keyword" * } * ] + *

*/ public class MetaDataQueriesIT extends SQLIntegTestCase { @@ -294,29 +294,27 @@ public void describeSingleIndex() throws IOException { @Ignore("Breaking change, the new engine will return alias instead of index name") @Test public void showSingleIndexAlias() throws IOException { - client().performRequest(new Request("PUT", - TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); + client().performRequest(new Request("PUT", TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); JSONObject expected = executeQuery("SHOW TABLES LIKE " + TestsConstants.TEST_INDEX_ACCOUNT); JSONObject actual = executeQuery("SHOW TABLES LIKE acc"); assertThat(getDataRows(actual).length(), equalTo(1)); - assertTrue(StringUtils.format("Expected: %s, actual: %s", expected, actual), - expected.similar(actual)); + assertTrue( + StringUtils.format("Expected: %s, actual: %s", expected, actual), expected.similar(actual)); } @Ignore("Breaking change, the new engine will return alias instead of index name") @Test public void describeSingleIndexAlias() throws IOException { - client().performRequest(new Request("PUT", - TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); + client().performRequest(new Request("PUT", TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); JSONObject expected = executeQuery("DESCRIBE TABLES LIKE " + TestsConstants.TEST_INDEX_ACCOUNT); JSONObject actual = executeQuery("DESCRIBE TABLES LIKE acc"); assertThat(getDataRows(actual).length(), greaterThan(0)); - assertTrue(StringUtils.format("Expected: %s, actual: %s", expected, actual), - expected.similar(actual)); + assertTrue( + StringUtils.format("Expected: %s, actual: %s", expected, actual), expected.similar(actual)); } @Test @@ -355,7 +353,8 @@ public void describeSingleIndexWithObjectFieldShouldPass() throws IOException { assertThat(dataRows.length(), greaterThan(0)); assertThat(dataRows.getJSONArray(0).length(), equalTo(DESCRIBE_FIELD_LENGTH)); - verifySome(dataRows, + verifySome( + dataRows, 
describeRow(TEST_INDEX_GAME_OF_THRONES, "nickname", "text"), describeRow(TEST_INDEX_GAME_OF_THRONES, "name", "object"), describeRow(TEST_INDEX_GAME_OF_THRONES, "name.firstname", "text"), @@ -402,8 +401,10 @@ public void describeWildcardIndex() throws IOException { @Test public void describeWildcardColumn() throws IOException { - JSONObject response = executeQuery(String.format("DESCRIBE TABLES LIKE %s COLUMNS LIKE %%name", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + "DESCRIBE TABLES LIKE %s COLUMNS LIKE %%name", TestsConstants.TEST_INDEX_ACCOUNT)); String pattern = ".*name"; JSONArray dataRows = getDataRows(response); @@ -418,8 +419,10 @@ public void describeWildcardColumn() throws IOException { @Test public void describeSingleCharacterWildcard() throws IOException { - JSONObject response = executeQuery(String.format("DESCRIBE TABLES LIKE %s COLUMNS LIKE %%na_e", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + "DESCRIBE TABLES LIKE %s COLUMNS LIKE %%na_e", TestsConstants.TEST_INDEX_ACCOUNT)); String pattern = ".*na.e"; JSONArray dataRows = getDataRows(response); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java index 027228a92b..28c5886d68 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.both; @@ -28,26 +27,30 @@ protected void init() throws Exception { } /** + *
    * query
    * "query" : {
    *   query_string" : {
    *     "query" : "address:880 Holmes Lane"
    *   }
    * }
-   *
+   * 
* @throws IOException */ @Test public void queryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where query('address:880 Holmes Lane') limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("query_string\\\":{\\\"query\\\":\\\"address:880 Holmes Lane")); - + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where query('address:880 Holmes Lane') limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("query_string\\\":{\\\"query\\\":\\\"address:880 Holmes Lane")); } /** + *
    * matchQuery
    * "query" : {
    *   "match" : {
@@ -57,19 +60,24 @@ public void queryTest() throws IOException {
    *     }
    *   }
    * }
-   *
+   * 
* @throws IOException */ @Test public void matchQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where address= matchQuery('880 Holmes Lane') limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchQuery('880 Holmes Lane') limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("{\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"880 Holmes Lane\\\"")); } /** + *
    * matchQuery
    * {
    *   "query": {
@@ -109,45 +117,64 @@ public void matchQueryTest() throws IOException {
    *     }
    *   }
    * }
-   *
+   * 
* @throws IOException */ @Test - @Ignore("score query no longer maps to constant_score in the V2 engine - @see org.opensearch.sql.sql.ScoreQueryIT") + @Ignore( + "score query no longer maps to constant_score in the V2 engine - @see" + + " org.opensearch.sql.sql.ScoreQueryIT") public void scoreQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Lane'),100) " + - "or score(matchQuery(address,'Street'),0.5) order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - both(containsString("{\"constant_score\":" + - "{\"filter\":{\"match\":{\"address\":{\"query\":\"Lane\"")).and( - containsString("{\"constant_score\":" + - "{\"filter\":{\"match\":{\"address\":{\"query\":\"Street\""))); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Lane'),100) " + + "or score(matchQuery(address,'Street'),0.5) order by _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + both(containsString( + "{\"constant_score\":" + "{\"filter\":{\"match\":{\"address\":{\"query\":\"Lane\"")) + .and( + containsString( + "{\"constant_score\":" + + "{\"filter\":{\"match\":{\"address\":{\"query\":\"Street\""))); } @Test public void regexpQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE address=REGEXP_QUERY('.*')", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"bool\":{\"must\":[{\"regexp\":" - + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE address=REGEXP_QUERY('.*')", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + 
"{\"bool\":{\"must\":[{\"regexp\":" + + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); } @Test public void negativeRegexpQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT(address=REGEXP_QUERY('.*'))", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"bool\":{\"must_not\":[{\"regexp\":" - + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT(address=REGEXP_QUERY('.*'))", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + "{\"bool\":{\"must_not\":[{\"regexp\":" + + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); } /** + *
    * wildcardQuery
    * l*e means leae ltae ...
    * "wildcard": {
@@ -155,35 +182,43 @@ public void negativeRegexpQueryTest() throws IOException {
    *     "wildcard" : "l*e"
    *   }
    * }
-   *
+   * 
* @throws IOException */ @Test public void wildcardQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where address= wildcardQuery('l*e') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"wildcard\":{\"address\":{\"wildcard\":\"l*e\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= wildcardQuery('l*e') order by _score desc" + + " limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat(result, containsString("{\"wildcard\":{\"address\":{\"wildcard\":\"l*e\"")); } /** + *
    * matchPhraseQuery
    * "address" : {
    *   "query" : "671 Bristol Street",
    *   "type" : "phrase"
    * }
-   *
+   * 
* @throws IOException */ @Test - @Ignore("score query no longer handled by legacy engine - @see org.opensearch.sql.sql.ScoreQueryIT") + @Ignore( + "score query no longer handled by legacy engine - @see org.opensearch.sql.sql.ScoreQueryIT") public void matchPhraseQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where address= matchPhrase('671 Bristol Street') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"match_phrase\":{\"address\":{\"query\":\"671 Bristol Street\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchPhrase('671 Bristol Street') order by" + + " _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("{\"match_phrase\":{\"address\":{\"query\":\"671 Bristol Street\"")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java index 3eeac66b97..238d3aeaff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -47,9 +46,7 @@ private void multiQueries(int n) throws IOException { } private Request makeStatRequest() { - return new Request( - "GET", STATS_API_ENDPOINT - ); + return new Request("GET", STATS_API_ENDPOINT); } private String executeStatRequest(final Request request) throws IOException { @@ -69,5 +66,4 @@ private String executeStatRequest(final Request request) throws IOException { return sb.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java 
b/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java index d8d2b8875a..84750f8a27 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -34,15 +33,17 @@ protected void init() throws Exception { @Test public void unionAllSameRequestOnlyOneRecordTwice() throws IOException { - String query = String.format("SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Amber' " + - "LIMIT 1 " + - "UNION ALL " + - "SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Amber'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT); + String query = + String.format( + "SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Amber' " + + "LIMIT 1 " + + "UNION ALL " + + "SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Amber'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); JSONArray hits = getHits(response); @@ -58,10 +59,12 @@ public void unionAllSameRequestOnlyOneRecordTwice() throws IOException { @Test public void unionAllOnlyOneRecordEachWithAlias() throws IOException { - String query = String.format("SELECT firstname FROM %s WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT dog_name as firstname FROM %s WHERE dog_name = 'rex'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_DOG); + String query = + String.format( + "SELECT firstname FROM %s WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT dog_name as firstname FROM %s WHERE dog_name = 'rex'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_DOG); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(2)); @@ -80,12 +83,14 @@ public void unionAllOnlyOneRecordEachWithAlias() throws 
IOException { @Test public void unionAllOnlyOneRecordEachWithComplexAlias() throws IOException { - String query = String.format("SELECT firstname FROM %s WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT name.firstname as firstname " + - "FROM %s " + - "WHERE name.firstname = 'daenerys'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + "SELECT firstname FROM %s WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT name.firstname as firstname " + + "FROM %s " + + "WHERE name.firstname = 'daenerys'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_GAME_OF_THRONES); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(2)); @@ -144,10 +149,12 @@ public void minusCMinusDTwoFieldsNoAliasWithScrolling() throws IOException { @Test public void minusCMinusDTwoFieldsAliasOnBothSecondTableFields() throws IOException { - String query = String.format("SELECT pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT myId as pk, myLetter as letter FROM %s WHERE system_name = 'E'", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT myId as pk, myLetter as letter FROM %s WHERE system_name = 'E'", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -174,10 +181,12 @@ public void minusCMinusDTwoFieldsAliasOnBothTablesWithScrolling() throws IOExcep @Test public void minusCMinusCTwoFieldsOneAlias() throws IOException { - String query = String.format("SELECT pk as myId, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk as myId, letter FROM %s WHERE system_name = 'C'", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT 
pk as myId, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk as myId, letter FROM %s WHERE system_name = 'C'", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); @@ -185,10 +194,12 @@ public void minusCMinusCTwoFieldsOneAlias() throws IOException { @Test public void minusCMinusTNonExistentFieldTwoFields() throws IOException { - String query = String.format("SELECT pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk, letter FROM %s WHERE system_name = 'T' ", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk, letter FROM %s WHERE system_name = 'T' ", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(3)); @@ -229,20 +240,24 @@ public void minusTMinusCNonExistentFieldFirstQueryWithScrollingAndOptimization() } private void innerMinusAMinusANoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk FROM %s WHERE system_name = 'A' " + - "MINUS " + - "SELECT pk FROM %s WHERE system_name = 'A'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk FROM %s WHERE system_name = 'A' " + + "MINUS " + + "SELECT pk FROM %s WHERE system_name = 'A'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); } private void innerMinusAMinusBNoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk FROM %s WHERE system_name = 'A' " + - "MINUS " + - "SELECT pk FROM %s WHERE system_name = 'B'", - hint, TestsConstants.TEST_INDEX_SYSTEM, 
TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk FROM %s WHERE system_name = 'A' " + + "MINUS " + + "SELECT pk FROM %s WHERE system_name = 'B'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -255,10 +270,12 @@ private void innerMinusAMinusBNoAlias(String hint) throws IOException { } private void innerMinusCMinusDTwoFieldsNoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk, letter FROM %s WHERE system_name = 'D'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk, letter FROM %s WHERE system_name = 'D'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -274,10 +291,12 @@ private void innerMinusCMinusDTwoFieldsNoAlias(String hint) throws IOException { } private void innerMinusCMinusDTwoFieldsAliasOnBothTables(String hint) throws IOException { - String query = String.format("SELECT %s pk as myId, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT myId, myLetter as letter FROM %s WHERE system_name = 'E'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk as myId, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT myId, myLetter as letter FROM %s WHERE system_name = 'E'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -293,20 +312,24 @@ private void innerMinusCMinusDTwoFieldsAliasOnBothTables(String hint) 
throws IOE } private void innerMinusCMinusTNonExistentFieldOneField(String hint) throws IOException { - String query = String.format("SELECT %s letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT letter FROM %s WHERE system_name = 'T'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT letter FROM %s WHERE system_name = 'T'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(3)); } private void innerMinusTMinusCNonExistentFieldFirstQuery(String hint) throws IOException { - String query = String.format("SELECT %s letter FROM %s WHERE system_name = 'T' " + - "MINUS " + - "SELECT letter FROM %s WHERE system_name = 'C'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s letter FROM %s WHERE system_name = 'T' " + + "MINUS " + + "SELECT letter FROM %s WHERE system_name = 'C'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java index 1a244bed85..105669c7ca 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java @@ -1,182 +1,190 @@ - /* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.sql.ppl; - import org.json.JSONObject; - import org.junit.Test; - - import java.io.IOException; - - import static 
org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; - import static org.opensearch.sql.util.MatcherUtils.rows; - import static org.opensearch.sql.util.MatcherUtils.schema; - import static org.opensearch.sql.util.MatcherUtils.verifySchema; - import static org.opensearch.sql.util.MatcherUtils.verifySome; - - public class ConvertTZFunctionIT extends PPLIntegTestCase { - - - @Override - public void init() throws IOException { - loadIndex(Index.DATE); - } - - - @Test - public void inRangeZeroToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); - } - - @Test - public void inRangeZeroToZero() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); - } - - @Test - public void inRangePositiveToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); - } - - @Test - public void inRangeNegativeToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); - } - - @Test - public void inRangeNoTZChange() throws 
IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); - } - - @Test - public void inRangeTwentyFourHourChange() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); - } - - @Test - public void inRangeFifteenMinuteTZ() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); - } - - @Test - public void nullFromFieldUnder() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullToFieldOver() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullFromGarbageInput1() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", - 
TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullFromGarbageInput2() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueFebruary() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueApril() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueMonth() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; +import static org.opensearch.sql.util.MatcherUtils.rows; +import static org.opensearch.sql.util.MatcherUtils.schema; +import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import static org.opensearch.sql.util.MatcherUtils.verifySome; +import java.io.IOException; +import 
org.json.JSONObject; +import org.junit.Test; + +public class ConvertTZFunctionIT extends PPLIntegTestCase { + + @Override + public void init() throws IOException { + loadIndex(Index.DATE); + } + + @Test + public void inRangeZeroToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); + } + + @Test + public void inRangeZeroToZero() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); + } + + @Test + public void inRangePositiveToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); + } + + @Test + public void inRangeNegativeToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); + } + + @Test + public void inRangeNoTZChange() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + 
verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); + } + + @Test + public void inRangeTwentyFourHourChange() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); + } + + @Test + public void inRangeFifteenMinuteTZ() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); + } + + @Test + public void nullFromFieldUnder() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullToFieldOver() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullFromGarbageInput1() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullFromGarbageInput2() throws 
IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueFebruary() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueApril() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueMonth() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java index a8e686a893..19e3debdf0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -23,13 
+22,13 @@ public class CrossClusterSearchIT extends PPLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - private final static String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; - private final static String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; - private final static String TEST_INDEX_DOG_MATCH_ALL_REMOTE = MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; - private final static String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; + private static final String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; + private static final String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; + private static final String TEST_INDEX_DOG_MATCH_ALL_REMOTE = + MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; + private static final String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; @Override public void init() throws IOException { @@ -49,7 +48,8 @@ public void testCrossClusterSearchAllFields() throws IOException { @Test public void testMatchAllCrossClusterSearchAllFields() throws IOException { - JSONObject result = executeQuery(String.format("search source=%s", TEST_INDEX_DOG_MATCH_ALL_REMOTE)); + JSONObject result = + executeQuery(String.format("search source=%s", TEST_INDEX_DOG_MATCH_ALL_REMOTE)); verifyColumn(result, columnName("dog_name"), columnName("holdersName"), columnName("age")); } @@ -64,18 +64,21 @@ public void testCrossClusterSearchWithoutLocalFieldMappingShouldFail() throws IO @Test public void testCrossClusterSearchCommandWithLogicalExpression() throws IOException { - JSONObject result = executeQuery(String.format( - "search source=%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE)); + JSONObject result = + executeQuery( + String.format( + "search source=%s firstname='Hattie' | fields firstname", 
TEST_INDEX_BANK_REMOTE)); verifyDataRows(result, rows("Hattie")); } @Test public void testCrossClusterSearchMultiClusters() throws IOException { - JSONObject result = executeQuery(String.format( - "search source=%s,%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE, TEST_INDEX_BANK)); - verifyDataRows(result, - rows("Hattie"), - rows("Hattie")); + JSONObject result = + executeQuery( + String.format( + "search source=%s,%s firstname='Hattie' | fields firstname", + TEST_INDEX_BANK_REMOTE, TEST_INDEX_BANK)); + verifyDataRows(result, rows("Hattie"), rows("Hattie")); } @Test @@ -106,8 +109,7 @@ public void testCrossClusterDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } @Test @@ -138,7 +140,6 @@ public void testMatchAllCrossClusterDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java index 430ae9a7b2..a9eb18c2a1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; @@ -22,29 +21,40 @@ public void init() throws IOException { @Test public void sanitizeTest() throws IOException { - String result = executeCsvQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, lastname", TEST_INDEX_BANK_CSV_SANITIZE)); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "'+Amber JOHnny,Duke Willmington+%n" - + 
"'-Hattie,Bond-%n" - + "'=Nanette,Bates=%n" - + "'@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeCsvQuery( + String.format( + Locale.ROOT, + "source=%s | fields firstname, lastname", + TEST_INDEX_BANK_CSV_SANITIZE)); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void escapeSanitizeTest() throws IOException { - String result = executeCsvQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, lastname", TEST_INDEX_BANK_CSV_SANITIZE), false); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "+Amber JOHnny,Duke Willmington+%n" - + "-Hattie,Bond-%n" - + "=Nanette,Bates=%n" - + "@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeCsvQuery( + String.format( + Locale.ROOT, + "source=%s | fields firstname, lastname", + TEST_INDEX_BANK_CSV_SANITIZE), + false); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java index 9911c35d8f..8b5a6d498e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.SQLIntegTestCase.Index.DATA_TYPE_NONNUMERIC; @@ -27,9 +26,9 @@ public void init() throws IOException { @Test public void test_numeric_data_types() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s", TEST_INDEX_DATATYPE_NUMERIC)); - verifySchema(result, + 
JSONObject result = executeQuery(String.format("source=%s", TEST_INDEX_DATATYPE_NUMERIC)); + verifySchema( + result, schema("long_number", "long"), schema("integer_number", "integer"), schema("short_number", "short"), @@ -42,9 +41,9 @@ public void test_numeric_data_types() throws IOException { @Test public void test_nonnumeric_data_types() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s", TEST_INDEX_DATATYPE_NONNUMERIC)); - verifySchema(result, + JSONObject result = executeQuery(String.format("source=%s", TEST_INDEX_DATATYPE_NONNUMERIC)); + verifySchema( + result, schema("boolean_value", "boolean"), schema("keyword_value", "string"), schema("text_value", "string"), @@ -58,15 +57,18 @@ public void test_nonnumeric_data_types() throws IOException { @Test public void test_long_integer_data_type() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval " - + " int1 = 2147483647," - + " int2 = -2147483648," - + " long1 = 2147483648," - + " long2 = -2147483649 | " - + "fields int1, int2, long1, long2 ", - TEST_INDEX_DATATYPE_NUMERIC)); - verifySchema(result, + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " int1 = 2147483647," + + " int2 = -2147483648," + + " long1 = 2147483648," + + " long2 = -2147483649 | " + + "fields int1, int2, long1, long2 ", + TEST_INDEX_DATATYPE_NUMERIC)); + verifySchema( + result, schema("int1", "integer"), schema("int2", "integer"), schema("long1", "long"), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java index 4fb61ae2e9..6f6b5cc297 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java @@ -47,9 +47,10 @@ public void resetTimeZone() { private String name; private Boolean expectedResult; - public 
DateTimeComparisonIT(@Name("functionCall") String functionCall, - @Name("name") String name, - @Name("expectedResult") Boolean expectedResult) { + public DateTimeComparisonIT( + @Name("functionCall") String functionCall, + @Name("name") String name, + @Name("expectedResult") Boolean expectedResult) { this.functionCall = functionCall; this.name = name; this.expectedResult = expectedResult; @@ -57,548 +58,707 @@ public DateTimeComparisonIT(@Name("functionCall") String functionCall, @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), - $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), - $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), - $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), - $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), - $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), - $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), - $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), - $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), - $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), - $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), - $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), - $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), - $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), - $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), + $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), + $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), + $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), + $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), + $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), + $("DATE('1984-12-15') > 
DATE('2020-09-16')", "gt2", false), + $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), + $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), + $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), + $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), + $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), + $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), + $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), + $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimes() { - return Arrays.asList($$( - $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), - $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), - $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), - $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), - $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), - $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), - $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), - $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), - $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), - $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), - $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), - $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), - $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), - $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), - $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), + $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), + $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), + $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), + $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), + $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), + $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), + 
$("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), + $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), + $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), + $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), + $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), + $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), + $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), + $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDateTimes() { - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 
09:07:00')", "lte2", true), - $("DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), + $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), + $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), + $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), + $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), + $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), + $( + "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), - 
$("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), - $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), - $("TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", "neq3", false), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), - $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", "gte3", false), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), + $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", + "neq3", + false), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') > 
TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), + $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), + $( + "TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", + "gte3", + false), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - 
$("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), - $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), + $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), 
- $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", 
"d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - 
$("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') 
!= DATE('1961-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') != 
DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public 
static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public 
static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public 
static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", 
"t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", 
"dt_d_f", false), + $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + 
$("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') < 
TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), + 
$("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > 
TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), + $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), + 
$("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), + $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') > DATETIME('" + today + " 
10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), - $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), 
+ $( + "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), + $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 
10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), + $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), + $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') <= 
TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') <= DATE('3077-04-12')", 
"t_d_t", true), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), 
- $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') >= 
DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), - $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 
09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), + $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), + $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", 
"t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false))); } @Test public void testCompare() throws IOException { - var result = executeQuery(String.format("source=%s | eval `%s` = %s | fields `%s`", - TEST_INDEX_DATATYPE_NONNUMERIC, name, functionCall, name)); + var result = + executeQuery( + String.format( + "source=%s | eval `%s` = %s | fields `%s`", + TEST_INDEX_DATATYPE_NONNUMERIC, name, functionCall, name)); verifySchema(result, schema(name, null, "boolean")); 
verifyDataRows(result, rows(expectedResult)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java index b75b0ecaef..1df87a87b3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -53,826 +52,1151 @@ public void resetTimeZone() { @Test public void testAddDateWithDays() throws IOException { - var result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval " + " f = adddate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 07:40:00")); - result = 
executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), 0)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = adddate(TIME('07:40:00'), 0)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @Test public void testAddDateWithInterval() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), interval 1 day) " - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(date('2020-09-16'), interval 1 day) " + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), interval 1 hour)" - + " | fields f", 
TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testConvertTZ() throws IOException { JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2008-05-15 
12:00:00','+00:00','+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields f", - 
TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | 
fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void testDateAdd() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(date('2020-09-16'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(date('2020-09-16'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(date('2020-09-16'), 
interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" - + " | 
fields f", TEST_INDEX_BANK)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" + " | fields f", + TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "datetime")); - verifyDataRows(result, - rows("2018-10-23 00:00:00"), - rows("2018-11-20 00:00:00"), - rows("2019-06-23 00:00:00"), - rows("2019-11-13 23:33:20"), - rows("2019-06-27 00:00:00"), - rows("2019-08-19 00:00:00"), - rows("2019-08-11 00:00:00")); + verifyDataRows( + result, + rows("2018-10-23 00:00:00"), + rows("2018-11-20 00:00:00"), + rows("2019-06-23 00:00:00"), + rows("2019-11-13 23:33:20"), + rows("2019-06-27 00:00:00"), + rows("2019-08-19 00:00:00"), + rows("2019-08-11 00:00:00")); } @Test public void testDateTime() throws IOException { JSONObject result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + + " | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + 
"source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2003-03-01 19:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", + TEST_INDEX_DATE)); + 
verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, 
schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void testDateSub() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(date('2020-09-16'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(date('2020-09-16'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", 
null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + 
.atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testDay() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = day('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = day('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDay_of_week() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_week(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_week(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_week('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_week('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test public void testDay_of_month() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_month(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); 
verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_month('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_month('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDay_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_year(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); } @Test public void testDayName() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("Wednesday")); - result = executeQuery(String.format( - "source=%s | eval f = dayname('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayname('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, 
"string")); verifySome(result.getJSONArray("datarows"), rows("Wednesday")); } @Test public void testDayOfMonth() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofmonth(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofmonth(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = dayofmonth('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofmonth('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDayOfWeek() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofweek(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofweek(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); - result = executeQuery(String.format( - "source=%s | eval f = dayofweek('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofweek('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test public void testDayOfYear() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofyear(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofyear(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); 
verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); - result = executeQuery(String.format( - "source=%s | eval f = dayofyear('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofyear('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); } @Test public void testFromDays() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = from_days(738049) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format("source=%s | eval f = from_days(738049) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16")); } @Test public void testHour() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = hour(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = hour(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); 
verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = hour('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); } @Test public void testHour_of_day() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = hour_of_day(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); } @Test public void testMicrosecond() throws 
IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.123456')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.123456')) |" + + " fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); // Explicit timestamp value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.1234')) | fields" + + " f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond(time('17:30:00.000010')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(time('17:30:00.000010')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); // Explicit time value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond(time('17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(time('17:30:00.1234')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond('2020-09-16 17:30:00.123456') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('2020-09-16 17:30:00.123456') 
| fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); // Implicit timestamp value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond('2020-09-16 17:30:00.1234') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('2020-09-16 17:30:00.1234') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond('17:30:00.000010') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('17:30:00.000010') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); // Implicit time value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond('17:30:00.1234') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('17:30:00.1234') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); } @Test public void testMinute() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + 
String.format( + "source=%s | eval f = minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = minute('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); } @Test public void testMinute_of_hour() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour('2020-09-16 17:30:00') | 
fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); } @Test public void testMinute_of_day() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute_of_day(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, 
schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); } @Test public void testMonth() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); - result = executeQuery(String.format( - "source=%s | eval f = month('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = month('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); } @Test public void testMonth_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = month_of_year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = month_of_year(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); - result = executeQuery(String.format( - "source=%s | eval f = month_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = month_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); } @Test public void testMonthName() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = monthname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = monthname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); 
verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("September")); - result = executeQuery(String.format( - "source=%s | eval f = monthname('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = monthname('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("September")); } @Test public void testQuarter() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = quarter(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = quarter(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(3)); - result = executeQuery(String.format( - "source=%s | eval f = quarter('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = quarter('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(3)); } @Test public void testSecond() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = second(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = second(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, 
"integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = second('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); } @Test public void testSecond_of_minute() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = second_of_minute(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute(timestamp('2020-09-16 17:30:00')) | fields" + + " f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); 
verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); } @Test public void testSubDateDays() throws IOException { - var result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), 0)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(TIME('07:40:00'), 0)" + " | fields f", + TEST_INDEX_DATE)); 
verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @Test public void testSubDateInterval() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), interval 1 day) " - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(date('2020-09-16'), interval 1 day) " + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); 
verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testTimeToSec() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = time_to_sec(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = time_to_sec(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); 
verifySome(result.getJSONArray("datarows"), rows(63000)); - result = executeQuery(String.format( - "source=%s | eval f = time_to_sec('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = time_to_sec('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63000)); } @Test public void testToDays() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = to_days(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = to_days(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(738049)); - result = executeQuery(String.format( - "source=%s | eval f = to_days('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = to_days('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(738049)); } private void week(String date, int mode, int expectedResult) throws IOException { - JSONObject result = executeQuery(StringUtils.format( - "source=%s | eval f = week(date('%s'), %d) | fields f", TEST_INDEX_DATE, date, mode)); + JSONObject result = + executeQuery( + StringUtils.format( + "source=%s | eval f = week(date('%s'), %d) | fields f", + TEST_INDEX_DATE, date, mode)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(expectedResult)); } @Test public void testWeek() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = week(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = week(date('2008-02-20')) | fields f", 
TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(7)); @@ -885,35 +1209,46 @@ public void testWeek() throws IOException { @Test public void testWeek_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = week_of_year(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = week_of_year(date('2008-02-20')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(7)); } @Test public void testYear() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(2020)); - result = executeQuery(String.format( - "source=%s | eval f = year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(2020)); } - void verifyDateFormat(String date, String type, String format, String formatted) throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = date_format(%s('%s'), '%s') | fields f", - TEST_INDEX_DATE, type, date, format)); + void verifyDateFormat(String date, String type, String format, String formatted) + throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = date_format(%s('%s'), '%s') | fields f", + TEST_INDEX_DATE, type, date, format)); verifySchema(result, schema("f", null, "string")); 
verifySome(result.getJSONArray("datarows"), rows(formatted)); - result = executeQuery(String.format( - "source=%s | eval f = date_format('%s', '%s') | fields f", - TEST_INDEX_DATE, date, format)); + result = + executeQuery( + String.format( + "source=%s | eval f = date_format('%s', '%s') | fields f", + TEST_INDEX_DATE, date, format)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows(formatted)); } @@ -921,10 +1256,11 @@ void verifyDateFormat(String date, String type, String format, String formatted) @Test public void testDateFormat() throws IOException { String timestamp = "1998-01-31 13:14:15.012345"; - String timestampFormat = "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " - + "%m %p %r %S %s %T %% %P"; - String timestampFormatted = "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " - + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; + String timestampFormat = + "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " + "%m %p %r %S %s %T %% %P"; + String timestampFormatted = + "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " + + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; verifyDateFormat(timestamp, "timestamp", timestampFormat, timestampFormatted); String date = "1998-01-31"; @@ -948,76 +1284,119 @@ public void testDateFormatISO8601() throws IOException { @Test public void testMakeTime() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = MAKETIME(20, 30, 40), f2 = MAKETIME(20.2, 49.5, 42.100502) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = MAKETIME(20, 30, 40), f2 = MAKETIME(20.2, 49.5, 42.100502) |" + + " fields f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "time"), schema("f2", null, "time")); verifySome(result.getJSONArray("datarows"), rows("20:30:40", "20:50:42.100502")); } @Test public void testMakeDate() throws IOException { - var result = executeQuery(String.format( - 
"source=%s | eval f1 = MAKEDATE(1945, 5.9), f2 = MAKEDATE(1984, 1984) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = MAKEDATE(1945, 5.9), f2 = MAKEDATE(1984, 1984) | fields f1," + + " f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "date"), schema("f2", null, "date")); verifySome(result.getJSONArray("datarows"), rows("1945-01-06", "1989-06-06")); } @Test public void testAddTime() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2008-12-12' + 0` = ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))," - + " `'23:59:59' + 0` = ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))," - + " `'2004-01-01' + '23:59:59'` = ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))," - + " `'10:20:30' + '00:05:42'` = ADDTIME(TIME('10:20:30'), TIME('00:05:42'))," - + " `'15:42:13' + '09:07:00'` = ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" - + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`, `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval `'2008-12-12' + 0` = ADDTIME(DATE('2008-12-12')," + + " DATE('2008-11-15')), `'23:59:59' + 0` = ADDTIME(TIME('23:59:59')," + + " DATE('2004-01-01')), `'2004-01-01' + '23:59:59'` =" + + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' + '00:05:42'` =" + + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' + '09:07:00'` =" + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`," + + " `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2008-12-12' + 0", null, "datetime"), schema("'23:59:59' + 0", null, "time"), schema("'2004-01-01' + '23:59:59'", null, "datetime"), schema("'10:20:30' + 
'00:05:42'", null, "time"), schema("'15:42:13' + '09:07:00'", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-12-12 00:00:00", "23:59:59", "2004-01-01 23:59:59", "10:26:12", "2000-01-01 00:49:13")); + verifySome( + result.getJSONArray("datarows"), + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2004-01-01 23:59:59", + "10:26:12", + "2000-01-01 00:49:13")); } @Test public void testSubTime() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2008-12-12' - 0` = SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))," - + " `'23:59:59' - 0` = SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))," - + " `'2004-01-01' - '23:59:59'` = SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))," - + " `'10:20:30' - '00:05:42'` = SUBTIME(TIME('10:20:30'), TIME('00:05:42'))," - + " `'15:42:13' - '09:07:00'` = SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" - + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`, `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval `'2008-12-12' - 0` = SUBTIME(DATE('2008-12-12')," + + " DATE('2008-11-15')), `'23:59:59' - 0` = SUBTIME(TIME('23:59:59')," + + " DATE('2004-01-01')), `'2004-01-01' - '23:59:59'` =" + + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' - '00:05:42'` =" + + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' - '09:07:00'` =" + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`," + + " `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2008-12-12' - 0", null, "datetime"), schema("'23:59:59' - 0", null, "time"), schema("'2004-01-01' - '23:59:59'", null, "datetime"), schema("'10:20:30' - '00:05:42'", null, "time"), 
schema("'15:42:13' - '09:07:00'", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-12-12 00:00:00", "23:59:59", "2003-12-31 00:00:01", "10:14:48", "1999-12-31 06:35:13")); + verifySome( + result.getJSONArray("datarows"), + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2003-12-31 00:00:01", + "10:14:48", + "1999-12-31 06:35:13")); } @Test public void testFromUnixTime() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = FROM_UNIXTIME(200300400), f2 = FROM_UNIXTIME(12224.12), " - + "f3 = FROM_UNIXTIME(1662601316, '%%T') | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval f1 = FROM_UNIXTIME(200300400), f2 = FROM_UNIXTIME(12224.12), " + + "f3 = FROM_UNIXTIME(1662601316, '%%T') | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "datetime"), schema("f2", null, "datetime"), schema("f3", null, "string")); - verifySome(result.getJSONArray("datarows"), + verifySome( + result.getJSONArray("datarows"), rows("1976-05-07 07:00:00", "1970-01-01 03:23:44.12", "01:41:56")); } @Test public void testUnixTimeStamp() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = UNIX_TIMESTAMP(MAKEDATE(1984, 1984)), " - + "f2 = UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')), " - + "f3 = UNIX_TIMESTAMP(20771122143845) | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval f1 = UNIX_TIMESTAMP(MAKEDATE(1984, 1984)), " + + "f2 = UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')), " + + "f3 = UNIX_TIMESTAMP(20771122143845) | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "double"), schema("f2", null, "double"), schema("f3", null, "double")); @@ -1026,28 +1405,43 @@ public void testUnixTimeStamp() throws IOException { @Test public void testPeriodAdd() throws IOException { 
- var result = executeQuery(String.format( - "source=%s | eval f1 = PERIOD_ADD(200801, 2), f2 = PERIOD_ADD(200801, -12) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = PERIOD_ADD(200801, 2), f2 = PERIOD_ADD(200801, -12) | fields" + + " f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(200803, 200701)); } @Test public void testPeriodDiff() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = PERIOD_DIFF(200802, 200703), f2 = PERIOD_DIFF(200802, 201003) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = PERIOD_DIFF(200802, 200703), f2 = PERIOD_DIFF(200802," + + " 201003) | fields f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(11, -25)); } public void testDateDiff() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2000-01-02' - '2000-01-01'` = DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))," - + " `'2001-02-01' - '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))," - + " `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))," - + " `today - today` = DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))" - + " | fields `'2000-01-02' - '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' - '2002-02-01'`, `today - today`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval `'2000-01-02' - '2000-01-01'` = DATEDIFF(TIMESTAMP('2000-01-02" + + " 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')), `'2001-02-01' -" + + " '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01" + + " 
00:00:00')), `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01" + + " 00:00:00'), DATETIME('2002-02-01 14:25:30')), `today - today` =" + + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) | fields `'2000-01-02' -" + + " '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' -" + + " '2002-02-01'`, `today - today`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2000-01-02' - '2000-01-01'", null, "long"), schema("'2001-02-01' - '2004-01-01'", null, "long"), schema("'2004-01-01' - '2002-02-01'", null, "long"), @@ -1057,90 +1451,124 @@ public void testDateDiff() throws IOException { @Test public void testTimeDiff() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f = TIMEDIFF('23:59:59', '13:00:00') | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f = TIMEDIFF('23:59:59', '13:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "time")); verifySome(result.getJSONArray("datarows"), rows("10:59:59")); } @Test - public void testGetFormat() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = date_format('2003-10-03', get_format(DATE,'USA')) | fields f", TEST_INDEX_DATE)); + public void testGetFormat() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = date_format('2003-10-03', get_format(DATE,'USA')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("10.03.2003")); } @Test - public void testLastDay() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = last_day('2003-10-03') | fields f", TEST_INDEX_DATE)); + public void testLastDay() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = last_day('2003-10-03') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); 
verifySome(result.getJSONArray("datarows"), rows("2003-10-31")); } @Test - public void testSecToTime() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = sec_to_time(123456) | fields f", TEST_INDEX_DATE)); + public void testSecToTime() throws IOException { + var result = + executeQuery( + String.format("source=%s | eval f = sec_to_time(123456) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "time")); verifySome(result.getJSONArray("datarows"), rows("10:17:36")); } @Test - public void testYearWeek() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = yearweek('2003-10-03') | eval f2 = yearweek('2003-10-03', 3) | fields f1, f2", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "integer"), - schema("f2", null, "integer")); + public void testYearWeek() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = yearweek('2003-10-03') | eval f2 = yearweek('2003-10-03', 3)" + + " | fields f1, f2", + TEST_INDEX_DATE)); + verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(200339, 200340)); } @Test - public void testWeekDay() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = weekday('2003-10-03') | fields f", TEST_INDEX_DATE)); + public void testWeekDay() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = weekday('2003-10-03') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test - public void testToSeconds() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + - "eval f2 = to_seconds('2020-09-16 07:40:00') | " + - "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", TEST_INDEX_DATE)); - 
verifySchema(result, - schema("f1", null, "long"), - schema("f2", null, "long"), - schema("f3", null, "long")); + public void testToSeconds() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + + "eval f2 = to_seconds('2020-09-16 07:40:00') | " + + "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "long"), schema("f2", null, "long"), schema("f3", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63390556800L, 63767461200L, 63767461200L)); } @Test - public void testStrToDate() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", TEST_INDEX_DATE, "%d,%m,%Y")); + public void testStrToDate() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", + TEST_INDEX_DATE, "%d,%m,%Y")); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2013-05-01 00:00:00")); } @Test - public void testTimeStampAdd() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); + public void testTimeStampAdd() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2016-03-06 00:00:00")); } @Test - public void testTimestampDiff() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); + public void testTimestampDiff() throws IOException { + var result = + executeQuery( 
+ String.format( + "source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06" + + " 00:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test - public void testExtract() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = extract(YEAR FROM '1997-01-01 00:00:00') | eval f2 = extract(MINUTE FROM time('10:17:36')) | fields f1, f2", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "long"), - schema("f2", null, "long")); + public void testExtract() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = extract(YEAR FROM '1997-01-01 00:00:00') | eval f2 =" + + " extract(MINUTE FROM time('10:17:36')) | fields f1, f2", + TEST_INDEX_DATE)); + verifySchema(result, schema("f1", null, "long"), schema("f2", null, "long")); verifySome(result.getJSONArray("datarows"), rows(1997L, 17L)); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index 3f24b619f5..fb97da32ab 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -26,36 +26,45 @@ public void init() throws IOException { @Test public void inRangeZeroToStringTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); } @Test public void 
inRangeZeroToPositive() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); } @Test public void inRangeTwentyHourOffset() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); } @@ -63,85 +72,111 @@ public void inRangeTwentyHourOffset() throws IOException { @Test public void inRangeYearChange() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, 
schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroToMax() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); } @Test public void inRangeNoToTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @Test public void inRangeNoTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @Test public void nullField3Over() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - 
verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullField2Under() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullTField3Over() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = 
DATETIME('2021-04-31 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-04-31 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java index bd4fadb57f..7a6cf16bb4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java index 23bea69a52..aee32e08d1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DOG; @@ -52,19 +51,17 @@ 
public void testDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } @Test public void testDescribeFilterFields() throws IOException { - JSONObject result = executeQuery(String.format("describe %s | fields TABLE_NAME, COLUMN_NAME, TYPE_NAME", TEST_INDEX_DOG)); + JSONObject result = + executeQuery( + String.format( + "describe %s | fields TABLE_NAME, COLUMN_NAME, TYPE_NAME", TEST_INDEX_DOG)); verifyColumn( - result, - columnName("TABLE_NAME"), - columnName("COLUMN_NAME"), - columnName("TYPE_NAME") - ); + result, columnName("TABLE_NAME"), columnName("COLUMN_NAME"), columnName("TYPE_NAME")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java index 1a785e9074..fce975ef92 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.util.MatcherUtils.assertJsonEquals; @@ -35,8 +34,7 @@ public void testExplain() throws Exception { + "| fields - city " + "| eval age2 = avg_age + 2 " + "| dedup age2 " - + "| fields age2") - ); + + "| fields age2")); } @Test @@ -50,8 +48,7 @@ public void testFilterPushDownExplain() throws Exception { + "| where age > 30 " + "| where age < 40 " + "| where balance > 10000 " - + "| fields age") - ); + + "| fields age")); } @Test @@ -63,8 +60,7 @@ public void testFilterAndAggPushDownExplain() throws Exception { explainQueryToString( "source=opensearch-sql_test_index_account" + "| where age > 30 " - + "| stats avg(age) AS avg_age by state, city") - ); + + "| stats avg(age) AS avg_age by state, city")); } @Test @@ -77,8 +73,7 @@ public void testSortPushDownExplain() throws Exception { 
"source=opensearch-sql_test_index_account" + "| sort age " + "| where age > 30" - + "| fields age") - ); + + "| fields age")); } String loadFromFile(String filename) throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java index 4eb99e8b04..e8a287c80e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -43,7 +42,9 @@ public void testFieldsWithMultiFields() throws IOException { verifyColumn(result, columnName("firstname"), columnName("lastname")); } - @Ignore("Cannot resolve wildcard yet. Enable once https://github.com/opensearch-project/sql/issues/787 is resolved.") + @Ignore( + "Cannot resolve wildcard yet. 
Enable once" + + " https://github.com/opensearch-project/sql/issues/787 is resolved.") @Test public void testFieldsWildCard() throws IOException { JSONObject result = @@ -57,14 +58,14 @@ public void testSelectDateTypeField() throws IOException { executeQuery(String.format("source=%s | fields birthdate", TEST_INDEX_BANK)); verifySchema(result, schema("birthdate", null, "timestamp")); - verifyDataRows(result, + verifyDataRows( + result, rows("2017-10-23 00:00:00"), rows("2017-11-20 00:00:00"), rows("2018-06-23 00:00:00"), rows("2018-11-13 23:33:20"), rows("2018-06-27 00:00:00"), rows("2018-08-19 00:00:00"), - rows("2018-08-11 00:00:00") - ); + rows("2018-08-11 00:00:00")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java index 48c489ce10..8a96620fe0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -39,7 +38,8 @@ public void init() throws IOException { public void testHead() throws IOException { JSONObject result = executeQuery(String.format("source=%s | fields firstname, age | head", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -55,11 +55,9 @@ public void testHead() throws IOException { @Test public void testHeadWithNumber() throws IOException { JSONObject result = - executeQuery(String.format("source=%s | fields firstname, age | head 3", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, - rows("Amber", 32), - rows("Hattie", 36), - rows("Nanette", 28)); + executeQuery( + String.format("source=%s | fields firstname, age | head 3", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("Amber", 32), rows("Hattie", 
36), rows("Nanette", 28)); } @Ignore("Fix https://github.com/opensearch-project/sql/issues/703#issuecomment-1211422130") @@ -67,9 +65,10 @@ public void testHeadWithNumber() throws IOException { public void testHeadWithNumberLargerThanQuerySizeLimit() throws IOException { setQuerySizeLimit(5); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 10", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 10", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -86,9 +85,10 @@ public void testHeadWithNumberLargerThanQuerySizeLimit() throws IOException { public void testHeadWithNumberLargerThanMaxResultWindow() throws IOException { setMaxResultWindow(TEST_INDEX_ACCOUNT, 10); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -112,9 +112,10 @@ public void testHeadWithLargeNumber() throws IOException { setQuerySizeLimit(5); setMaxResultWindow(TEST_INDEX_ACCOUNT, 10); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -135,10 +136,8 @@ public void testHeadWithLargeNumber() throws IOException { @Test public void testHeadWithNumberAndFrom() throws IOException { JSONObject result = - executeQuery(String.format("source=%s | fields firstname, age | head 3 from 4", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, - rows("Elinor", 36), - 
rows("Virginia", 39), - rows("Dillard", 34)); + executeQuery( + String.format("source=%s | fields firstname, age | head 3 from 4", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("Elinor", 36), rows("Virginia", 39), rows("Dillard", 34)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java index 37909e4726..cf7cfcdb39 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java @@ -28,10 +28,10 @@ public class InformationSchemaCommandIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. 
+ * * @throws InterruptedException */ @BeforeClass @@ -42,8 +42,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -59,8 +62,9 @@ protected void deleteDataSourceMetadata() throws IOException { @Test public void testSearchTablesFromPrometheusCatalog() throws IOException { JSONObject result = - executeQuery("source=my_prometheus.information_schema.tables " - + "| where LIKE(TABLE_NAME, '%http%')"); + executeQuery( + "source=my_prometheus.information_schema.tables " + + "| where LIKE(TABLE_NAME, '%http%')"); this.logger.error(result.toString()); verifyColumn( result, @@ -69,24 +73,53 @@ public void testSearchTablesFromPrometheusCatalog() throws IOException { columnName("TABLE_NAME"), columnName("TABLE_TYPE"), columnName("UNIT"), - columnName("REMARKS") - ); - verifyDataRows(result, - rows("my_prometheus", "default", "promhttp_metric_handler_requests_in_flight", - "gauge", "", "Current number of scrapes being served."), - rows("my_prometheus", "default", "prometheus_sd_http_failures_total", - "counter", "", "Number of HTTP service discovery refresh failures."), - rows("my_prometheus", "default", "promhttp_metric_handler_requests_total", - "counter", "", "Total number of scrapes by HTTP status code."), - rows("my_prometheus", "default", "prometheus_http_request_duration_seconds", - "histogram", "", "Histogram of latencies for HTTP 
requests."), - rows("my_prometheus", "default", "prometheus_http_requests_total", - "counter", "", "Counter of HTTP requests."), - rows("my_prometheus", "default", "prometheus_http_response_size_bytes", - "histogram", "", "Histogram of response size for HTTP requests.")); + columnName("REMARKS")); + verifyDataRows( + result, + rows( + "my_prometheus", + "default", + "promhttp_metric_handler_requests_in_flight", + "gauge", + "", + "Current number of scrapes being served."), + rows( + "my_prometheus", + "default", + "prometheus_sd_http_failures_total", + "counter", + "", + "Number of HTTP service discovery refresh failures."), + rows( + "my_prometheus", + "default", + "promhttp_metric_handler_requests_total", + "counter", + "", + "Total number of scrapes by HTTP status code."), + rows( + "my_prometheus", + "default", + "prometheus_http_request_duration_seconds", + "histogram", + "", + "Histogram of latencies for HTTP requests."), + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "counter", + "", + "Counter of HTTP requests."), + rows( + "my_prometheus", + "default", + "prometheus_http_response_size_bytes", + "histogram", + "", + "Histogram of response size for HTTP requests.")); } - @Test public void testTablesFromPrometheusCatalog() throws IOException { JSONObject result = @@ -101,15 +134,18 @@ public void testTablesFromPrometheusCatalog() throws IOException { columnName("TABLE_NAME"), columnName("TABLE_TYPE"), columnName("UNIT"), - columnName("REMARKS") - ); - verifyDataRows(result, - rows("my_prometheus", - "default", "prometheus_http_requests_total", - "counter", "", "Counter of HTTP requests.")); + columnName("REMARKS")); + verifyDataRows( + result, + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "counter", + "", + "Counter of HTTP requests.")); } - // Moved this IT from DescribeCommandIT to segregate Datasource Integ Tests. 
@Test public void testDescribeCommandWithPrometheusCatalog() throws IOException { @@ -120,16 +156,19 @@ public void testDescribeCommandWithPrometheusCatalog() throws IOException { columnName("TABLE_SCHEMA"), columnName("TABLE_NAME"), columnName("COLUMN_NAME"), - columnName("DATA_TYPE") - ); - verifyDataRows(result, + columnName("DATA_TYPE")); + verifyDataRows( + result, rows("my_prometheus", "default", "prometheus_http_requests_total", "handler", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "code", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "instance", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "@value", "double"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "@timestamp", + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "@timestamp", "timestamp"), rows("my_prometheus", "default", "prometheus_http_requests_total", "job", "keyword")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java index 4bf9a37a9f..c14b9baa35 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java @@ -16,9 +16,7 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; -/** - * For backward compatibility, check if legacy API endpoints are accessible. - */ +/** For backward compatibility, check if legacy API endpoints are accessible. 
*/ public class LegacyAPICompatibilityIT extends PPLIntegTestCase { @Override @@ -51,22 +49,20 @@ public void stats() throws IOException { @Test public void legacySettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.ppl.query.memory_limit\": \"80%\"" - + " }" - + "}"; + String requestBody = + "{" + + " \"persistent\": {" + + " \"opendistro.ppl.query.memory_limit\": \"80%\"" + + " }" + + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"plugins.query.size_limit\": \"100\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"plugins.query.size_limit\": \"100\"" + " }" + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -83,5 +79,4 @@ private RequestOptions.Builder buildJsonOption() { restOptionsBuilder.addHeader("Content-Type", "application/json"); return restOptionsBuilder; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java index 67ad553689..75dd6aa268 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; @@ -23,9 +22,13 @@ public void init() throws IOException { @Test public void test_like_with_percent() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, 'test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE 
Like(KeywordBody, 'test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard"), rows("test wildcard in the end of the text%"), rows("test wildcard in % the middle of the text"), @@ -37,51 +40,66 @@ public void test_like_with_percent() throws IOException { @Test public void test_like_with_escaped_percent() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, '\\\\%test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, '\\\\%test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("%test wildcard in the beginning of the text")); + verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_like_in_where_with_escaped_underscore() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, '\\\\_test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, '\\\\_test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("_test wildcard in the beginning of the text")); + verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test public void test_like_on_text_field_with_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test*') | fields TextBody"; + String query = + "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test*') | fields TextBody"; JSONObject result = executeQuery(query); assertEquals(9, result.getInt("total")); } @Test public void test_like_on_text_keyword_field_with_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, 'test*') | fields 
TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, 'test*') | fields TextKeywordBody"; JSONObject result = executeQuery(query); assertEquals(8, result.getInt("total")); } @Test public void test_like_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, 'test wild*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, 'test wild*') | fields TextKeywordBody"; JSONObject result = executeQuery(query); assertEquals(7, result.getInt("total")); } @Test public void test_like_on_text_field_with_greater_than_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test wild*') | fields TextBody"; + String query = + "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test wild*') | fields TextBody"; JSONObject result = executeQuery(query); assertEquals(0, result.getInt("total")); } @Test public void test_convert_field_text_to_keyword() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, '*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, '*') | fields TextKeywordBody"; String result = explainQueryToString(query); assertTrue(result.contains("TextKeywordBody.keyword")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java index 42ba8bea53..67e6fac04d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java @@ -28,9 +28,7 @@ public void valid_query_match_test() throws IOException { "source=%s | where match_bool_prefix(phrase, 'qui') | fields phrase", TEST_INDEX_PHRASE)); - 
verifyDataRows(result, - rows("quick fox"), - rows("quick fox here")); + verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test @@ -38,12 +36,11 @@ public void optional_parameter_match_test() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | where match_bool_prefix(phrase, '2 tes', minimum_should_match=1, fuzziness=2) | fields phrase", + "source=%s | where match_bool_prefix(phrase, '2 tes', minimum_should_match=1," + + " fuzziness=2) | fields phrase", TEST_INDEX_PHRASE)); - verifyDataRows(result, - rows("my test"), - rows("my test 2")); + verifyDataRows(result, rows("my test"), rows("my test 2")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java index 808be2334d..908f7a621c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java index 780113de52..5efc2108b9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_PHRASE; @@ -24,18 +23,20 @@ public void init() throws IOException { @Test public void test_match_phrase_function() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where match_phrase(phrase, 'quick fox') | fields phrase", TEST_INDEX_PHRASE)); + executeQuery( + String.format( + "source=%s | where match_phrase(phrase, 'quick fox') | 
fields phrase", + TEST_INDEX_PHRASE)); verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test public void test_match_phrase_with_slop() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where match_phrase(phrase, 'brown fox', slop = 2) | fields phrase", TEST_INDEX_PHRASE)); + executeQuery( + String.format( + "source=%s | where match_phrase(phrase, 'brown fox', slop = 2) | fields phrase", + TEST_INDEX_PHRASE)); verifyDataRows(result, rows("brown fox"), rows("fox brown")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java index 0f827692a5..d6277252a5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java @@ -24,46 +24,48 @@ public void init() throws IOException { public void required_parameters() throws IOException { String query = "source = %s | WHERE match_phrase_prefix(Title, 'champagne be') | fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, + verifyDataRows( + result, rows("Can old flat champagne be used for vinegar?"), rows("Elder flower champagne best to use natural yeast or add a wine yeast?")); } - @Test public void all_optional_parameters() throws IOException { // The values for optional parameters are valid but arbitrary. 
- String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, " + - "zero_terms_query='ALL', max_expansions = 2, analyzer=standard, slop=0) " + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, " + + "zero_terms_query='ALL', max_expansions = 2, analyzer=standard, slop=0) " + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Can old flat champagne be used for vinegar?")); } - @Test public void max_expansions_is_3() throws IOException { // max_expansions applies to the last term in the query -- 'bottl' // It tells OpenSearch to consider only the first 3 terms that start with 'bottl' // In this dataset these are 'bottle-conditioning', 'bottling', 'bottles'. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3) | fields Tags"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, rows("brewing draught bottling"), - rows("draught bottles")); + verifyDataRows(result, rows("brewing draught bottling"), rows("draught bottles")); } @Test public void analyzer_english() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // This results in an empty query. 
- String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english)" + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english)" + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - assertTrue("Expect English analyzer to filter out common words 'in' and 'to'", + assertTrue( + "Expect English analyzer to filter out common words 'in' and 'to'", result.getInt("total") == 0); } @@ -71,9 +73,10 @@ public void analyzer_english() throws IOException { public void analyzer_standard() throws IOException { // Standard analyzer does not treat 'in' and 'to' as special terms. // This results in 'to' being used as a phrase prefix given us 'Tokyo'. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)" + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)" + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Local microbreweries and craft beer in Tokyo")); } @@ -83,21 +86,19 @@ public void zero_term_query_all() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // zero_terms_query of 'ALL' causes all rows to be returned. // ORDER BY ... LIMIT helps make the test understandable. 
- String query = "source = %s" + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL') " + - "| sort -Title | head 1 | fields Title"; + String query = + "source = %s| WHERE match_phrase_prefix(Title, 'in to', analyzer=english," + + " zero_terms_query='ALL') | sort -Title | head 1 | fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("was working great, now all foam")); } - @Test public void slop_is_2() throws IOException { // When slop is 2, the terms are matched exactly in the order specified. // 'open' is used to match prefix of the next term. - String query = "source = %s" + - "| where match_phrase_prefix(Tags, 'gas ta', slop=2) " + - "| fields Tags"; + String query = + "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=2) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("taste gas")); } @@ -105,12 +106,9 @@ public void slop_is_2() throws IOException { @Test public void slop_is_3() throws IOException { // When slop is 3, results will include phrases where the query terms are transposed. 
- String query = "source = %s" + - "| where match_phrase_prefix(Tags, 'gas ta', slop=3)" + - "| fields Tags"; + String query = + "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=3) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, - rows("taste draught gas"), - rows("taste gas")); + verifyDataRows(result, rows("taste draught gas"), rows("taste gas")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java index 6dd2d3916f..2d6a52c12b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -31,138 +30,146 @@ public void init() throws IOException { @Test public void testAbs() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = abs(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = abs(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testCeil() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = ceil(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = ceil(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), 
rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testCeiling() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = ceiling(age) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = ceiling(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testE() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = e() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = e() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.E), rows(Math.E), rows(Math.E), rows(Math.E), - rows(Math.E), rows(Math.E), rows(Math.E)); + result, + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E)); } @Test public void testExp() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = exp(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = exp(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.exp(32)), rows(Math.exp(36)), rows(Math.exp(28)), rows(Math.exp(33)), - rows(Math.exp(36)), rows(Math.exp(39)), rows(Math.exp(34))); + result, + rows(Math.exp(32)), + rows(Math.exp(36)), + rows(Math.exp(28)), + rows(Math.exp(33)), + rows(Math.exp(36)), + rows(Math.exp(39)), + rows(Math.exp(34))); } @Test public void testFloor() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = floor(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = floor(age) | fields f", 
TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testLn() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = ln(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = ln(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.log(32)), rows(Math.log(36)), rows(Math.log(28)), rows(Math.log(33)), - rows(Math.log(36)), rows(Math.log(39)), rows(Math.log(34))); + result, + rows(Math.log(32)), + rows(Math.log(36)), + rows(Math.log(28)), + rows(Math.log(33)), + rows(Math.log(36)), + rows(Math.log(39)), + rows(Math.log(34))); } @Test public void testLogOneArg() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - rows(Math.log(28)), rows(Math.log(32)), rows(Math.log(33)), rows(Math.log(34)), - rows(Math.log(36)), rows(Math.log(36)), rows(Math.log(39)) - ); + verifyDataRows( + result, + rows(Math.log(28)), + rows(Math.log(32)), + rows(Math.log(33)), + rows(Math.log(34)), + rows(Math.log(36)), + rows(Math.log(36)), + rows(Math.log(39))); } @Test public void testLogTwoArgs() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = log(age, balance) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = log(age, balance) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, closeTo(Math.log(39225) / Math.log(32)), closeTo(Math.log(5686) / 
Math.log(36)), - closeTo(Math.log(32838) / Math.log(28)), closeTo(Math.log(4180) / Math.log(33)), - closeTo(Math.log(16418) / Math.log(36)), closeTo(Math.log(40540) / Math.log(39)), + result, + closeTo(Math.log(39225) / Math.log(32)), + closeTo(Math.log(5686) / Math.log(36)), + closeTo(Math.log(32838) / Math.log(28)), + closeTo(Math.log(4180) / Math.log(33)), + closeTo(Math.log(16418) / Math.log(36)), + closeTo(Math.log(40540) / Math.log(39)), closeTo(Math.log(48086) / Math.log(34))); } @Test public void testLog10() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log10(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log10(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.log10(32)), rows(Math.log10(36)), rows(Math.log10(28)), - rows(Math.log10(33)), rows(Math.log10(36)), rows(Math.log10(39)), rows(Math.log10(34))); + result, + rows(Math.log10(32)), + rows(Math.log10(36)), + rows(Math.log10(28)), + rows(Math.log10(33)), + rows(Math.log10(36)), + rows(Math.log10(39)), + rows(Math.log10(34))); } @Test public void testLog2() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log2(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log2(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( result, - closeTo(Math.log(32) / Math.log(2)), closeTo(Math.log(36) / Math.log(2)), - closeTo(Math.log(28) / Math.log(2)), closeTo(Math.log(33) / Math.log(2)), - closeTo(Math.log(36) / Math.log(2)), closeTo(Math.log(39) / Math.log(2)), + closeTo(Math.log(32) / Math.log(2)), + closeTo(Math.log(36) / Math.log(2)), + closeTo(Math.log(28) / Math.log(2)), + closeTo(Math.log(33) / Math.log(2)), + closeTo(Math.log(36) / Math.log(2)), + closeTo(Math.log(39) / Math.log(2)), closeTo(Math.log(34) / 
Math.log(2))); } @@ -170,168 +177,178 @@ public void testLog2() throws IOException { public void testConv() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = conv(age, 10, 16) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = conv(age, 10, 16) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "string")); verifyDataRows( - result, rows("20"), rows("24"), rows("1c"), rows("21"), - rows("24"), rows("27"), rows("22")); + result, rows("20"), rows("24"), rows("1c"), rows("21"), rows("24"), rows("27"), rows("22")); } @Test public void testCrc32() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = crc32(firstname) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = crc32(firstname) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); verifyDataRows( - result, rows(324249283), rows(3369714977L), rows(1165568529), rows(2293694493L), - rows(3936131563L), rows(256963594), rows(824319315)); + result, + rows(324249283), + rows(3369714977L), + rows(1165568529), + rows(2293694493L), + rows(3936131563L), + rows(256963594), + rows(824319315)); } @Test public void testMod() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = mod(age, 10) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = mod(age, 10) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, rows(2), rows(6), rows(8), rows(3), rows(6), rows(9), rows(4)); + verifyDataRows(result, rows(2), rows(6), rows(8), rows(3), rows(6), rows(9), rows(4)); } @Test public void testPow() throws IOException { JSONObject pow = - executeQuery( - String.format( - "source=%s | eval f = pow(age, 2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = pow(age, 2) | fields f", TEST_INDEX_BANK)); verifySchema(pow, 
schema("f", null, "double")); verifyDataRows( - pow, rows(1024.0), rows(1296.0), rows(784.0), rows(1089.0), rows(1296.0), rows(1521.0), rows(1156.0)); + pow, + rows(1024.0), + rows(1296.0), + rows(784.0), + rows(1089.0), + rows(1296.0), + rows(1521.0), + rows(1156.0)); JSONObject power = executeQuery( - String.format( - "source=%s | eval f = power(age, 2) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = power(age, 2) | fields f", TEST_INDEX_BANK)); verifySchema(power, schema("f", null, "double")); verifyDataRows( - power, rows(1024.0), rows(1296.0), rows(784.0), rows(1089.0), rows(1296.0), rows(1521.0), rows(1156.0)); - + power, + rows(1024.0), + rows(1296.0), + rows(784.0), + rows(1089.0), + rows(1296.0), + rows(1521.0), + rows(1156.0)); } @Test public void testRound() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = round(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = round(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); result = executeQuery( - String.format( - "source=%s | eval f = round(age, -1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = round(age, -1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(30), rows(40), rows(30), rows(30), rows(40), rows(40), rows(30)); + verifyDataRows(result, rows(30), rows(40), rows(30), rows(30), rows(40), rows(40), rows(30)); } @Test public void testSign() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sign(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sign(age) | fields f", TEST_INDEX_BANK)); 
verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, rows(1), rows(1), rows(1), rows(1), rows(1), rows(1), rows(1)); + verifyDataRows(result, rows(1), rows(1), rows(1), rows(1), rows(1), rows(1), rows(1)); } @Test public void testSqrt() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sqrt(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sqrt(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - rows(5.656854249492381), rows(6.0), rows(5.291502622129181), - rows(5.744562646538029), rows(6.0), rows(6.244997998398398), + verifyDataRows( + result, + rows(5.656854249492381), + rows(6.0), + rows(5.291502622129181), + rows(5.744562646538029), + rows(6.0), + rows(6.244997998398398), rows(5.830951894845301)); } @Test public void testCbrt() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cbrt(num3) | fields f", TEST_INDEX_CALCS)); + executeQuery(String.format("source=%s | eval f = cbrt(num3) | fields f", TEST_INDEX_CALCS)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - closeTo(Math.cbrt(-11.52)), closeTo(Math.cbrt(-9.31)), closeTo(Math.cbrt(-12.17)), - closeTo(Math.cbrt(-7.25)), closeTo(Math.cbrt(12.93)), closeTo(Math.cbrt(-19.96)), - closeTo(Math.cbrt(10.93)), closeTo(Math.cbrt(3.64)), closeTo(Math.cbrt(-13.38)), - closeTo(Math.cbrt(-10.56)), closeTo(Math.cbrt(-4.79)), closeTo(Math.cbrt(-10.81)), - closeTo(Math.cbrt(-6.62)), closeTo(Math.cbrt(-18.43)), closeTo(Math.cbrt(6.84)), - closeTo(Math.cbrt(-10.98)), closeTo(Math.cbrt(-2.6))); + verifyDataRows( + result, + closeTo(Math.cbrt(-11.52)), + closeTo(Math.cbrt(-9.31)), + closeTo(Math.cbrt(-12.17)), + closeTo(Math.cbrt(-7.25)), + closeTo(Math.cbrt(12.93)), + closeTo(Math.cbrt(-19.96)), + closeTo(Math.cbrt(10.93)), + closeTo(Math.cbrt(3.64)), + closeTo(Math.cbrt(-13.38)), 
+ closeTo(Math.cbrt(-10.56)), + closeTo(Math.cbrt(-4.79)), + closeTo(Math.cbrt(-10.81)), + closeTo(Math.cbrt(-6.62)), + closeTo(Math.cbrt(-18.43)), + closeTo(Math.cbrt(6.84)), + closeTo(Math.cbrt(-10.98)), + closeTo(Math.cbrt(-2.6))); } @Test public void testTruncate() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = truncate(age, 1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = truncate(age, 1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); result = executeQuery( - String.format( - "source=%s | eval f = truncate(age, -1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = truncate(age, -1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(30), rows(30), rows(20), rows(30), rows(30), rows(30), rows(30)); + verifyDataRows(result, rows(30), rows(30), rows(20), rows(30), rows(30), rows(30), rows(30)); } @Test public void testPi() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = pi() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = pi() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.PI), rows(Math.PI), rows(Math.PI), rows(Math.PI), - rows(Math.PI), rows(Math.PI), rows(Math.PI)); + result, + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI)); } @Test public void testRand() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = rand() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = rand() | fields f", 
TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "float")); result = - executeQuery( - String.format( - "source=%s | eval f = rand(5) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = rand(5) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "float")); } @Test public void testAcos() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = acos(0) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = acos(0) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.acos(0))); } @@ -339,9 +356,7 @@ public void testAcos() throws IOException { @Test public void testAsin() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = asin(1) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = asin(1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.asin(1))); } @@ -349,16 +364,12 @@ public void testAsin() throws IOException { @Test public void testAtan() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = atan(2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan(2) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan(2))); result = - executeQuery( - String.format( - "source=%s | eval f = atan(2, 3) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan(2, 3) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); } @@ -366,9 +377,7 @@ public void testAtan() throws IOException { @Test public void testAtan2() throws 
IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = atan2(2, 3) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan2(2, 3) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); } @@ -376,9 +385,7 @@ public void testAtan2() throws IOException { @Test public void testCos() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cos(1.57) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = cos(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.cos(1.57))); } @@ -386,9 +393,7 @@ public void testCos() throws IOException { @Test public void testCot() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cot(2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = cot(2) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), closeTo(1 / Math.tan(2))); } @@ -397,8 +402,7 @@ public void testCot() throws IOException { public void testDegrees() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = degrees(1.57) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = degrees(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.toDegrees(1.57))); } @@ -406,9 +410,7 @@ public void testDegrees() throws IOException { @Test public void testRadians() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = radians(90) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = radians(90) | fields f", 
TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.toRadians(90))); } @@ -416,9 +418,7 @@ public void testRadians() throws IOException { @Test public void testSin() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sin(1.57) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sin(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.sin(1.57))); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java index 41373afdc6..73882a4036 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.hamcrest.Matchers.equalTo; @@ -44,9 +43,7 @@ private void multiQueries(int n) throws IOException { } private Request makeStatRequest() { - return new Request( - "GET", "/_plugins/_ppl/stats" - ); + return new Request("GET", "/_plugins/_ppl/stats"); } private int pplRequestTotal() throws IOException { @@ -70,5 +67,4 @@ private String executeStatRequest(final Request request) throws IOException { return sb.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java index 6562c551da..8fc043d32d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; @@ -21,36 +20,41 @@ public void init() throws IOException { @Test 
public void test_multi_match() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') | fields Id"; var result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void test_multi_match_all_params() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['Body', Tags], 'taste beer', operator='and', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE multi_match(['Body', Tags], 'taste beer', operator='and'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL') |" + + " fields Id"; var result = executeQuery(query); assertEquals(10, result.getInt("total")); } @Test public void test_wildcard_multi_match() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE 
multi_match(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "source=" + TEST_INDEX_BEER - + " | where simple_query_string(['*Date'], '2014-01-22')"; + String query3 = + "source=" + TEST_INDEX_BEER + " | where simple_query_string(['*Date'], '2014-01-22')"; JSONObject result3 = executeQuery(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java index 1ca21041a3..b037167ed7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.equalTo; @@ -66,8 +65,10 @@ public void describeSingleIndexAlias() throws IOException { public void describeSingleIndexWildcard() throws IOException { JSONObject response1 = executeQuery("DESCRIBE TABLES LIKE \\\"%account\\\""); JSONObject response2 = executeQuery("DESCRIBE TABLES LIKE '%account'"); - JSONObject response3 = executeQuery("DESCRIBE TABLES LIKE '%account' COLUMNS LIKE \\\"%name\\\""); - JSONObject response4 = executeQuery("DESCRIBE TABLES LIKE \\\"%account\\\" COLUMNS LIKE '%name'"); + JSONObject response3 = + executeQuery("DESCRIBE TABLES LIKE '%account' COLUMNS LIKE \\\"%name\\\""); + JSONObject response4 = + executeQuery("DESCRIBE TABLES LIKE \\\"%account\\\" COLUMNS LIKE '%name'"); // 11 rows in the output, each corresponds to a column in the table assertEquals(11, response1.getJSONArray("datarows").length()); assertTrue(response1.similar(response2)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java index 1075b14431..339cd56370 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java @@ -37,17 +37,19 @@ protected void init() throws Exception { @Test public void testFilteredAggregatePushDown() throws IOException { - JSONObject response = executeQuery( - "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TEST_INDEX_BANK); + JSONObject response = + executeQuery("SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TEST_INDEX_BANK); verifySchema(response, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); verifyDataRows(response, rows(3)); } @Test public void testFilteredAggregateNotPushDown() throws IOException { - JSONObject response = executeQuery( - "SELECT COUNT(*) FILTER(WHERE age > 35) FROM (SELECT * FROM " + TEST_INDEX_BANK - + ") AS a"); + JSONObject response = + executeQuery( + "SELECT COUNT(*) FILTER(WHERE age > 35) FROM (SELECT * FROM " + + TEST_INDEX_BANK + + ") AS a"); verifySchema(response, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); verifyDataRows(response, rows(3)); } @@ -55,45 +57,65 @@ public void testFilteredAggregateNotPushDown() throws IOException { @Test public void testPushDownAggregationOnNullValues() throws IOException { // OpenSearch aggregation query (MetricAggregation) - var response = executeQuery(String.format( - "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + - "FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, - schema("min(`int`)", null, "integer"), schema("max(`int`)", null, "integer"), - schema("avg(`int`)", null, "double"), schema("min(`dbl`)", null, "double"), - schema("max(`dbl`)", null, "double"), schema("avg(`dbl`)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + + "FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, + schema("min(`int`)", 
null, "integer"), + schema("max(`int`)", null, "integer"), + schema("avg(`int`)", null, "double"), + schema("min(`dbl`)", null, "double"), + schema("max(`dbl`)", null, "double"), + schema("avg(`dbl`)", null, "double")); verifyDataRows(response, rows(null, null, null, null, null, null)); } @Test public void testPushDownAggregationOnMissingValues() throws IOException { // OpenSearch aggregation query (MetricAggregation) - var response = executeQuery(String.format( - "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + - "FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, - schema("min(`int`)", null, "integer"), schema("max(`int`)", null, "integer"), - schema("avg(`int`)", null, "double"), schema("min(`dbl`)", null, "double"), - schema("max(`dbl`)", null, "double"), schema("avg(`dbl`)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + + "FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, + schema("min(`int`)", null, "integer"), + schema("max(`int`)", null, "integer"), + schema("avg(`int`)", null, "double"), + schema("min(`dbl`)", null, "double"), + schema("max(`dbl`)", null, "double"), + schema("avg(`dbl`)", null, "double")); verifyDataRows(response, rows(null, null, null, null, null, null)); } @Test public void testInMemoryAggregationOnNullValues() throws IOException { // In-memory aggregation performed by the plugin - var response = executeQuery(String.format("SELECT" - + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," - + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," - + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" - + " FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT" + + " 
min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," + + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," + + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" + + " FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, schema("min(`int`) over (PARTITION BY `key`)", null, "integer"), schema("max(`int`) over (PARTITION BY `key`)", null, "integer"), schema("avg(`int`) over (PARTITION BY `key`)", null, "double"), schema("min(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("max(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("avg(`dbl`) over (PARTITION BY `key`)", null, "double")); - verifyDataRows(response, // 4 rows with null values + verifyDataRows( + response, // 4 rows with null values rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), @@ -103,19 +125,25 @@ public void testInMemoryAggregationOnNullValues() throws IOException { @Test public void testInMemoryAggregationOnMissingValues() throws IOException { // In-memory aggregation performed by the plugin - var response = executeQuery(String.format("SELECT" - + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," - + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," - + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" - + " FROM %s WHERE `key` = 'missing'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT" + + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," + + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," + + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" + + " FROM %s WHERE `key` = 'missing'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, schema("min(`int`) 
over (PARTITION BY `key`)", null, "integer"), schema("max(`int`) over (PARTITION BY `key`)", null, "integer"), schema("avg(`int`) over (PARTITION BY `key`)", null, "double"), schema("min(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("max(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("avg(`dbl`) over (PARTITION BY `key`)", null, "double")); - verifyDataRows(response, // 4 rows with null values + verifyDataRows( + response, // 4 rows with null values rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), @@ -124,12 +152,17 @@ public void testInMemoryAggregationOnMissingValues() throws IOException { @Test public void testInMemoryAggregationOnNullValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s where int0 IS NULL;", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s where int0 IS NULL;", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); @@ -138,21 +171,31 @@ public void testInMemoryAggregationOnNullValuesReturnsNull() throws IOException @Test public void testInMemoryAggregationOnAllValuesAndOnNotNullReturnsSameResult() throws IOException { - var responseNotNulls = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + 
"from %s where int0 IS NOT NULL;", TEST_INDEX_CALCS)); - var responseAllValues = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s;", TEST_INDEX_CALCS)); - verifySchema(responseNotNulls, + var responseNotNulls = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s where int0 IS NOT NULL;", + TEST_INDEX_CALCS)); + var responseAllValues = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s;", + TEST_INDEX_CALCS)); + verifySchema( + responseNotNulls, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); - verifySchema(responseAllValues, + verifySchema( + responseAllValues, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); @@ -163,9 +206,13 @@ public void testInMemoryAggregationOnAllValuesAndOnNotNullReturnsSameResult() th @Test public void testPushDownAggregationOnNullNumericValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s where int0 IS NULL;", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s where int0 IS NULL;", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(int0)", null, "integer"), schema("min(int0)", 
null, "integer"), schema("avg(int0)", null, "double")); @@ -174,9 +221,13 @@ public void testPushDownAggregationOnNullNumericValuesReturnsNull() throws IOExc @Test public void testPushDownAggregationOnNullDateTimeValuesFromTableReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(datetime1), min(datetime1), avg(datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(datetime1), min(datetime1), avg(datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(datetime1)", null, "timestamp"), schema("min(datetime1)", null, "timestamp"), schema("avg(datetime1)", null, "timestamp")); @@ -185,9 +236,14 @@ public void testPushDownAggregationOnNullDateTimeValuesFromTableReturnsNull() th @Test public void testPushDownAggregationOnNullDateValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS date)), min(CAST(NULL AS date)), avg(CAST(NULL AS date)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS date)), min(CAST(NULL AS date)), avg(CAST(NULL AS date))" + + " from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS date))", null, "date"), schema("min(CAST(NULL AS date))", null, "date"), schema("avg(CAST(NULL AS date))", null, "date")); @@ -196,9 +252,14 @@ public void testPushDownAggregationOnNullDateValuesReturnsNull() throws IOExcept @Test public void testPushDownAggregationOnNullTimeValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS time)), min(CAST(NULL AS time)), avg(CAST(NULL AS time)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS time)), min(CAST(NULL AS time)), avg(CAST(NULL AS time))" + + " from %s", + 
TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS time))", null, "time"), schema("min(CAST(NULL AS time))", null, "time"), schema("avg(CAST(NULL AS time))", null, "time")); @@ -207,9 +268,14 @@ public void testPushDownAggregationOnNullTimeValuesReturnsNull() throws IOExcept @Test public void testPushDownAggregationOnNullTimeStampValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS timestamp)), min(CAST(NULL AS timestamp)), avg(CAST(NULL AS timestamp)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS timestamp)), min(CAST(NULL AS timestamp)), avg(CAST(NULL" + + " AS timestamp)) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS timestamp))", null, "timestamp"), schema("min(CAST(NULL AS timestamp))", null, "timestamp"), schema("avg(CAST(NULL AS timestamp))", null, "timestamp")); @@ -218,9 +284,13 @@ public void testPushDownAggregationOnNullTimeStampValuesReturnsNull() throws IOE @Test public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(datetime(NULL))", null, "datetime"), schema("min(datetime(NULL))", null, "datetime"), schema("avg(datetime(NULL))", null, "datetime")); @@ -229,15 +299,22 @@ public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOEx @Test public void testPushDownAggregationOnAllValuesAndOnNotNullReturnsSameResult() throws IOException { - var responseNotNulls = executeQuery(String.format("SELECT " - + "max(int0), min(int0), 
avg(int0) from %s where int0 IS NOT NULL;", TEST_INDEX_CALCS)); - var responseAllValues = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s;", TEST_INDEX_CALCS)); - verifySchema(responseNotNulls, + var responseNotNulls = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s where int0 IS NOT NULL;", + TEST_INDEX_CALCS)); + var responseAllValues = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s;", TEST_INDEX_CALCS)); + verifySchema( + responseNotNulls, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); - verifySchema(responseAllValues, + verifySchema( + responseAllValues, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); @@ -248,18 +325,21 @@ public void testPushDownAggregationOnAllValuesAndOnNotNullReturnsSameResult() th @Test public void testPushDownAndInMemoryAggregationReturnTheSameResult() throws IOException { - // Playing with 'over (PARTITION BY `datetime1`)' - `datetime1` column has the same value for all rows + // Playing with 'over (PARTITION BY `datetime1`)' - `datetime1` column has the same value for + // all rows // so partitioning by this column has no sense and doesn't (shouldn't) affect the results // Aggregations with `OVER` clause are executed in memory (in SQL plugin memory), // Aggregations without it are performed the OpenSearch node itself (pushed down to opensearch) - // Going to compare results of `min`, `max` and `avg` aggregation on all numeric columns in `calcs` + // Going to compare results of `min`, `max` and `avg` aggregation on all numeric columns in + // `calcs` var columns = List.of("int0", "int1", "int2", "int3", "num0", "num1", "num2", "num3", "num4"); var aggregations = List.of("min", "max", "avg"); var inMemoryAggregQuery = new StringBuilder("SELECT "); var pushDownAggregQuery = new 
StringBuilder("SELECT "); for (var col : columns) { for (var aggreg : aggregations) { - inMemoryAggregQuery.append(String.format(" %s(%s) over (PARTITION BY `datetime1`),", aggreg, col)); + inMemoryAggregQuery.append( + String.format(" %s(%s) over (PARTITION BY `datetime1`),", aggreg, col)); pushDownAggregQuery.append(String.format(" %s(%s),", aggreg, col)); } } @@ -267,313 +347,362 @@ public void testPushDownAndInMemoryAggregationReturnTheSameResult() throws IOExc inMemoryAggregQuery.deleteCharAt(inMemoryAggregQuery.length() - 1); pushDownAggregQuery.deleteCharAt(pushDownAggregQuery.length() - 1); - var responseInMemory = executeQuery( - inMemoryAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); - var responsePushDown = executeQuery( - pushDownAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); + var responseInMemory = + executeQuery(inMemoryAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); + var responsePushDown = + executeQuery(pushDownAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); for (int i = 0; i < columns.size() * aggregations.size(); i++) { assertEquals( - ((Number)responseInMemory.query("/datarows/0/" + i)).doubleValue(), - ((Number)responsePushDown.query("/datarows/0/" + i)).doubleValue(), + ((Number) responseInMemory.query("/datarows/0/" + i)).doubleValue(), + ((Number) responsePushDown.query("/datarows/0/" + i)).doubleValue(), 0.0000001); // a minor delta is affordable } } public void testMinIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(int2)", null, "integer")); verifyDataRows(response, rows(-9)); } @Test public void testMaxIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = 
executeQuery(String.format("SELECT max(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(int2)", null, "integer")); verifyDataRows(response, rows(9)); } @Test public void testAvgIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(int2)", null, "double")); verifyDataRows(response, rows(-0.8235294117647058D)); } @Test public void testMinDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(num3)", null, "double")); verifyDataRows(response, rows(-19.96D)); } @Test public void testMaxDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(num3)", null, "double")); verifyDataRows(response, rows(12.93D)); } @Test public void testAvgDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(num3)", null, "double")); verifyDataRows(response, rows(-6.12D)); } @Test public void testMinIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(int2) OVER(PARTITION BY datetime1)", null, "integer")); + var response = + executeQuery( + String.format( + "SELECT 
min(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(int2) OVER(PARTITION BY datetime1)", null, "integer")); verifySome(response.getJSONArray("datarows"), rows(-9)); } @Test public void testMaxIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(int2) OVER(PARTITION BY datetime1)", null, "integer")); + var response = + executeQuery( + String.format( + "SELECT max(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(int2) OVER(PARTITION BY datetime1)", null, "integer")); verifySome(response.getJSONArray("datarows"), rows(9)); } @Test public void testAvgIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(int2) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT avg(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(int2) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-0.8235294117647058D)); } @Test public void testMinDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-19.96D)); } @Test public void 
testMaxDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT max(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(12.93D)); } @Test public void testAvgDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT avg(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-6.12D)); } @Test public void testMaxDatePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(date0)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(date0)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(date0)", null, "date")); verifyDataRows(response, rows("2004-06-19")); } @Test public void testAvgDatePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(date0)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(date0)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(date0)", null, "date")); verifyDataRows(response, rows("1992-04-23")); } @Test public void testMinDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT 
min(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT min(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(datetime(CAST(time0 AS STRING)))", null, "datetime")); verifyDataRows(response, rows("1899-12-30 21:07:32")); } @Test public void testMaxDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT max(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(datetime(CAST(time0 AS STRING)))", null, "datetime")); verifyDataRows(response, rows("1900-01-01 20:36:00")); } @Test public void testAvgDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT avg(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(datetime(CAST(time0 AS STRING)))", null, "datetime")); verifyDataRows(response, rows("1900-01-01 03:35:00.236")); } @Test public void testMinTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(time1)", null, "time")); verifyDataRows(response, rows("00:05:57")); } @Test public void testMaxTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(time1)", null, "time")); 
verifyDataRows(response, rows("22:50:16")); } @Test public void testAvgTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(time1)", null, "time")); verifyDataRows(response, rows("13:06:36.25")); } @Test public void testMinTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT min(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-07-04 22:49:28")); } @Test public void testMaxTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT max(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-08-02 07:59:23")); } @Test public void testAvgTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT avg(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-07-20 10:38:09.705")); } @Test public void testMinDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(date0)" - + " OVER(PARTITION BY datetime1) from %s", 
TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT min(date0)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("1972-07-04")); } @Test public void testMaxDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT max(date0)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("2004-06-19")); } @Test public void testAvgDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT avg(date0)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("1992-04-23")); } @Test public void testMinDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT min(datetime(CAST(time0 AS 
STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1899-12-30 21:07:32")); } @Test public void testMaxDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT max(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 20:36:00")); } @Test public void testAvgDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT avg(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 03:35:00.236")); } @Test public void testMinTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(time1) OVER(PARTITION BY datetime1)", null, 
"time")); + var response = + executeQuery( + String.format( + "SELECT min(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("00:05:57")); } @Test public void testMaxTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT max(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("22:50:16")); } @Test public void testAvgTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT avg(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("13:06:36.25")); } @Test public void testMinTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT min(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + 
"min(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-07-04 22:49:28")); } @Test public void testMaxTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT max(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "max(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-08-02 07:59:23")); } @Test public void testAvgTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT avg(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "avg(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-07-20 10:38:09.705")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java index dd99cf2e75..7c91c42197 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - 
package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; @@ -24,222 +23,223 @@ public class ArithmeticFunctionIT extends SQLIntegTestCase { - @Override - public void init() throws Exception { - super.init(); - loadIndex(Index.BANK); - } - - public void testAdd() throws IOException { - JSONObject result = executeQuery("select 3 + 2"); - verifySchema(result, schema("3 + 2", null, "integer")); - verifyDataRows(result, rows(3 + 2)); - - result = executeQuery("select 2.5 + 2"); - verifySchema(result, schema("2.5 + 2", null, "double")); - verifyDataRows(result, rows(2.5D + 2)); - - result = executeQuery("select 3000000000 + 2"); - verifySchema(result, schema("3000000000 + 2", null, "long")); - verifyDataRows(result, rows(3000000000L + 2)); - - result = executeQuery("select CAST(6.666666 AS FLOAT) + 2"); - verifySchema(result, schema("CAST(6.666666 AS FLOAT) + 2", null, "float")); - verifyDataRows(result, rows(6.666666 + 2)); - } - - @Test - public void testAddFunction() throws IOException { - JSONObject result = executeQuery("select add(3, 2)"); - verifySchema(result, schema("add(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 + 2)); - - result = executeQuery("select add(2.5, 2)"); - verifySchema(result, schema("add(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D + 2)); - - result = executeQuery("select add(3000000000, 2)"); - verifySchema(result, schema("add(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L + 2)); - - result = executeQuery("select add(CAST(6.666666 AS FLOAT), 2)"); - verifySchema(result, schema("add(CAST(6.666666 AS FLOAT), 2)", null, "float")); - verifyDataRows(result, rows(6.666666 + 2)); - } - - public void testDivide() throws IOException { - JSONObject result = executeQuery("select 3 / 2"); - verifySchema(result, schema("3 / 2", null, "integer")); - verifyDataRows(result, rows(3 / 2)); - - result = executeQuery("select 2.5 / 2"); - 
verifySchema(result, schema("2.5 / 2", null, "double")); - verifyDataRows(result, rows(2.5D / 2)); - - result = executeQuery("select 6000000000 / 2"); - verifySchema(result, schema("6000000000 / 2", null, "long")); - verifyDataRows(result, rows(6000000000L / 2)); - - result = executeQuery("select cast(1.6 AS float) / 2"); - verifySchema(result, schema("cast(1.6 AS float) / 2", null, "float")); - verifyDataRows(result, rows(1.6 / 2)); - } - - public void testDivideFunction() throws IOException { - JSONObject result = executeQuery("select divide(3, 2)"); - verifySchema(result, schema("divide(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 / 2)); - - result = executeQuery("select divide(2.5, 2)"); - verifySchema(result, schema("divide(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D / 2)); - - result = executeQuery("select divide(6000000000, 2)"); - verifySchema(result, schema("divide(6000000000, 2)", null, "long")); - verifyDataRows(result, rows(6000000000L / 2)); - - result = executeQuery("select divide(cast(1.6 AS float), 2)"); - verifySchema(result, schema("divide(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 / 2)); - } - - public void testMod() throws IOException { - JSONObject result = executeQuery("select mod(3, 2)"); - verifySchema(result, schema("mod(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select mod(2.5, 2)"); - verifySchema(result, schema("mod(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select mod(cast(300001 as long), 2)"); - verifySchema(result, schema("mod(cast(300001 as long), 2)", null, "long")); - verifyDataRows(result, rows(3000001 % 2)); - - result = executeQuery("select mod(cast(1.6 AS float), 2)"); - verifySchema(result, schema("mod(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testModulus() throws IOException { - JSONObject result = 
executeQuery("select 3 % 2"); - verifySchema(result, schema("3 % 2", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select 2.5 % 2"); - verifySchema(result, schema("2.5 % 2", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select cast(300001 as long) % 2"); - verifySchema(result, schema("cast(300001 as long) % 2", null, "long")); - verifyDataRows(result, rows(300001 % 2)); - - result = executeQuery("select cast(1.6 AS float) % 2"); - verifySchema(result, schema("cast(1.6 AS float) % 2", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testModulusFunction() throws IOException { - JSONObject result = executeQuery("select modulus(3, 2)"); - verifySchema(result, schema("modulus(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select modulus(2.5, 2)"); - verifySchema(result, schema("modulus(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select modulus(cast(300001 as long), 2)"); - verifySchema(result, schema("modulus(cast(300001 as long), 2)", null, "long")); - verifyDataRows(result, rows(300001 % 2)); - - result = executeQuery("select modulus(cast(1.6 AS float), 2)"); - verifySchema(result, schema("modulus(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testMultiply() throws IOException { - JSONObject result = executeQuery("select 3 * 2"); - verifySchema(result, schema("3 * 2", null, "integer")); - verifyDataRows(result, rows(3 * 2)); - - result = executeQuery("select 2.5 * 2"); - verifySchema(result, schema("2.5 * 2", null, "double")); - verifyDataRows(result, rows(2.5D * 2)); - - result = executeQuery("select 3000000000 * 2"); - verifySchema(result, schema("3000000000 * 2", null, "long")); - verifyDataRows(result, rows(3000000000L * 2)); - - result = executeQuery("select CAST(1.6 AS FLOAT) * 2"); - 
verifySchema(result, schema("CAST(1.6 AS FLOAT) * 2", null, "float")); - verifyDataRows(result, rows(1.6 * 2)); - } - - @Test - public void testMultiplyFunction() throws IOException { - JSONObject result = executeQuery("select multiply(3, 2)"); - verifySchema(result, schema("multiply(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 * 2)); - - result = executeQuery("select multiply(2.5, 2)"); - verifySchema(result, schema("multiply(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D * 2)); - - result = executeQuery("select multiply(3000000000, 2)"); - verifySchema(result, schema("multiply(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L * 2)); - - result = executeQuery("select multiply(CAST(1.6 AS FLOAT), 2)"); - verifySchema(result, schema("multiply(CAST(1.6 AS FLOAT), 2)", null, "float")); - verifyDataRows(result, rows(1.6 * 2)); - } - - public void testSubtract() throws IOException { - JSONObject result = executeQuery("select 3 - 2"); - verifySchema(result, schema("3 - 2", null, "integer")); - verifyDataRows(result, rows(3 - 2)); - - result = executeQuery("select 2.5 - 2"); - verifySchema(result, schema("2.5 - 2", null, "double")); - verifyDataRows(result, rows(2.5D - 2)); - - result = executeQuery("select 3000000000 - 2"); - verifySchema(result, schema("3000000000 - 2", null, "long")); - verifyDataRows(result, rows(3000000000L - 2)); - - result = executeQuery("select CAST(6.666666 AS FLOAT) - 2"); - verifySchema(result, schema("CAST(6.666666 AS FLOAT) - 2", null, "float")); - verifyDataRows(result, rows(6.666666 - 2)); - } - - @Test - public void testSubtractFunction() throws IOException { - JSONObject result = executeQuery("select subtract(3, 2)"); - verifySchema(result, schema("subtract(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 - 2)); - - result = executeQuery("select subtract(2.5, 2)"); - verifySchema(result, schema("subtract(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D - 2)); - 
- result = executeQuery("select subtract(3000000000, 2)"); - verifySchema(result, schema("subtract(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L - 2)); - - result = executeQuery("select cast(subtract(cast(6.666666 as float), 2) as float)"); - verifySchema(result, schema("cast(subtract(cast(6.666666 as float), 2) as float)", null, "float")); - verifyDataRows(result, rows(6.666666 - 2)); - } - - protected JSONObject executeQuery(String query) throws IOException { - Request request = new Request("POST", QUERY_API_ENDPOINT); - request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - return new JSONObject(getResponseBody(response)); - } + @Override + public void init() throws Exception { + super.init(); + loadIndex(Index.BANK); + } + + public void testAdd() throws IOException { + JSONObject result = executeQuery("select 3 + 2"); + verifySchema(result, schema("3 + 2", null, "integer")); + verifyDataRows(result, rows(3 + 2)); + + result = executeQuery("select 2.5 + 2"); + verifySchema(result, schema("2.5 + 2", null, "double")); + verifyDataRows(result, rows(2.5D + 2)); + + result = executeQuery("select 3000000000 + 2"); + verifySchema(result, schema("3000000000 + 2", null, "long")); + verifyDataRows(result, rows(3000000000L + 2)); + + result = executeQuery("select CAST(6.666666 AS FLOAT) + 2"); + verifySchema(result, schema("CAST(6.666666 AS FLOAT) + 2", null, "float")); + verifyDataRows(result, rows(6.666666 + 2)); + } + + @Test + public void testAddFunction() throws IOException { + JSONObject result = executeQuery("select add(3, 2)"); + verifySchema(result, schema("add(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 + 2)); + + result = 
executeQuery("select add(2.5, 2)"); + verifySchema(result, schema("add(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D + 2)); + + result = executeQuery("select add(3000000000, 2)"); + verifySchema(result, schema("add(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L + 2)); + + result = executeQuery("select add(CAST(6.666666 AS FLOAT), 2)"); + verifySchema(result, schema("add(CAST(6.666666 AS FLOAT), 2)", null, "float")); + verifyDataRows(result, rows(6.666666 + 2)); + } + + public void testDivide() throws IOException { + JSONObject result = executeQuery("select 3 / 2"); + verifySchema(result, schema("3 / 2", null, "integer")); + verifyDataRows(result, rows(3 / 2)); + + result = executeQuery("select 2.5 / 2"); + verifySchema(result, schema("2.5 / 2", null, "double")); + verifyDataRows(result, rows(2.5D / 2)); + + result = executeQuery("select 6000000000 / 2"); + verifySchema(result, schema("6000000000 / 2", null, "long")); + verifyDataRows(result, rows(6000000000L / 2)); + + result = executeQuery("select cast(1.6 AS float) / 2"); + verifySchema(result, schema("cast(1.6 AS float) / 2", null, "float")); + verifyDataRows(result, rows(1.6 / 2)); + } + + public void testDivideFunction() throws IOException { + JSONObject result = executeQuery("select divide(3, 2)"); + verifySchema(result, schema("divide(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 / 2)); + + result = executeQuery("select divide(2.5, 2)"); + verifySchema(result, schema("divide(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D / 2)); + + result = executeQuery("select divide(6000000000, 2)"); + verifySchema(result, schema("divide(6000000000, 2)", null, "long")); + verifyDataRows(result, rows(6000000000L / 2)); + + result = executeQuery("select divide(cast(1.6 AS float), 2)"); + verifySchema(result, schema("divide(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 / 2)); + } + + public void testMod() throws IOException 
{ + JSONObject result = executeQuery("select mod(3, 2)"); + verifySchema(result, schema("mod(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select mod(2.5, 2)"); + verifySchema(result, schema("mod(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select mod(cast(300001 as long), 2)"); + verifySchema(result, schema("mod(cast(300001 as long), 2)", null, "long")); + verifyDataRows(result, rows(3000001 % 2)); + + result = executeQuery("select mod(cast(1.6 AS float), 2)"); + verifySchema(result, schema("mod(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testModulus() throws IOException { + JSONObject result = executeQuery("select 3 % 2"); + verifySchema(result, schema("3 % 2", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select 2.5 % 2"); + verifySchema(result, schema("2.5 % 2", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select cast(300001 as long) % 2"); + verifySchema(result, schema("cast(300001 as long) % 2", null, "long")); + verifyDataRows(result, rows(300001 % 2)); + + result = executeQuery("select cast(1.6 AS float) % 2"); + verifySchema(result, schema("cast(1.6 AS float) % 2", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testModulusFunction() throws IOException { + JSONObject result = executeQuery("select modulus(3, 2)"); + verifySchema(result, schema("modulus(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select modulus(2.5, 2)"); + verifySchema(result, schema("modulus(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select modulus(cast(300001 as long), 2)"); + verifySchema(result, schema("modulus(cast(300001 as long), 2)", null, "long")); + verifyDataRows(result, rows(300001 % 2)); + + result = 
executeQuery("select modulus(cast(1.6 AS float), 2)"); + verifySchema(result, schema("modulus(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testMultiply() throws IOException { + JSONObject result = executeQuery("select 3 * 2"); + verifySchema(result, schema("3 * 2", null, "integer")); + verifyDataRows(result, rows(3 * 2)); + + result = executeQuery("select 2.5 * 2"); + verifySchema(result, schema("2.5 * 2", null, "double")); + verifyDataRows(result, rows(2.5D * 2)); + + result = executeQuery("select 3000000000 * 2"); + verifySchema(result, schema("3000000000 * 2", null, "long")); + verifyDataRows(result, rows(3000000000L * 2)); + + result = executeQuery("select CAST(1.6 AS FLOAT) * 2"); + verifySchema(result, schema("CAST(1.6 AS FLOAT) * 2", null, "float")); + verifyDataRows(result, rows(1.6 * 2)); + } + + @Test + public void testMultiplyFunction() throws IOException { + JSONObject result = executeQuery("select multiply(3, 2)"); + verifySchema(result, schema("multiply(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 * 2)); + + result = executeQuery("select multiply(2.5, 2)"); + verifySchema(result, schema("multiply(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D * 2)); + + result = executeQuery("select multiply(3000000000, 2)"); + verifySchema(result, schema("multiply(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L * 2)); + + result = executeQuery("select multiply(CAST(1.6 AS FLOAT), 2)"); + verifySchema(result, schema("multiply(CAST(1.6 AS FLOAT), 2)", null, "float")); + verifyDataRows(result, rows(1.6 * 2)); + } + + public void testSubtract() throws IOException { + JSONObject result = executeQuery("select 3 - 2"); + verifySchema(result, schema("3 - 2", null, "integer")); + verifyDataRows(result, rows(3 - 2)); + + result = executeQuery("select 2.5 - 2"); + verifySchema(result, schema("2.5 - 2", null, "double")); + verifyDataRows(result, rows(2.5D - 2)); + + 
result = executeQuery("select 3000000000 - 2"); + verifySchema(result, schema("3000000000 - 2", null, "long")); + verifyDataRows(result, rows(3000000000L - 2)); + + result = executeQuery("select CAST(6.666666 AS FLOAT) - 2"); + verifySchema(result, schema("CAST(6.666666 AS FLOAT) - 2", null, "float")); + verifyDataRows(result, rows(6.666666 - 2)); + } + + @Test + public void testSubtractFunction() throws IOException { + JSONObject result = executeQuery("select subtract(3, 2)"); + verifySchema(result, schema("subtract(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 - 2)); + + result = executeQuery("select subtract(2.5, 2)"); + verifySchema(result, schema("subtract(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D - 2)); + + result = executeQuery("select subtract(3000000000, 2)"); + verifySchema(result, schema("subtract(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L - 2)); + + result = executeQuery("select cast(subtract(cast(6.666666 as float), 2) as float)"); + verifySchema( + result, schema("cast(subtract(cast(6.666666 as float), 2) as float)", null, "float")); + verifyDataRows(result, rows(6.666666 - 2)); + } + + protected JSONObject executeQuery(String query) throws IOException { + Request request = new Request("POST", QUERY_API_ENDPOINT); + request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + return new JSONObject(getResponseBody(response)); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java index ab0900784d..deb41653e2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.equalTo; @@ -42,9 +41,11 @@ public void init() throws Exception { @Test public void ifnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IFNULL(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT - + " GROUP BY name"); + JSONObject response = + executeJdbcRequest( + "SELECT IFNULL(lastname, 'unknown') AS name FROM " + + TEST_INDEX_ACCOUNT + + " GROUP BY name"); assertEquals("IFNULL(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -52,87 +53,95 @@ public void ifnullShouldPassJDBC() throws IOException { @Test public void ifnullWithNullInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT IFNULL(null, firstname) as IFNULL1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IFNULL(null, firstname) as IFNULL1 ," + " IFNULL(firstname, null) as IFNULL2 ," + " IFNULL(null, null) as IFNULL3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 2", "jdbc")); - - verifySchema(response, - schema("IFNULL(null, firstname)", "IFNULL1", "keyword"), - schema("IFNULL(firstname, null)", "IFNULL2", "keyword"), - schema("IFNULL(null, null)", "IFNULL3", "byte")); - verifyDataRows(response, - rows("Hattie", "Hattie", LITERAL_NULL.value()), - rows( "Elinor", "Elinor", LITERAL_NULL.value()) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", + "jdbc")); + + verifySchema( + response, + schema("IFNULL(null, firstname)", "IFNULL1", "keyword"), + schema("IFNULL(firstname, null)", "IFNULL2", "keyword"), + schema("IFNULL(null, null)", "IFNULL3", "byte")); + verifyDataRows( + response, + rows("Hattie", "Hattie", 
LITERAL_NULL.value()), + rows("Elinor", "Elinor", LITERAL_NULL.value())); } @Test public void ifnullWithMissingInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT IFNULL(balance, 100) as IFNULL1, " + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IFNULL(balance, 100) as IFNULL1, " + " IFNULL(200, balance) as IFNULL2, " + " IFNULL(balance, balance) as IFNULL3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 3", "jdbc")); - verifySchema(response, - schema("IFNULL(balance, 100)", "IFNULL1", "long"), - schema("IFNULL(200, balance)", "IFNULL2", "long"), - schema("IFNULL(balance, balance)", "IFNULL3", "long")); - verifyDataRows(response, - rows(100, 200, null), - rows(100, 200, null), - rows(100, 200, null) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 3", + "jdbc")); + verifySchema( + response, + schema("IFNULL(balance, 100)", "IFNULL1", "long"), + schema("IFNULL(200, balance)", "IFNULL2", "long"), + schema("IFNULL(balance, balance)", "IFNULL3", "long")); + verifyDataRows(response, rows(100, 200, null), rows(100, 200, null), rows(100, 200, null)); } @Test public void nullifShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT NULLIF(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT NULLIF(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("NULLIF(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); } @Test - public void nullifWithNotNullInputTestOne(){ - JSONObject response = new JSONObject(executeQuery( - "SELECT NULLIF(firstname, 'Amber JOHnny') as testnullif " - + "FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " limit 2 ", "jdbc")); - verifySchema(response, - schema("NULLIF(firstname, 'Amber JOHnny')", 
"testnullif", "keyword")); - verifyDataRows(response, - rows(LITERAL_NULL.value()), - rows("Hattie") - ); + public void nullifWithNotNullInputTestOne() { + JSONObject response = + new JSONObject( + executeQuery( + "SELECT NULLIF(firstname, 'Amber JOHnny') as testnullif " + + "FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", + "jdbc")); + verifySchema(response, schema("NULLIF(firstname, 'Amber JOHnny')", "testnullif", "keyword")); + verifyDataRows(response, rows(LITERAL_NULL.value()), rows("Hattie")); } @Test public void nullifWithNullInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT NULLIF(1/0, 123) as nullif1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT NULLIF(1/0, 123) as nullif1 ," + " NULLIF(123, 1/0) as nullif2 ," + " NULLIF(1/0, 1/0) as nullif3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 1", "jdbc")); - verifySchema(response, - schema("NULLIF(1/0, 123)", "nullif1", "integer"), - schema("NULLIF(123, 1/0)", "nullif2", "integer"), - schema("NULLIF(1/0, 1/0)", "nullif3", "integer")); - verifyDataRows(response, - rows(LITERAL_NULL.value(), 123, LITERAL_NULL.value() - ) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 1", + "jdbc")); + verifySchema( + response, + schema("NULLIF(1/0, 123)", "nullif1", "integer"), + schema("NULLIF(123, 1/0)", "nullif2", "integer"), + schema("NULLIF(1/0, 1/0)", "nullif3", "integer")); + verifyDataRows(response, rows(LITERAL_NULL.value(), 123, LITERAL_NULL.value())); } @Test public void isnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("boolean", 
response.query("/schema/0/type")); @@ -141,47 +150,48 @@ public void isnullShouldPassJDBC() throws IOException { @Test public void isnullWithNotNullInputTest() throws IOException { assertThat( - executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( - executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); } @Test public void isnullWithNullInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT ISNULL(1/0) as ISNULL1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT ISNULL(1/0) as ISNULL1 ," + " ISNULL(firstname) as ISNULL2 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 2", "jdbc")); - verifySchema(response, - schema("ISNULL(1/0)", "ISNULL1", "boolean"), - schema("ISNULL(firstname)", "ISNULL2", "boolean")); - verifyDataRows(response, - rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()), - rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", + "jdbc")); + verifySchema( + response, + schema("ISNULL(1/0)", "ISNULL1", "boolean"), + schema("ISNULL(firstname)", "ISNULL2", "boolean")); + verifyDataRows( + response, + rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()), + rows(LITERAL_TRUE.value(), LITERAL_FALSE.value())); } @Test - public void isnullWithMathExpr() throws IOException{ + public void isnullWithMathExpr() throws IOException { assertThat( - executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT 
ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( - executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } @Test public void ifShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IF(2 > 0, 'hello', 'world') AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT IF(2 > 0, 'hello', 'world') AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("IF(2 > 0, 'hello', 'world')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -189,33 +199,37 @@ public void ifShouldPassJDBC() throws IOException { @Test public void ifWithTrueAndFalseCondition() throws IOException { - JSONObject response = new JSONObject(executeQuery( - "SELECT IF(2 < 0, firstname, lastname) as IF0, " - + " IF(2 > 0, firstname, lastname) as IF1, " - + " firstname as IF2, " - + " lastname as IF3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " limit 2 ", "jdbc" )); - verifySchema(response, - schema("IF(2 < 0, firstname, lastname)", "IF0", "keyword"), - schema("IF(2 > 0, firstname, lastname)", "IF1", "keyword"), - schema("firstname", "IF2", "text"), - schema("lastname", "IF3", "keyword") - ); - verifyDataRows(response, - rows("Duke Willmington", "Amber JOHnny", "Amber JOHnny", "Duke Willmington"), - rows("Bond", "Hattie", "Hattie", "Bond") - ); - + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IF(2 < 0, firstname, lastname) as IF0, " + + " IF(2 > 0, firstname, lastname) as IF1, " + + " firstname as IF2, " + + " lastname as IF3 " + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", + "jdbc")); + verifySchema( + response, + 
schema("IF(2 < 0, firstname, lastname)", "IF0", "keyword"), + schema("IF(2 > 0, firstname, lastname)", "IF1", "keyword"), + schema("firstname", "IF2", "text"), + schema("lastname", "IF3", "keyword")); + verifyDataRows( + response, + rows("Duke Willmington", "Amber JOHnny", "Amber JOHnny", "Duke Willmington"), + rows("Bond", "Hattie", "Hattie", "Bond")); } private SearchHits query(String query) throws IOException { final String rsp = executeQueryWithStringOutput(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(rsp)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(rsp)); return SearchResponse.fromXContent(parser).getHits(); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java b/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java index cd5765e0ce..33c9c0687f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static java.util.Collections.emptyMap; @@ -25,15 +24,13 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * SQL integration test base class. This is very similar to CorrectnessIT though - * enforce the success of all tests rather than report failures only. + * SQL integration test base class. This is very similar to CorrectnessIT though enforce the success + * of all tests rather than report failures only. */ @ThreadLeakScope(ThreadLeakScope.Scope.NONE) public abstract class CorrectnessTestBase extends RestIntegTestCase { - /** - * Comparison test runner shared by all methods in this IT class. 
- */ + /** Comparison test runner shared by all methods in this IT class. */ private static ComparisonTest runner; @Override @@ -43,8 +40,7 @@ protected void init() throws Exception { } TestConfig config = new TestConfig(emptyMap()); - runner = new ComparisonTest(getOpenSearchConnection(), - getOtherDBConnections(config)); + runner = new ComparisonTest(getOpenSearchConnection(), getOtherDBConnections(config)); runner.connect(); for (TestDataSet dataSet : config.getTestDataSets()) { @@ -52,9 +48,7 @@ protected void init() throws Exception { } } - /** - * Clean up test data and close other database connection. - */ + /** Clean up test data and close other database connection. */ @AfterClass public static void cleanUp() { if (runner == null) { @@ -74,33 +68,29 @@ public static void cleanUp() { } /** - * Execute the given queries and compare result with other database. - * The queries will be considered as one test batch. + * Execute the given queries and compare result with other database. The queries will be + * considered as one test batch. */ protected void verify(String... queries) { TestReport result = runner.verify(new TestQuerySet(queries)); TestSummary summary = result.getSummary(); - Assert.assertEquals(StringUtils.format( - "Comparison test failed on queries: %s", new JSONObject(result).toString(2)), - 0, summary.getFailure()); + Assert.assertEquals( + StringUtils.format( + "Comparison test failed on queries: %s", new JSONObject(result).toString(2)), + 0, + summary.getFailure()); } - /** - * Use OpenSearch cluster initialized by OpenSearch Gradle task. - */ + /** Use OpenSearch cluster initialized by OpenSearch Gradle task. */ private DBConnection getOpenSearchConnection() { String openSearchHost = client().getNodes().get(0).getHost().toString(); return new OpenSearchConnection("jdbc:opensearch://" + openSearchHost, client()); } - /** - * Create database connection with database name and connect URL. 
- */ + /** Create database connection with database name and connect URL. */ private DBConnection[] getOtherDBConnections(TestConfig config) { - return config.getOtherDbConnectionNameAndUrls() - .entrySet().stream() - .map(e -> new JDBCConnection(e.getKey(), e.getValue())) - .toArray(DBConnection[]::new); + return config.getOtherDbConnectionNameAndUrls().entrySet().stream() + .map(e -> new JDBCConnection(e.getKey(), e.getValue())) + .toArray(DBConnection[]::new); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java index 3af4db89de..330268c0e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; @@ -26,36 +25,45 @@ public void init() throws IOException { @Test public void sanitizeTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), "csv"); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "'+Amber JOHnny,Duke Willmington+%n" - + "'-Hattie,Bond-%n" - + "'=Nanette,Bates=%n" - + "'@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + "csv"); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void escapeSanitizeTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), - "csv&sanitize=false"); - 
assertEquals(StringUtils.format( - "firstname,lastname%n" - + "+Amber JOHnny,Duke Willmington+%n" - + "-Hattie,Bond-%n" - + "=Nanette,Bates=%n" - + "@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + "csv&sanitize=false"); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void contentHeaderTest() throws IOException { - String query = String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE); + String query = + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE); String requestBody = makeRequest(query); Request sqlRequest = new Request("POST", "/_plugins/_sql?format=csv"); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java index e935b269f1..432daef82f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java @@ -54,9 +54,10 @@ public void resetTimeZone() { private String name; private Boolean expectedResult; - public DateTimeComparisonIT(@Name("functionCall") String functionCall, - @Name("name") String name, - @Name("expectedResult") Boolean expectedResult) { + public DateTimeComparisonIT( + @Name("functionCall") String functionCall, + @Name("name") String name, + @Name("expectedResult") Boolean expectedResult) { this.functionCall = functionCall; this.name = name; this.expectedResult = expectedResult; @@ -64,542 +65,698 @@ public DateTimeComparisonIT(@Name("functionCall") String functionCall, @ParametersFactory(argumentFormatting = "%1$s => %3$s") public 
static Iterable compareTwoDates() { - return Arrays.asList($$( - $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), - $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), - $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), - $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), - $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), - $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), - $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), - $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), - $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), - $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), - $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), - $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), - $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), - $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), - $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), + $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), + $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), + $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), + $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), + $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), + $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), + $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), + $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), + $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), + $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), + $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), + $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), + $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), + $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false))); } 
@ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimes() { - return Arrays.asList($$( - $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), - $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), - $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), - $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), - $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), - $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), - $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), - $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), - $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), - $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), - $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), - $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), - $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), - $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), - $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), + $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), + $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), + $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), + $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), + $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), + $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), + $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), + $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), + $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), + $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), + $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), + $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), + $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), + $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static 
Iterable compareTwoDateTimes() { - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $("DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), + $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), + $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), + $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), + $("DATETIME('1961-04-12 09:07:00') != 
DATETIME('1961-04-12 09:07:00')", "neq3", false), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), + $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), + $( + "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), - $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), - $("TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", "neq3", false), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), - $("TIMESTAMP('1961-04-12 
09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", "gte3", false), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), + $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", + "neq3", + false), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), + $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", 
true), + $( + "TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", + "gte3", + false), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), - $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_f", + 
false), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), + $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = 
DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = 
TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') = DATETIME('" + today + " 
10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 
10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - 
$("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != 
DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') != 
TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", 
"ts_d_t", true), - $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - 
$("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { - return Arrays.asList($$( - 
$("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') < DATETIME('3077-04-12 
09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", "ts_dt_t", 
true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } 
@ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), + 
$("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), + $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') > 
DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), + 
$("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), - $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), + 
$("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), + $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", + 
"ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), + $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), + $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') <= 
DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", 
"dt_t_f", false), + $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", + 
"dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= 
DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), - $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') >= DATE('2020-09-16')", 
"t_d_t", true), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), + $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), + $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), - 
$("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false))); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java index fc05e502c5..d6f2d2c7f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS; @@ -34,57 +33,72 @@ public void init() throws Exception { @Test public void testReadingDateFormats() throws IOException { - String query = String.format("SELECT weekyear_week_day, hour_minute_second_millis," + - " strict_ordinal_date_time FROM %s LIMIT 1", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT weekyear_week_day, hour_minute_second_millis," + + " strict_ordinal_date_time FROM %s LIMIT 1", + 
TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("weekyear_week_day", null, "date"), schema("hour_minute_second_millis", null, "time"), schema("strict_ordinal_date_time", null, "timestamp")); - verifyDataRows(result, - rows("1984-04-12", - "09:07:42", - "1984-04-12 09:07:42.000123456" - )); + verifyDataRows(result, rows("1984-04-12", "09:07:42", "1984-04-12 09:07:42.000123456")); } @Test public void testDateFormatsWithOr() throws IOException { - String query = String.format("SELECT yyyy-MM-dd_OR_epoch_millis FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format("SELECT yyyy-MM-dd_OR_epoch_millis FROM %s", TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("1984-04-12 00:00:00"), - rows("1984-04-12 09:07:42.000123456")); + verifyDataRows(result, rows("1984-04-12 00:00:00"), rows("1984-04-12 09:07:42.000123456")); } @Test @SneakyThrows public void testCustomFormats() { - String query = String.format("SELECT custom_time, custom_timestamp, custom_date_or_date," - + "custom_date_or_custom_time, custom_time_parser_check FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT custom_time, custom_timestamp, custom_date_or_date," + + "custom_date_or_custom_time, custom_time_parser_check FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("custom_time", null, "time"), schema("custom_timestamp", null, "timestamp"), schema("custom_date_or_date", null, "date"), schema("custom_date_or_custom_time", null, "timestamp"), schema("custom_time_parser_check", null, "time")); - verifyDataRows(result, - rows("09:07:42", "1984-04-12 09:07:42", "1984-04-12", "1961-04-12 00:00:00", "23:44:36.321"), - rows("21:07:42", "1984-04-12 22:07:42", "1984-04-12", "1970-01-01 09:07:00", "09:01:16.542")); + verifyDataRows( + result, + rows( + "09:07:42", 
"1984-04-12 09:07:42", "1984-04-12", "1961-04-12 00:00:00", "23:44:36.321"), + rows( + "21:07:42", + "1984-04-12 22:07:42", + "1984-04-12", + "1970-01-01 09:07:00", + "09:01:16.542")); } @Test @SneakyThrows public void testCustomFormats2() { - String query = String.format("SELECT custom_no_delimiter_date, custom_no_delimiter_time," - + "custom_no_delimiter_ts FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT custom_no_delimiter_date, custom_no_delimiter_time," + + "custom_no_delimiter_ts FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("custom_no_delimiter_date", null, "date"), schema("custom_no_delimiter_time", null, "time"), schema("custom_no_delimiter_ts", null, "timestamp")); - verifyDataRows(result, + verifyDataRows( + result, rows("1984-10-20", "10:20:30", "1984-10-20 15:35:48"), rows("1961-04-12", "09:07:00", "1961-04-12 09:07:00")); } @@ -92,16 +106,21 @@ public void testCustomFormats2() { @Test @SneakyThrows public void testIncompleteFormats() { - String query = String.format("SELECT incomplete_1, incomplete_2, incorrect," - + "incomplete_custom_time, incomplete_custom_date FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT incomplete_1, incomplete_2, incorrect," + + "incomplete_custom_time, incomplete_custom_date FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("incomplete_1", null, "timestamp"), schema("incomplete_2", null, "date"), schema("incorrect", null, "timestamp"), schema("incomplete_custom_time", null, "time"), schema("incomplete_custom_date", null, "date")); - verifyDataRows(result, + verifyDataRows( + result, rows("1984-01-01 00:00:00", null, null, "10:00:00", "1999-01-01"), rows("2012-01-01 00:00:00", null, null, "20:00:00", "3021-01-01")); } @@ -109,13 +128,13 @@ public void testIncompleteFormats() { @Test 
@SneakyThrows public void testNumericFormats() { - String query = String.format("SELECT epoch_sec, epoch_milli" - + " FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format("SELECT epoch_sec, epoch_milli" + " FROM %s", TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, - schema("epoch_sec", null, "timestamp"), - schema("epoch_milli", null, "timestamp")); - verifyDataRows(result, + verifySchema( + result, schema("epoch_sec", null, "timestamp"), schema("epoch_milli", null, "timestamp")); + verifyDataRows( + result, rows("1970-01-01 00:00:42", "1970-01-01 00:00:00.042"), rows("1970-01-02 03:55:00", "1970-01-01 00:01:40.5")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java index ab5aa46853..33eb8b693f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -60,12 +59,14 @@ public void resetTimeZone() { } @Test - public void testDateInGroupBy() throws IOException{ + public void testDateInGroupBy() throws IOException { JSONObject result = - executeQuery(String.format("SELECT DATE(birthdate) FROM %s GROUP BY DATE(birthdate)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("DATE(birthdate)", null, "date")); - verifyDataRows(result, + executeQuery( + String.format( + "SELECT DATE(birthdate) FROM %s GROUP BY DATE(birthdate)", TEST_INDEX_BANK)); + verifySchema(result, schema("DATE(birthdate)", null, "date")); + verifyDataRows( + result, rows("2017-10-23"), rows("2017-11-20"), rows("2018-06-23"), @@ -78,9 +79,11 @@ public void testDateInGroupBy() throws IOException{ @Test public void testDateWithHavingClauseOnly() throws IOException { 
JSONObject result = - executeQuery(String.format("SELECT (TO_DAYS(DATE('2050-01-01')) - 693961) FROM %s HAVING (COUNT(1) > 0)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("(TO_DAYS(DATE('2050-01-01')) - 693961)", null, "long")); + executeQuery( + String.format( + "SELECT (TO_DAYS(DATE('2050-01-01')) - 693961) FROM %s HAVING (COUNT(1) > 0)", + TEST_INDEX_BANK)); + verifySchema(result, schema("(TO_DAYS(DATE('2050-01-01')) - 693961)", null, "long")); verifyDataRows(result, rows(54787)); } @@ -107,83 +110,98 @@ public void testAddDateWithDays() throws IOException { public void testAddDateWithInterval() throws IOException { JSONObject result = executeQuery("select adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("adddate(TIME('07:40:00'), 
interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testDateAdd() throws IOException { JSONObject result = executeQuery("select date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("date_add(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), 
interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - - result = executeQuery(String.format("SELECT DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", - TEST_INDEX_BANK)); - - verifySchema(result, - schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "datetime")); - verifyDataRows(result, + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + + result = + executeQuery( + String.format("SELECT 
DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", TEST_INDEX_BANK)); + + verifySchema(result, schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "datetime")); + verifyDataRows( + result, rows("2018-10-23 00:00:00"), rows("2018-11-20 00:00:00"), rows("2019-06-23 00:00:00"), @@ -197,38 +215,45 @@ public void testDateAdd() throws IOException { public void testDateSub() throws IOException { JSONObject result = executeQuery("select date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + 
verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test @@ -282,30 +307,34 @@ public void testDayOfMonthAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(22)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofmonth(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_month(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + 
executeQuery( + String.format( + "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofmonth(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofmonth(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_month(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } + @Test public void testDayOfWeek() throws IOException { JSONObject result = executeQuery("select dayofweek(date('2020-09-16'))"); @@ -335,28 +364,31 @@ public void testDayOfWeekAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(3)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofweek(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_week(date0) FROM %s", TEST_INDEX_CALCS)); 
result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofweek(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_week(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofweek(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_week(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -397,30 +429,34 @@ public void testDayOfYearAlternateSyntaxesReturnTheSameResults() throws IOExcept verifyDataRows(result1, rows(326)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = 
executeQuery(String.format( - "SELECT day_of_year(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofyear(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_year(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofyear(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofyear(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } + @Test public void testFromDays() throws 
IOException { JSONObject result = executeQuery("select from_days(738049)"); @@ -450,13 +486,11 @@ public void testHour() throws IOException { @Test public void testHourOfDayWithUnderscores() throws IOException { JSONObject result = executeQuery("select hour_of_day(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema(result, schema("hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); result = executeQuery("select hour_of_day(datetime('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); + verifySchema(result, schema("hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); result = executeQuery("select hour_of_day(time('17:30:00'))"); @@ -474,41 +508,45 @@ public void testHourOfDayWithUnderscores() throws IOException { @Test public void testExtractWithDatetime() throws IOException { - JSONObject datetimeResult = executeQuery( - String.format( - "SELECT extract(DAY_SECOND FROM datetime(cast(datetime0 AS STRING))) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject datetimeResult = + executeQuery( + String.format( + "SELECT extract(DAY_SECOND FROM datetime(cast(datetime0 AS STRING))) FROM %s LIMIT" + + " 1", + TEST_INDEX_CALCS)); verifyDataRows(datetimeResult, rows(9101735)); } @Test public void testExtractWithTime() throws IOException { - JSONObject timeResult = executeQuery( - String.format( - "SELECT extract(HOUR_SECOND FROM time0) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject timeResult = + executeQuery( + String.format( + "SELECT extract(HOUR_SECOND FROM time0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); verifyDataRows(timeResult, rows(210732)); - } @Test public void testExtractWithDate() throws IOException { - JSONObject dateResult = executeQuery( - String.format( - "SELECT extract(YEAR_MONTH FROM 
date0) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject dateResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM date0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); verifyDataRows(dateResult, rows(200404)); } @Test public void testExtractWithDifferentTypesReturnSameResult() throws IOException { - JSONObject dateResult = executeQuery( - String.format("SELECT extract(YEAR_MONTH FROM datetime0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); + JSONObject dateResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM datetime0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); - JSONObject datetimeResult = executeQuery( - String.format( - "SELECT extract(YEAR_MONTH FROM date(datetime0)) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject datetimeResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM date(datetime0)) FROM %s LIMIT 1", + TEST_INDEX_CALCS)); dateResult.getJSONArray("datarows").similar(datetimeResult.getJSONArray("datarows")); } @@ -520,63 +558,55 @@ public void testHourFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(11)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + 
result1 = + executeQuery(String.format("SELECT hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT hour_of_day(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT hour(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT hour_of_day(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @Test public void testLastDay() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); - - result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); - - result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); + JSONObject result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); + + result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); + + result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), 
rows("1975-11-30")); } @Test public void testMicrosecond() throws IOException { JSONObject result = executeQuery("select microsecond(timestamp('2020-09-16 17:30:00.123456'))"); - verifySchema(result, - schema("microsecond(timestamp('2020-09-16 17:30:00.123456'))", null, "integer")); + verifySchema( + result, schema("microsecond(timestamp('2020-09-16 17:30:00.123456'))", null, "integer")); verifyDataRows(result, rows(123456)); // Explicit timestamp value with less than 6 microsecond digits result = executeQuery("select microsecond(timestamp('2020-09-16 17:30:00.1234'))"); - verifySchema(result, - schema("microsecond(timestamp('2020-09-16 17:30:00.1234'))", null, "integer")); + verifySchema( + result, schema("microsecond(timestamp('2020-09-16 17:30:00.1234'))", null, "integer")); verifyDataRows(result, rows(123400)); result = executeQuery("select microsecond(time('17:30:00.000010'))"); @@ -626,11 +656,11 @@ public void testMinute() throws IOException { verifyDataRows(result, rows(30)); } - @Test public void testMinuteOfDay() throws IOException { JSONObject result = executeQuery("select minute_of_day(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(1050)); result = executeQuery("select minute_of_day(datetime('2020-09-16 17:30:00'))"); @@ -653,8 +683,8 @@ public void testMinuteOfDay() throws IOException { @Test public void testMinuteOfHour() throws IOException { JSONObject result = executeQuery("select minute_of_hour(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "minute_of_hour(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("minute_of_hour(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(30)); result = executeQuery("select 
minute_of_hour(time('17:30:00'))"); @@ -677,22 +707,29 @@ public void testMinuteFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(30)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT minute_of_hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT minute(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT minute_of_hour(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -718,7 +755,8 @@ public void testMonthOfYearTypes() throws IOException { verifyDataRows(result, rows(9)); result = executeQuery("select 
month_of_year(timestamp('2020-09-16 00:00:00'))"); - verifySchema(result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); + verifySchema( + result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); verifyDataRows(result, rows(9)); result = executeQuery("select month_of_year('2020-09-16')"); @@ -733,28 +771,31 @@ public void testMonthAlternateSyntaxesReturnTheSameResults() throws IOException verifyDataRows(result1, rows(11)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT month(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT month_of_year(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT 
month_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT month(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT month_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -782,12 +823,9 @@ public void testQuarter() throws IOException { @Test public void testSecToTime() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT sec_to_time(balance) FROM %s LIMIT 3", TEST_INDEX_BANK)); - verifyDataRows(result, - rows("10:53:45"), - rows("01:34:46"), - rows("09:07:18")); + JSONObject result = + executeQuery(String.format("SELECT sec_to_time(balance) FROM %s LIMIT 3", TEST_INDEX_BANK)); + verifyDataRows(result, rows("10:53:45"), rows("01:34:46"), rows("09:07:18")); } @Test @@ -811,7 +849,8 @@ public void testSecond() throws IOException { public void testSecondOfMinute() throws IOException { JSONObject result = executeQuery("select second_of_minute(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema("second_of_minute(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("second_of_minute(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(0)); result = executeQuery("select second_of_minute(time('17:30:00'))"); @@ -834,70 +873,68 @@ public void testSecondFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(34)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", 
TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT second(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT second_of_minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT second(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT second_of_minute(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @Test public void testStrToDate() throws IOException { - //Ideal case - JSONObject result = executeQuery( - String.format("SELECT str_to_date(CAST(birthdate AS STRING)," - + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows("2017-10-23 00:00:00"), - rows("2017-11-20 00:00:00") - ); - - //Bad string format case - result = executeQuery( - String.format("SELECT str_to_date(CAST(birthdate AS STRING)," - + " '%%Y %%s') FROM %s LIMIT 
2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows((Object) null), - rows((Object) null) - ); - - //bad date format case - result = executeQuery( - String.format("SELECT str_to_date(firstname," - + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows((Object) null), - rows((Object) null) - ); + // Ideal case + JSONObject result = + executeQuery( + String.format( + "SELECT str_to_date(CAST(birthdate AS STRING)," + + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows("2017-10-23 00:00:00"), rows("2017-11-20 00:00:00")); + + // Bad string format case + result = + executeQuery( + String.format( + "SELECT str_to_date(CAST(birthdate AS STRING)," + " '%%Y %%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows((Object) null), rows((Object) null)); + + // bad date format case + result = + executeQuery( + String.format( + "SELECT str_to_date(firstname," + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows((Object) null), rows((Object) null)); } @Test public void testSubDateWithDays() throws IOException { - var result = - executeQuery("select subdate(date('2020-09-16'), 1)"); - verifySchema(result, - schema("subdate(date('2020-09-16'), 1)", null, "date")); + var result = executeQuery("select subdate(date('2020-09-16'), 1)"); + verifySchema(result, schema("subdate(date('2020-09-16'), 1)", null, "date")); verifyDataRows(result, rows("2020-09-15")); - result = - executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); - verifySchema(result, - schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); + result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); + verifySchema(result, schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(DATETIME('2020-09-16 
07:40:00'), 1)"); @@ -913,60 +950,68 @@ public void testSubDateWithDays() throws IOException { public void testSubDateWithInterval() throws IOException { JSONObject result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + 
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test - public void testTimstampadd() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT timestampadd(WEEK, 2, time0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + public void testTimstampadd() throws IOException { + JSONObject result = + executeQuery( + String.format("SELECT timestampadd(WEEK, 2, time0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); - verifyDataRows(result, + verifyDataRows( + result, rows("1900-01-13 21:07:32"), rows("1900-01-15 13:48:48"), rows("1900-01-15 18:21:08")); } @Test - public void testTimstampdiff() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT timestampdiff(DAY, time0, datetime0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + public void testTimstampdiff() throws IOException { + JSONObject result = + executeQuery( + String.format( + "SELECT timestampdiff(DAY, time0, datetime0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); - verifyDataRows(result, - rows(38176), - rows(38191), - rows(38198)); + verifyDataRows(result, rows(38176), rows(38191), rows(38198)); } @Test @@ -993,16 +1038,20 @@ public void testToDays() throws IOException { @Test public void testToSeconds() throws IOException { - JSONObject result = executeQuery( - String.format("select to_seconds(date0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + JSONObject result = + 
executeQuery(String.format("select to_seconds(date0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63249206400L), rows(62246275200L)); - result = executeQuery( - String.format("SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + result = + executeQuery( + String.format( + "SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", + TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); - result = executeQuery(String.format( - "select to_seconds(datetime0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + result = + executeQuery( + String.format("select to_seconds(datetime0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); } @@ -1017,11 +1066,14 @@ public void testYear() throws IOException { verifyDataRows(result, rows(2020)); } - private void week(String date, int mode, int expectedResult, String functionName) throws IOException { - JSONObject result = executeQuery(StringUtils.format("select %s(date('%s'), %d)", functionName, date, - mode)); - verifySchema(result, - schema(StringUtils.format("%s(date('%s'), %d)", functionName, date, mode), null, "integer")); + private void week(String date, int mode, int expectedResult, String functionName) + throws IOException { + JSONObject result = + executeQuery(StringUtils.format("select %s(date('%s'), %d)", functionName, date, mode)); + verifySchema( + result, + schema( + StringUtils.format("%s(date('%s'), %d)", functionName, date, mode), null, "integer")); verifyDataRows(result, rows(expectedResult)); } @@ -1040,7 +1092,8 @@ public void testWeek() throws IOException { @Test public void testWeekday() throws IOException { - JSONObject result = executeQuery(String.format("SELECT weekday(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery(String.format("SELECT weekday(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); verifyDataRows(result, rows(3), rows(1), 
rows(2)); } @@ -1071,12 +1124,9 @@ public void testWeekOfYear() throws IOException { } private void compareWeekResults(String arg, String table) throws IOException { - JSONObject result1 = executeQuery(String.format( - "SELECT week(%s) FROM %s", arg, table)); - JSONObject result2 = executeQuery(String.format( - "SELECT week_of_year(%s) FROM %s", arg, table)); - JSONObject result3 = executeQuery(String.format( - "SELECT weekofyear(%s) FROM %s", arg, table)); + JSONObject result1 = executeQuery(String.format("SELECT week(%s) FROM %s", arg, table)); + JSONObject result2 = executeQuery(String.format("SELECT week_of_year(%s) FROM %s", arg, table)); + JSONObject result3 = executeQuery(String.format("SELECT weekofyear(%s) FROM %s", arg, table)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1.getJSONArray("datarows").similar(result3.getJSONArray("datarows")); @@ -1099,13 +1149,16 @@ public void testWeekAlternateSyntaxesReturnTheSameResults() throws IOException { @Test public void testYearweek() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT yearweek(time0), yearweek(time0, 4) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery( + String.format( + "SELECT yearweek(time0), yearweek(time0, 4) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(189952, 189952), rows(189953, 190001)); } - void verifyDateFormat(String date, String type, String format, String formatted) throws IOException { + void verifyDateFormat(String date, String type, String format, String formatted) + throws IOException { String query = String.format("date_format(%s('%s'), '%s')", type, date, format); JSONObject result = executeQuery("select " + query); verifySchema(result, schema(query, null, "keyword")); @@ -1120,10 +1173,11 @@ void verifyDateFormat(String date, String type, String format, String formatted) @Test public void testDateFormat() throws IOException { String timestamp = "1998-01-31 
13:14:15.012345"; - String timestampFormat = "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " - + "%m %p %r %S %s %T %% %P"; - String timestampFormatted = "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " - + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; + String timestampFormat = + "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " + "%m %p %r %S %s %T %% %P"; + String timestampFormatted = + "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " + + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; verifyDateFormat(timestamp, "timestamp", timestampFormat, timestampFormatted); String date = "1998-01-31"; @@ -1134,9 +1188,10 @@ public void testDateFormat() throws IOException { @Test public void testMakeTime() throws IOException { - var result = executeQuery( - "select MAKETIME(20, 30, 40) as f1, MAKETIME(20.2, 49.5, 42.100502) as f2"); - verifySchema(result, + var result = + executeQuery("select MAKETIME(20, 30, 40) as f1, MAKETIME(20.2, 49.5, 42.100502) as f2"); + verifySchema( + result, schema("MAKETIME(20, 30, 40)", "f1", "time"), schema("MAKETIME(20.2, 49.5, 42.100502)", "f2", "time")); verifyDataRows(result, rows("20:30:40", "20:50:42.100502")); @@ -1144,9 +1199,9 @@ public void testMakeTime() throws IOException { @Test public void testMakeDate() throws IOException { - var result = executeQuery( - "select MAKEDATE(1945, 5.9) as f1, MAKEDATE(1984, 1984) as f2"); - verifySchema(result, + var result = executeQuery("select MAKEDATE(1945, 5.9) as f1, MAKEDATE(1984, 1984) as f2"); + verifySchema( + result, schema("MAKEDATE(1945, 5.9)", "f1", "date"), schema("MAKEDATE(1984, 1984)", "f2", "date")); verifyDataRows(result, rows("1945-01-06", "1989-06-06")); @@ -1154,30 +1209,35 @@ public void testMakeDate() throws IOException { @Test public void testFromUnixTime() throws IOException { - var result = executeQuery( - "select FROM_UNIXTIME(200300400) f1, FROM_UNIXTIME(12224.12) f2, " - + "FROM_UNIXTIME(1662601316, '%T') f3"); - verifySchema(result, - 
schema("FROM_UNIXTIME(200300400)", "f1", "datetime"), + var result = + executeQuery( + "select FROM_UNIXTIME(200300400) f1, FROM_UNIXTIME(12224.12) f2, " + + "FROM_UNIXTIME(1662601316, '%T') f3"); + verifySchema( + result, + schema("FROM_UNIXTIME(200300400)", "f1", "datetime"), schema("FROM_UNIXTIME(12224.12)", "f2", "datetime"), schema("FROM_UNIXTIME(1662601316, '%T')", "f3", "keyword")); - verifySome(result.getJSONArray("datarows"), + verifySome( + result.getJSONArray("datarows"), rows("1976-05-07 07:00:00", "1970-01-01 03:23:44.12", "01:41:56")); } @Test - public void testGetFormatAsArgument() throws IOException{ + public void testGetFormatAsArgument() throws IOException { var result = executeQuery("SELECT DATE_FORMAT('2003-10-03',GET_FORMAT(DATE,'USA'))"); verifyDataRows(result, rows("10.03.2003")); } @Test public void testUnixTimeStamp() throws IOException { - var result = executeQuery( - "select UNIX_TIMESTAMP(MAKEDATE(1984, 1984)) f1, " - + "UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')) f2, " - + "UNIX_TIMESTAMP(20771122143845) f3"); - verifySchema(result, + var result = + executeQuery( + "select UNIX_TIMESTAMP(MAKEDATE(1984, 1984)) f1, " + + "UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')) f2, " + + "UNIX_TIMESTAMP(20771122143845) f3"); + verifySchema( + result, schema("UNIX_TIMESTAMP(MAKEDATE(1984, 1984))", "f1", "double"), schema("UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00'))", "f2", "double"), schema("UNIX_TIMESTAMP(20771122143845)", "f3", "double")); @@ -1186,9 +1246,9 @@ public void testUnixTimeStamp() throws IOException { @Test public void testPeriodAdd() throws IOException { - var result = executeQuery( - "select PERIOD_ADD(200801, 2) as f1, PERIOD_ADD(200801, -12) as f2"); - verifySchema(result, + var result = executeQuery("select PERIOD_ADD(200801, 2) as f1, PERIOD_ADD(200801, -12) as f2"); + verifySchema( + result, schema("PERIOD_ADD(200801, 2)", "f1", "integer"), schema("PERIOD_ADD(200801, -12)", "f2", "integer")); verifyDataRows(result, 
rows(200803, 200701)); @@ -1196,57 +1256,103 @@ public void testPeriodAdd() throws IOException { @Test public void testPeriodDiff() throws IOException { - var result = executeQuery( - "select PERIOD_DIFF(200802, 200703) as f1, PERIOD_DIFF(200802, 201003) as f2"); - verifySchema(result, + var result = + executeQuery("select PERIOD_DIFF(200802, 200703) as f1, PERIOD_DIFF(200802, 201003) as f2"); + verifySchema( + result, schema("PERIOD_DIFF(200802, 200703)", "f1", "integer"), schema("PERIOD_DIFF(200802, 201003)", "f2", "integer")); verifyDataRows(result, rows(11, -25)); } public void testAddTime() throws IOException { - var result = executeQuery("SELECT" - + " ADDTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' + 0`," - + " ADDTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' + 0`," - + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," - + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," - + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS `'15:42:13' + '09:07:00'`"); - verifySchema(result, + var result = + executeQuery( + "SELECT ADDTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' + 0`," + + " ADDTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' + 0`," + + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," + + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " `'15:42:13' + '09:07:00'`"); + verifySchema( + result, schema("ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' + 0", "datetime"), schema("ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' + 0", "time"), - schema("ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' + '23:59:59'", "datetime"), + schema( + "ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", + "'2004-01-01' + '23:59:59'", + "datetime"), 
schema("ADDTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' + '00:05:42'", "time"), - schema("ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", "'15:42:13' + '09:07:00'", "datetime")); - verifyDataRows(result, rows("2008-12-12 00:00:00", "23:59:59", "2004-01-01 23:59:59", "10:26:12", "2000-01-01 00:49:13")); + schema( + "ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "'15:42:13' + '09:07:00'", + "datetime")); + verifyDataRows( + result, + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2004-01-01 23:59:59", + "10:26:12", + "2000-01-01 00:49:13")); } @Test public void testSubTime() throws IOException { - var result = executeQuery("SELECT" - + " SUBTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' - 0`," - + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," - + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," - + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," - + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS `'15:42:13' - '09:07:00'`"); - verifySchema(result, + var result = + executeQuery( + "SELECT SUBTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' - 0`," + + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," + + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," + + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " `'15:42:13' - '09:07:00'`"); + verifySchema( + result, schema("SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' - 0", "datetime"), schema("SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' - 0", "time"), - schema("SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' - '23:59:59'", "datetime"), + schema( + "SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", 
+ "'2004-01-01' - '23:59:59'", + "datetime"), schema("SUBTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' - '00:05:42'", "time"), - schema("SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", "'15:42:13' - '09:07:00'", "datetime")); - verifyDataRows(result, rows("2008-12-12 00:00:00", "23:59:59", "2003-12-31 00:00:01", "10:14:48", "1999-12-31 06:35:13")); + schema( + "SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "'15:42:13' - '09:07:00'", + "datetime")); + verifyDataRows( + result, + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2003-12-31 00:00:01", + "10:14:48", + "1999-12-31 06:35:13")); } public void testDateDiff() throws IOException { - var result = executeQuery("SELECT" - + " DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS `'2000-01-02' - '2000-01-01'`," - + " DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," - + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS `'2004-01-01' - '2002-02-01'`," - + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS `today - today`"); - verifySchema(result, - schema("DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))", "'2000-01-02' - '2000-01-01'", "long"), - schema("DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))", "'2001-02-01' - '2004-01-01'", "long"), - schema("DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", "'2004-01-01' - '2002-02-01'", "long"), + var result = + executeQuery( + "SELECT DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS" + + " `'2000-01-02' - '2000-01-01'`, DATEDIFF(DATE('2001-02-01')," + + " TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," + + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS" + + " `'2004-01-01' - '2002-02-01'`, DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS" + + 
" `today - today`"); + verifySchema( + result, + schema( + "DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))", + "'2000-01-02' - '2000-01-01'", + "long"), + schema( + "DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))", + "'2001-02-01' - '2004-01-01'", + "long"), + schema( + "DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", + "'2004-01-01' - '2002-02-01'", + "long"), schema("DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))", "today - today", "long")); verifyDataRows(result, rows(1, -1064, 699, 0)); } @@ -1258,7 +1364,8 @@ public void testTimeDiff() throws IOException { verifyDataRows(result, rows("10:59:59")); } - void verifyTimeFormat(String time, String type, String format, String formatted) throws IOException { + void verifyTimeFormat(String time, String type, String format, String formatted) + throws IOException { String query = String.format("time_format(%s('%s'), '%s')", type, time, format); JSONObject result = executeQuery("select " + query); verifySchema(result, schema(query, null, "keyword")); @@ -1361,13 +1468,13 @@ public void testBracketedEquivalent() throws IOException { @Test public void testBracketFails() { - assertThrows(ResponseException.class, ()->executeQuery("select {time '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {t '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {date '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {d '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {timestamp '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {ts '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {timestamp '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {ts '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {time 
'2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {t '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {date '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {d '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {timestamp '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {ts '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {timestamp '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {ts '17:30:00'}")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java index 94a5b4fb16..750be47de3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java @@ -5,15 +5,15 @@ package org.opensearch.sql.sql; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import java.io.IOException; + import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; -import java.io.IOException; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; - public class DateTimeImplementationIT extends SQLIntegTestCase { @@ -25,128 +25,112 @@ public void init() throws Exception { @Test public void inRangeZeroToStringTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); - verifySchema(result, + var result = + executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); + verifySchema( + result, 
schema("DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')", null, "datetime")); verifyDataRows(result, rows("2008-12-24 21:30:00")); } @Test public void inRangeZeroToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); - verifySchema(result, - schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); + verifySchema( + result, schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); verifyDataRows(result, rows("2008-12-25 06:30:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); - verifySchema(result, - schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); + verifySchema( + result, schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); verifyDataRows(result, rows("2008-12-25 15:30:00")); } @Test public void inRangeTwentyHourOffset() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); - verifySchema(result, - schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); + verifySchema( + result, schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); verifyDataRows(result, rows("2004-02-29 19:00:00")); } @Test public void inRangeYearChange() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT 
DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroNoToTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00')"); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroNoTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00')"); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00')", null, "datetime")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroDayConvert() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @Test public void inRangeJustInRangeNegative() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); + 
verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:01:00")); } @Test public void inRangeJustInRangePositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @Test public void nullField3Under() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-14:01')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-14:01')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField1Over() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-02-30 10:00:00')"); - verifySchema(result, - 
schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-02-30 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-04-31 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-04-31 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java index 30211366b1..be1471641e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.is; @@ -23,15 +22,14 @@ import org.opensearch.sql.legacy.RestIntegTestCase; /** - * Integration test for different type of expressions such as literals, arithmetic, predicate - * and function expression. Since comparison test in {@link SQLCorrectnessIT} is enforced, - * this kind of manual written IT class will be focused on anomaly case test. + * Integration test for different type of expressions such as literals, arithmetic, predicate and + * function expression. Since comparison test in {@link SQLCorrectnessIT} is enforced, this kind of + * manual written IT class will be focused on anomaly case test. 
*/ @Ignore public class ExpressionIT extends RestIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { @@ -44,8 +42,7 @@ public ResponseExceptionAssertion expectResponseException() { /** * Response exception assertion helper to assert property value in OpenSearch ResponseException - * and Response inside. This serves as syntax sugar to improve the readability of test - * code. + * and Response inside. This serves as syntax sugar to improve the readability of test code. */ private static class ResponseExceptionAssertion { private final ExpectedException exceptionRule; @@ -57,9 +54,12 @@ private ResponseExceptionAssertion(ExpectedException exceptionRule) { } ResponseExceptionAssertion hasStatusCode(int expected) { - exceptionRule.expect(featureValueOf("statusCode", is(expected), - (Function) e -> - e.getResponse().getStatusLine().getStatusCode())); + exceptionRule.expect( + featureValueOf( + "statusCode", + is(expected), + (Function) + e -> e.getResponse().getStatusLine().getStatusCode())); return this; } @@ -83,5 +83,4 @@ private static Response executeQuery(String query) throws IOException { return client().performRequest(request); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java index d55972691c..d0f890526b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java @@ -30,90 +30,126 @@ public void single_highlight_test() { String query = "SELECT Tags, highlight('Tags') FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("Tags", null, "text"), - 
schema("highlight('Tags')", null, "nested")); + verifySchema( + response, schema("Tags", null, "text"), schema("highlight('Tags')", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, - rows("alcohol-level yeast home-brew champagne", + verifyDataRows( + response, + rows( + "alcohol-level yeast home-brew champagne", new JSONArray(List.of("alcohol-level yeast home-brew champagne")))); } @Test public void highlight_optional_arguments_test() { - String query = "SELECT highlight('Tags', pre_tags='', post_tags='') " + - "FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; + String query = + "SELECT highlight('Tags', pre_tags='', post_tags='') " + + "FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight('Tags', pre_tags='', post_tags='')", - null, "nested")); + verifySchema( + response, + schema("highlight('Tags', pre_tags='', post_tags='')", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, + verifyDataRows( + response, rows(new JSONArray(List.of("alcohol-level yeast home-brew champagne")))); } @Test public void highlight_multiple_optional_arguments_test() { - String query = "SELECT highlight(Title), highlight(Body, pre_tags='', post_tags='') FROM %s WHERE multi_match([Title, Body], 'IPA') LIMIT 1"; + String query = + "SELECT highlight(Title), highlight(Body, pre_tags='', post_tags='') FROM %s WHERE multi_match([Title, Body], 'IPA')" + + " LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight(Title)", null, "nested"), - schema("highlight(Body, pre_tags='', " + - "post_tags='')", null, "nested")); + verifySchema( + response, + schema("highlight(Title)", null, "nested"), + schema( + "highlight(Body, pre_tags='', " + + "post_tags='')", + null, + "nested")); assertEquals(1, 
response.getInt("size")); - verifyDataRows(response, rows(new JSONArray(List.of("What are the differences between an IPA" + - " and its variants?")), - new JSONArray(List.of("

I know what makes an IPA" + - " an IPA, but what are the unique characteristics of it's" + - " common variants?", - "To be specific, the ones I'm interested in are Double IPA " + - "and Black IPA, but general differences" + - " between")))); + verifyDataRows( + response, + rows( + new JSONArray( + List.of("What are the differences between an IPA" + " and its variants?")), + new JSONArray( + List.of( + "

I know what makes an IPA an" + + " IPA, but what are the" + + " unique characteristics of it's common variants?", + "To be specific, the ones I'm interested in are Double IPA and Black IPA, but general differences" + + " between")))); } @Test public void multiple_highlight_test() { - String query = "SELECT highlight(Title), highlight(Tags) FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight(Title), highlight(Tags) FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops')" + + " LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight(Title)", null, "nested"), + verifySchema( + response, + schema("highlight(Title)", null, "nested"), schema("highlight(Tags)", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, - rows( new JSONArray(List.of("What uses do hops have outside of brewing?")), + verifyDataRows( + response, + rows( + new JSONArray(List.of("What uses do hops have outside of brewing?")), new JSONArray(List.of("hops history")))); } @Test public void wildcard_highlight_test() { - String query = "SELECT highlight('*itle') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight('*itle') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); verifySchema(response, schema("highlight('*itle')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")))))); } @Test public void wildcard_multi_field_highlight_test() { - String query = "SELECT highlight('T*') FROM %s 
WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight('T*') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); verifySchema(response, schema("highlight('T*')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")), - "Tags", new JSONArray(List.of("hops history")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")), + "Tags", new JSONArray(List.of("hops history")))))); } @Test @@ -124,9 +160,15 @@ public void highlight_all_test() { verifySchema(response, schema("highlight('*')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")), - "Tags", new JSONArray(List.of("hops history")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")), + "Tags", new JSONArray(List.of("hops history")))))); } @Test @@ -136,14 +178,23 @@ public void highlight_no_limit_test() { verifySchema(response, schema("highlight(Body)", null, "nested")); assertEquals(2, response.getInt("total")); - verifyDataRows(response, rows(new JSONArray(List.of("Boiling affects hops, by boiling" + - " off the aroma and extracting more of the organic acids that provide"))), - - rows(new JSONArray(List.of("

Do hops have (or had in the past) any use outside of brewing beer?", - "when-was-the-first-beer-ever-brewed\">dating first modern beers we have the first record" + - " of cultivating hops", - "predating the first record of use of hops in beer by nearly a century.", - "Could the hops have been cultivated for any other purpose than brewing, " + - "or can we safely assume if they")))); + verifyDataRows( + response, + rows( + new JSONArray( + List.of( + "Boiling affects hops, by boiling" + + " off the aroma and extracting more of the organic acids that provide"))), + rows( + new JSONArray( + List.of( + "

Do hops have (or had in the past) any use outside of brewing" + + " beer?", + "when-was-the-first-beer-ever-brewed\">dating first modern beers we have" + + " the first record of cultivating hops", + "predating the first record of use of hops in beer by nearly a" + + " century.", + "Could the hops have been cultivated for any other purpose than" + + " brewing, or can we safely assume if they")))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java index 8d3f9e1509..2c1796f0c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -19,9 +18,7 @@ import org.opensearch.client.Request; import org.opensearch.sql.legacy.SQLIntegTestCase; -/** - * Integration tests for identifiers including index and field name symbol. - */ +/** Integration tests for identifiers including index and field name symbol. 
*/ public class IdentifierIT extends SQLIntegTestCase { @Test @@ -46,12 +43,13 @@ public void testQuotedIndexNames() throws IOException { @Test public void testSpecialFieldName() throws IOException { - new Index("test") - .addDoc("{\"@timestamp\": 10, \"dimensions:major_version\": 30}"); - final JSONObject result = new JSONObject(executeQuery("SELECT @timestamp, " - + "`dimensions:major_version` FROM test", "jdbc")); + new Index("test").addDoc("{\"@timestamp\": 10, \"dimensions:major_version\": 30}"); + final JSONObject result = + new JSONObject( + executeQuery("SELECT @timestamp, " + "`dimensions:major_version` FROM test", "jdbc")); - verifySchema(result, + verifySchema( + result, schema("@timestamp", null, "long"), schema("dimensions:major_version", null, "long")); verifyDataRows(result, rows(10, 30)); @@ -66,12 +64,11 @@ public void testMultipleQueriesWithSpecialIndexNames() throws IOException { @Test public void testDoubleUnderscoreIdentifierTest() throws IOException { - new Index("test.twounderscores") - .addDoc("{\"__age\": 30}"); - final JSONObject result = new JSONObject(executeQuery("SELECT __age FROM test.twounderscores", "jdbc")); + new Index("test.twounderscores").addDoc("{\"__age\": 30}"); + final JSONObject result = + new JSONObject(executeQuery("SELECT __age FROM test.twounderscores", "jdbc")); - verifySchema(result, - schema("__age", null, "long")); + verifySchema(result, schema("__age", null, "long")); verifyDataRows(result, rows(30)); } @@ -83,19 +80,20 @@ public void testMetafieldIdentifierTest() throws IOException { new Index(index).addDoc("{\"age\": 30}", id); // Execute using field metadata values - final JSONObject result = new JSONObject(executeQuery( - "SELECT *, _id, _index, _score, _maxscore, _sort " - + "FROM " + index, - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT *, _id, _index, _score, _maxscore, _sort " + "FROM " + index, "jdbc")); // Verify that the metadata values are returned when requested - 
verifySchema(result, - schema("age", null, "long"), - schema("_id", null, "keyword"), - schema("_index", null, "keyword"), - schema("_score", null, "float"), - schema("_maxscore", null, "float"), - schema("_sort", null, "long")); + verifySchema( + result, + schema("age", null, "long"), + schema("_id", null, "keyword"), + schema("_index", null, "keyword"), + schema("_score", null, "float"), + schema("_maxscore", null, "float"), + schema("_sort", null, "long")); verifyDataRows(result, rows(30, id, index, 1.0, 1.0, -2)); } @@ -113,13 +111,13 @@ public void testMetafieldIdentifierRoutingSelectTest() throws IOException { .addDocWithShardId("{\"age\": 35}", "test5", "test5"); // Execute using field metadata values filtering on the routing shard hash id - final JSONObject result = new JSONObject(executeQuery( - "SELECT age, _id, _index, _routing " - + "FROM " + index, - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery("SELECT age, _id, _index, _routing " + "FROM " + index, "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("age", null, "long"), schema("_id", null, "keyword"), schema("_index", null, "keyword"), @@ -151,14 +149,19 @@ public void testMetafieldIdentifierRoutingFilterTest() throws IOException { .addDocWithShardId("{\"age\": 36}", "test6", "test6"); // Execute using field metadata values filtering on the routing shard hash id - final JSONObject result = new JSONObject(executeQuery( - "SELECT _id, _index, _routing " - + "FROM " + index + " " - + "WHERE _routing = \\\"test4\\\"", - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT _id, _index, _routing " + + "FROM " + + index + + " " + + "WHERE _routing = \\\"test4\\\"", + "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("_id", null, "keyword"), schema("_index", null, "keyword"), schema("_routing", null, 
"keyword")); @@ -170,7 +173,6 @@ public void testMetafieldIdentifierRoutingFilterTest() throws IOException { assertEquals("test4", datarows.getJSONArray(0).getString(0)); // note that _routing in the SELECT clause returns the shard, not the routing hash id assertTrue(datarows.getJSONArray(0).getString(2).contains("[" + index + "]")); - } @Test @@ -181,14 +183,21 @@ public void testMetafieldIdentifierWithAliasTest() throws IOException { new Index(index).addDoc("{\"age\": 30}", id); // Execute using field metadata values - final JSONObject result = new JSONObject(executeQuery( - "SELECT _id AS A, _index AS B, _score AS C, _maxscore AS D, _sort AS E " - + "FROM " + index + " " - + "WHERE _id = \\\"" + id + "\\\"", - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT _id AS A, _index AS B, _score AS C, _maxscore AS D, _sort AS E " + + "FROM " + + index + + " " + + "WHERE _id = \\\"" + + id + + "\\\"", + "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("_id", "A", "keyword"), schema("_index", "B", "keyword"), schema("_score", "C", "float"), @@ -209,9 +218,7 @@ private void queryAndAssertTheDoc(String sql) { verifyDataRows(result, rows(30)); } - /** - * Index abstraction for test code readability. - */ + /** Index abstraction for test code readability. 
*/ private static class Index { private final String indexName; @@ -241,18 +248,20 @@ void addDoc(String doc) { } public Index addDoc(String doc, String id) { - Request indexDoc = new Request("POST", String.format("/%s/_doc/%s?refresh=true", indexName, id)); + Request indexDoc = + new Request("POST", String.format("/%s/_doc/%s?refresh=true", indexName, id)); indexDoc.setJsonEntity(doc); performRequest(client(), indexDoc); return this; } public Index addDocWithShardId(String doc, String id, String routing) { - Request indexDoc = new Request("POST", String.format("/%s/_doc/%s?refresh=true&routing=%s", indexName, id, routing)); + Request indexDoc = + new Request( + "POST", String.format("/%s/_doc/%s?refresh=true&routing=%s", indexName, id, routing)); indexDoc.setJsonEntity(doc); performRequest(client(), indexDoc); return this; } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java index 4b158d73df..f36992b1d0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -23,11 +22,16 @@ protected void init() throws Exception { @Test public void testSimpleDataTypesInSchema() { - JSONObject response = new JSONObject(executeQuery( - "SELECT account_number, address, age, birthdate, city, male, state " - + "FROM " + TEST_INDEX_BANK, "jdbc")); - - verifySchema(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT account_number, address, age, birthdate, city, male, state " + + "FROM " + + TEST_INDEX_BANK, + "jdbc")); + + verifySchema( + response, schema("account_number", "long"), schema("address", "text"), schema("age", "integer"), @@ -39,10 +43,10 @@ public void testSimpleDataTypesInSchema() { @Test 
public void testAliasInSchema() { - JSONObject response = new JSONObject(executeQuery( - "SELECT account_number AS acc FROM " + TEST_INDEX_BANK, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery("SELECT account_number AS acc FROM " + TEST_INDEX_BANK, "jdbc")); verifySchema(response, schema("account_number", "acc", "long")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java index 1f85b2857f..e9c0fd2c55 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java @@ -24,9 +24,7 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * For backward compatibility, check if legacy API endpoints are accessible. - */ +/** For backward compatibility, check if legacy API endpoints are accessible. 
*/ public class LegacyAPICompatibilityIT extends SQLIntegTestCase { @Override @@ -56,8 +54,8 @@ public void explain() throws IOException { @Test public void closeCursor() throws IOException { - String sql = StringUtils.format( - "SELECT firstname FROM %s WHERE balance > 100", TEST_INDEX_ACCOUNT); + String sql = + StringUtils.format("SELECT firstname FROM %s WHERE balance > 100", TEST_INDEX_ACCOUNT); JSONObject result = new JSONObject(executeFetchQuery(sql, 50, "jdbc")); Request request = new Request("POST", LEGACY_CURSOR_CLOSE_ENDPOINT); @@ -76,44 +74,36 @@ public void stats() throws IOException { @Test public void legacySettingsLegacyEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.sql.query.slowlog\": \"10\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"opendistro.sql.query.slowlog\": \"10\"" + " }" + "}"; Response response = updateSetting(LEGACY_SQL_SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void legacySettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.query.size_limit\": \"100\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"opendistro.query.size_limit\": \"100\"" + " }" + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingsLegacyEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"plugins.sql.slowlog\": \"10\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"plugins.sql.slowlog\": \"10\"" + " }" + "}"; Response response = updateSetting(LEGACY_SQL_SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingNewEndpoint() throws IOException { - String 
requestBody = "{" - + " \"persistent\": {" - + " \"plugins.query.metrics.rolling_interval\": \"80\"" - + " }" - + "}"; + String requestBody = + "{" + + " \"persistent\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -130,5 +120,4 @@ private RequestOptions.Builder buildJsonOption() { restOptionsBuilder.addHeader("Content-Type", "application/json"); return restOptionsBuilder; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java index 0dbb0404f9..c5ff50898a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; @@ -23,9 +22,11 @@ protected void init() throws Exception { @Test public void test_like_in_select() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE 'test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE 'test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", true), rows("test wildcard in the end of the text%", true), rows("%test wildcard in the beginning of the text", false), @@ -40,9 +41,11 @@ public void test_like_in_select() throws IOException { @Test public void test_like_in_select_with_escaped_percent() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE '\\\\%test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE '\\\\%test wildcard%' FROM " + TEST_INDEX_WILDCARD; 
JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", false), rows("test wildcard in the end of the text%", false), rows("%test wildcard in the beginning of the text", true), @@ -57,9 +60,11 @@ public void test_like_in_select_with_escaped_percent() throws IOException { @Test public void test_like_in_select_with_escaped_underscore() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE '\\\\_test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE '\\\\_test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", false), rows("test wildcard in the end of the text%", false), rows("%test wildcard in the beginning of the text", false), @@ -74,9 +79,13 @@ public void test_like_in_select_with_escaped_underscore() throws IOException { @Test public void test_like_in_where() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE 'test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE 'test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard"), rows("test wildcard in the end of the text%"), rows("test wildcard in % the middle of the text"), @@ -88,18 +97,22 @@ public void test_like_in_where() throws IOException { @Test public void test_like_in_where_with_escaped_percent() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE '\\\\%test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE '\\\\%test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("%test wildcard in the beginning 
of the text")); + verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_like_in_where_with_escaped_underscore() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE '\\\\_test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE '\\\\_test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("_test wildcard in the beginning of the text")); + verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test @@ -118,7 +131,8 @@ public void test_like_on_text_keyword_field_with_one_word() throws IOException { @Test public void test_like_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE TextKeywordBody LIKE 'test wild*'"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE TextKeywordBody LIKE 'test wild*'"; JSONObject result = executeJdbcRequest(query); assertEquals(7, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java index 1c959c5460..c81cc8e4f5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java @@ -23,32 +23,30 @@ public void init() throws IOException { @Test public void query_matches_test() throws IOException { - String query = "SELECT phrase FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'quick')"; + String query = + "SELECT phrase FROM " + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'quick')"; var result = new JSONObject(executeQuery(query, "jdbc")); verifySchema(result, schema("phrase", "text")); - verifyDataRows(result, - rows("quick fox"), - rows("quick fox here")); + 
verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test public void additional_parameters_test() throws IOException { - String query = "SELECT phrase FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, '2 test', minimum_should_match=1, fuzziness=2)"; + String query = + "SELECT phrase FROM " + + TEST_INDEX_PHRASE + + " WHERE match_bool_prefix(phrase, '2 test', minimum_should_match=1, fuzziness=2)"; var result = new JSONObject(executeQuery(query, "jdbc")); verifySchema(result, schema("phrase", "text")); - verifyDataRows(result, - rows("my test"), - rows("my test 2")); + verifyDataRows(result, rows("my test"), rows("my test 2")); } @Test public void no_matches_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'rice')"; + String query = + "SELECT * FROM " + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'rice')"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(0, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java index 9885ddfa33..5bde838e19 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java @@ -29,93 +29,119 @@ public void init() throws IOException { @Test public void match_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void match_in_having() throws IOException { - JSONObject result = executeJdbcRequest("SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"); + 
JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void missing_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match(invalid, 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match(invalid, 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void missing_quoted_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match('invalid', 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match('invalid', 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void missing_backtick_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match(`invalid`, 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match(`invalid`, 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - 
assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void matchquery_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE matchquery(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE matchquery(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void matchquery_in_having() throws IOException { - JSONObject result = executeJdbcRequest("SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " HAVING matchquery(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void match_query_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match_query(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + + TEST_INDEX_ACCOUNT + + " WHERE match_query(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void match_query_in_having() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " HAVING match_query(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); 
verifyDataRows(result, rows("Bates")); } @Test public void match_aliases_return_the_same_results() throws IOException { - String query1 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"; + String query1 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"; + String query2 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"; JSONObject result2 = executeJdbcRequest(query2); - String query3 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"; + String query3 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(result1.getInt("total"), result2.getInt("total")); assertEquals(result1.getInt("total"), result3.getInt("total")); @@ -123,30 +149,33 @@ public void match_aliases_return_the_same_results() throws IOException { @Test public void match_query_alternate_syntax() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = match_query('Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname = match_query('Bates')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void matchquery_alternate_syntax() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = matchquery('Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = matchquery('Bates')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, 
rows("Bates")); } @Test public void match_alternate_syntaxes_return_the_same_results() throws IOException { - String query1 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE match(firstname, 'Nanette')"; + String query1 = "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE match(firstname, 'Nanette')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE firstname = match_query('Nanette')"; + String query2 = + "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE firstname = match_query('Nanette')"; JSONObject result2 = executeJdbcRequest(query2); - String query3 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE firstname = matchquery('Nanette')"; + String query3 = + "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE firstname = matchquery('Nanette')"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(result1.getInt("total"), result2.getInt("total")); assertEquals(result1.getInt("total"), result3.getInt("total")); @@ -154,11 +183,16 @@ public void match_alternate_syntaxes_return_the_same_results() throws IOExceptio @Test public void matchPhraseQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where address= matchPhrase('671 Bristol Street') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\\\"match_phrase\\\":{\\\"address\\\":{\\\"query\\\":\\\"671 Bristol Street\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchPhrase('671 Bristol Street') order by" + + " _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + "{\\\"match_phrase\\\":{\\\"address\\\":{\\\"query\\\":\\\"671 Bristol Street\\\"")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java 
b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java index 3b7e65dcc6..d08149aa96 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_PHRASE; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java index dd2a8384d6..f181a18689 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; @@ -26,7 +25,8 @@ protected void init() throws Exception { public void required_parameters() throws IOException { String query = "SELECT Title FROM %s WHERE match_phrase_prefix(Title, 'champagne be')"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, + verifyDataRows( + result, rows("Can old flat champagne be used for vinegar?"), rows("Elder flower champagne best to use natural yeast or add a wine yeast?")); } @@ -34,9 +34,10 @@ public void required_parameters() throws IOException { @Test public void all_optional_parameters() throws IOException { // The values for optional parameters are valid but arbitrary. 
- String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, zero_terms_query='ALL', " + - "max_expansions = 2, analyzer=standard, slop=0)"; + String query = + "SELECT Title FROM %s " + + "WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, zero_terms_query='ALL', " + + "max_expansions = 2, analyzer=standard, slop=0)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Can old flat champagne be used for vinegar?")); } @@ -47,21 +48,22 @@ public void max_expansions_is_3() throws IOException { // It tells OpenSearch to consider only the first 3 terms that start with 'bottl' // In this dataset these are 'bottle-conditioning', 'bottling', 'bottles'. - String query = "SELECT Tags FROM %s " + - "WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3)"; + String query = + "SELECT Tags FROM %s " + + "WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, rows("brewing draught bottling"), - rows("draught bottles")); + verifyDataRows(result, rows("brewing draught bottling"), rows("draught bottles")); } @Test public void analyzer_english() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // This results in an empty query. 
- String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'in to', analyzer=english)"; + String query = + "SELECT Title FROM %s " + "WHERE match_phrase_prefix(Title, 'in to', analyzer=english)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - assertTrue("Expect English analyzer to filter out common words 'in' and 'to'", + assertTrue( + "Expect English analyzer to filter out common words 'in' and 'to'", result.getInt("total") == 0); } @@ -69,8 +71,8 @@ public void analyzer_english() throws IOException { public void analyzer_standard() throws IOException { // Standard analyzer does not treat 'in' and 'to' as special terms. // This results in 'to' being used as a phrase prefix given us 'Tokyo'. - String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)"; + String query = + "SELECT Title FROM %s " + "WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Local microbreweries and craft beer in Tokyo")); } @@ -80,15 +82,15 @@ public void zero_term_query_all() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // zero_terms_query of 'ALL' causes all rows to be returned. // ORDER BY ... LIMIT helps make the test understandable. 
- String query = "SELECT Title FROM %s" + - " WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL')" + - " ORDER BY Title DESC" + - " LIMIT 1"; + String query = + "SELECT Title FROM %s" + + " WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL')" + + " ORDER BY Title DESC" + + " LIMIT 1"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("was working great, now all foam")); } - @Test public void slop_is_2() throws IOException { // When slop is 2, the terms are matched exactly in the order specified. @@ -103,8 +105,6 @@ public void slop_is_3() throws IOException { // When slop is 3, results will include phrases where the query terms are transposed. String query = "SELECT Tags from %s where match_phrase_prefix(Tags, 'gas ta', slop=3)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, - rows("taste draught gas"), - rows("taste gas")); + verifyDataRows(result, rows("taste draught gas"), rows("taste gas")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java index cbb39ead40..60b7632ad0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -34,9 +33,8 @@ public void init() throws Exception { @Test public void testPI() throws IOException { JSONObject result = - executeQuery(String.format("SELECT PI() FROM %s HAVING (COUNT(1) > 0)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("PI()", null, "double")); + executeQuery(String.format("SELECT PI() FROM %s HAVING (COUNT(1) > 0)", TEST_INDEX_BANK)); + 
verifySchema(result, schema("PI()", null, "double")); verifyDataRows(result, rows(3.141592653589793)); } @@ -97,7 +95,8 @@ public void testE() throws IOException { @Test public void testExpm1() throws IOException { - JSONObject result = executeQuery("select expm1(account_number) FROM " + TEST_INDEX_BANK + " LIMIT 2"); + JSONObject result = + executeQuery("select expm1(account_number) FROM " + TEST_INDEX_BANK + " LIMIT 2"); verifySchema(result, schema("expm1(account_number)", null, "double")); verifyDataRows(result, rows(Math.expm1(1)), rows(Math.expm1(6))); } @@ -333,36 +332,28 @@ public void testCbrt() throws IOException { @Test public void testLnReturnsNull() throws IOException { JSONObject result = executeQuery("select ln(0), ln(-2)"); - verifySchema(result, - schema("ln(0)", "double"), - schema("ln(-2)", "double")); + verifySchema(result, schema("ln(0)", "double"), schema("ln(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLogReturnsNull() throws IOException { JSONObject result = executeQuery("select log(0), log(-2)"); - verifySchema(result, - schema("log(0)", "double"), - schema("log(-2)", "double")); + verifySchema(result, schema("log(0)", "double"), schema("log(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLog10ReturnsNull() throws IOException { JSONObject result = executeQuery("select log10(0), log10(-2)"); - verifySchema(result, - schema("log10(0)", "double"), - schema("log10(-2)", "double")); + verifySchema(result, schema("log10(0)", "double"), schema("log10(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLog2ReturnsNull() throws IOException { JSONObject result = executeQuery("select log2(0), log2(-2)"); - verifySchema(result, - schema("log2(0)", "double"), - schema("log2(-2)", "double")); + verifySchema(result, schema("log2(0)", "double"), schema("log2(-2)", "double")); verifyDataRows(result, rows(null, null)); } diff --git 
a/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java index 2a26eb19fe..4bbab4f167 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -40,9 +39,7 @@ public void requestCount() throws IOException, InterruptedException { } private Request makeStatRequest() { - return new Request( - "GET", STATS_API_ENDPOINT - ); + return new Request("GET", STATS_API_ENDPOINT); } private int requestTotal() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java index 6ef9846557..0bc091b0d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java @@ -30,96 +30,111 @@ public void init() throws IOException { @Test public void test_mandatory_params() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void test_all_params() { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['Body', Tags], 'taste beer', operator='and', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + 
"type = most_fields, slop = 2, zero_terms_query = 'ALL');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(['Body', Tags], 'taste beer', operator='and'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL');"; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); } @Test public void verify_wildcard_test() { - String query1 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['Tags'], 'taste')"; + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['Tags'], 'taste')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['T*'], 'taste')"; + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['T*'], 'taste')"; JSONObject result2 = executeJdbcRequest(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['*Date'], '2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['*Date'], '2014-01-22');"; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); } @Test public void test_multimatch_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multimatch('query'='taste', 'fields'='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multimatch('query'='taste', 'fields'='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(8, result.getInt("total")); } @Test public void test_multimatchquery_alternate_parameter_syntax() { - String query = 
"SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multimatchquery(query='cicerone', fields='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multimatchquery(query='cicerone', fields='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_quoted_multi_match_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multi_match('query'='cicerone', 'fields'='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multi_match('query'='cicerone', 'fields'='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_multi_match_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multi_match(query='cicerone', fields='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(query='cicerone', fields='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_wildcard_multi_match_alternate_parameter_syntax() { - String query = "SELECT Body FROM " + TEST_INDEX_BEER - + " WHERE multi_match(query='IPA', fields='B*') LIMIT 1"; + String query = + "SELECT Body FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(query='IPA', fields='B*') LIMIT 1"; JSONObject result = executeJdbcRequest(query); - 
verifyDataRows(result, rows("

I know what makes an IPA an IPA, but what are the unique" + - " characteristics of it's common variants? To be specific, the ones I'm interested in are Double IPA" + - " and Black IPA, but general differences between any other styles would be welcome too.

\n")); + verifyDataRows( + result, + rows( + "

I know what makes an IPA an IPA, but what are the unique characteristics of it's" + + " common variants? To be specific, the ones I'm interested in are Double IPA and" + + " Black IPA, but general differences between any other styles would be welcome" + + " too.

\n")); } @Test public void test_all_params_multimatchquery_alternate_parameter_syntax() { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multimatchquery(query='cicerone', fields='Tags', 'operator'='or', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multimatchquery(query='cicerone', fields='Tags', 'operator'='or'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL');"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); @@ -127,28 +142,28 @@ public void test_all_params_multimatchquery_alternate_parameter_syntax() { @Test public void multi_match_alternate_syntax() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multi_match('2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multi_match('2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(8, result.getInt("total")); } @Test public void multimatch_alternate_syntax() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multimatch('2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multimatch('2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(8, 
result.getInt("total")); } @Test public void multi_match_alternate_syntaxes_return_the_same_results() throws IOException { - String query1 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['CreationDate'], '2014-01-22');"; - String query2 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multi_match('2014-01-22');"; - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multimatch('2014-01-22');"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['CreationDate'], '2014-01-22');"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multi_match('2014-01-22');"; + String query3 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multimatch('2014-01-22');"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); var result2 = new JSONObject(executeQuery(query2, "jdbc")); var result3 = new JSONObject(executeQuery(query3, "jdbc")); diff --git a/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java index 57726089ae..0897a508e3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java @@ -9,9 +9,7 @@ import org.opensearch.client.RestClient; import org.opensearch.client.RestHighLevelClient; -/** - * Internal RestHighLevelClient only for testing purpose. - */ +/** Internal RestHighLevelClient only for testing purpose. 
*/ public class InternalRestHighLevelClient extends RestHighLevelClient { public InternalRestHighLevelClient(RestClient restClient) { super(restClient, RestClient::close, Collections.emptyList()); diff --git a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java index 4cb2aa299d..d444218c66 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.util; import static org.hamcrest.MatcherAssert.assertThat; @@ -45,16 +44,15 @@ public class MatcherUtils { /** * Assert field value in object by a custom matcher and getter to access the field. * - * @param name description + * @param name description * @param subMatcher sub-matcher for field - * @param getter getter function to access the field - * @param type of outer object - * @param type of inner field + * @param getter getter function to access the field + * @param type of outer object + * @param type of inner field * @return matcher */ - public static FeatureMatcher featureValueOf(String name, - Matcher subMatcher, - Function getter) { + public static FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -68,8 +66,8 @@ public static Matcher hits(Matcher... hitMatchers) { if (hitMatchers.length == 0) { return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - return featureValueOf("SearchHits", arrayContainingInAnyOrder(hitMatchers), - SearchHits::getHits); + return featureValueOf( + "SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); } @SafeVarargs @@ -92,14 +90,17 @@ public static Matcher> kv(String key, Object value) { } public static Matcher hitAny(String query, Matcher... 
matcher) { - return featureValueOf("SearchHits", hasItems(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query(query)); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; - }); + return featureValueOf( + "SearchHits", + hasItems(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query(query)); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; + }); } public static Matcher hitAny(Matcher... matcher) { @@ -107,14 +108,17 @@ public static Matcher hitAny(Matcher... matcher) { } public static Matcher hitAll(Matcher... matcher) { - return featureValueOf("SearchHits", containsInAnyOrder(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query("/hits/hits")); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; - }); + return featureValueOf( + "SearchHits", + containsInAnyOrder(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query("/hits/hits")); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; + }); } public static Matcher kvString(String key, Matcher matcher) { @@ -122,7 +126,8 @@ public static Matcher kvString(String key, Matcher matcher) } public static Matcher kvDouble(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> ((BigDecimal) actual.query(key)).doubleValue()); + return featureValueOf( + "Json Match", matcher, actual -> ((BigDecimal) actual.query(key)).doubleValue()); } public static Matcher kvInt(String key, Matcher matcher) { @@ -196,19 +201,18 @@ public static void verifyOrder(JSONArray array, Matcher... 
matchers) { assertThat(objects, containsInRelativeOrder(matchers)); } - public static TypeSafeMatcher schema(String expectedName, - String expectedType) { + public static TypeSafeMatcher schema(String expectedName, String expectedType) { return schema(expectedName, null, expectedType); } - public static TypeSafeMatcher schema(String expectedName, String expectedAlias, - String expectedType) { + public static TypeSafeMatcher schema( + String expectedName, String expectedAlias, String expectedType) { return new TypeSafeMatcher() { @Override public void describeTo(Description description) { description.appendText( - String - .format("(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); + String.format( + "(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); } @Override @@ -216,9 +220,9 @@ protected boolean matchesSafely(JSONObject jsonObject) { String actualName = (String) jsonObject.query("/name"); String actualAlias = (String) jsonObject.query("/alias"); String actualType = (String) jsonObject.query("/type"); - return expectedName.equals(actualName) && - (Strings.isNullOrEmpty(expectedAlias) || expectedAlias.equals(actualAlias)) && - expectedType.equals(actualType); + return expectedName.equals(actualName) + && (Strings.isNullOrEmpty(expectedAlias) || expectedAlias.equals(actualAlias)) + && expectedType.equals(actualType); } }; } @@ -288,10 +292,7 @@ public void describeTo(Description description) { }; } - - /** - * Tests if a string is equal to another string, ignore the case and whitespace. - */ + /** Tests if a string is equal to another string, ignore the case and whitespace. 
*/ public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { private final String string; @@ -314,7 +315,8 @@ public void describeMismatchSafely(String item, Description mismatchDescription) @Override public void describeTo(Description description) { - description.appendText("a string equal to ") + description + .appendText("a string equal to ") .appendValue(string) .appendText(" ignore case and white space"); } @@ -334,13 +336,11 @@ public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedStri /** * Compare two JSON string are equals. + * * @param expected expected JSON string. * @param actual actual JSON string. */ public static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual) - ); + assertEquals(JsonParser.parseString(expected), JsonParser.parseString(actual)); } } From 491a73bce2024347d8cfb492829aae88d54f9db4 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 16 Aug 2023 14:00:41 -0700 Subject: [PATCH 22/42] [Spotless] Applying Google Code Format for integ-tests #10 (#1967) * [Spotless] Applying Google Code Format for integ-tests #10 (#329) * Add spotless apply 33 files. Signed-off-by: Mitchell Gale * Addressed PR comments. Signed-off-by: Mitchell Gale * Address PR comments. Signed-off-by: Mitchell Gale * fixing integ test failure. 
Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale * Update integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand Co-authored-by: Guian Gumpac --- .../sql/correctness/TestConfig.java | 51 +- .../correctness/report/SuccessTestCase.java | 5 +- .../correctness/report/TestCaseReport.java | 15 +- .../sql/correctness/report/TestReport.java | 6 +- .../sql/correctness/report/TestSummary.java | 6 +- .../correctness/runner/resultset/Type.java | 14 +- .../sql/correctness/tests/TestConfigTest.java | 21 +- .../correctness/tests/TestDataSetTest.java | 32 +- .../correctness/tests/TestQuerySetTest.java | 14 +- .../sql/correctness/tests/TestReportTest.java | 175 +-- .../sql/correctness/tests/UnitTests.java | 22 +- .../sql/correctness/testset/TestDataSet.java | 31 +- .../sql/correctness/testset/TestQuerySet.java | 14 +- .../org/opensearch/sql/legacy/SubqueryIT.java | 310 ++--- .../sql/legacy/TermQueryExplainIT.java | 302 ++--- .../org/opensearch/sql/legacy/TestUtils.java | 79 +- .../opensearch/sql/legacy/TestsConstants.java | 101 +- .../sql/legacy/TypeInformationIT.java | 83 +- .../org/opensearch/sql/ppl/StandaloneIT.java | 48 +- .../opensearch/sql/ppl/StatsCommandIT.java | 96 +- .../opensearch/sql/ppl/SystemFunctionIT.java | 88 +- .../opensearch/sql/ppl/TextFunctionIT.java | 59 +- .../org/opensearch/sql/ppl/TopCommandIT.java | 25 +- .../sql/ppl/VisualizationFormatIT.java | 11 +- .../opensearch/sql/ppl/WhereCommandIT.java | 1 - .../sql/sql/StandalonePaginationIT.java | 64 +- .../opensearch/sql/sql/StringLiteralIT.java | 3 - .../opensearch/sql/sql/SystemFunctionIT.java | 66 +- 
.../opensearch/sql/sql/TextFunctionIT.java | 4 +- .../opensearch/sql/sql/WildcardQueryIT.java | 124 +- .../opensearch/sql/sql/WindowFunctionIT.java | 67 +- .../opensearch/sql/util/StandaloneModule.java | 11 +- .../org/opensearch/sql/util/TestUtils.java | 1105 +++++++++-------- 33 files changed, 1569 insertions(+), 1484 deletions(-) diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java index d344c29e20..a498f15d63 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness; import static java.util.stream.Collectors.joining; @@ -19,11 +18,13 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** + *
  * Test configuration parse the following information from command line arguments:
  * 1) Test schema and data
  * 2) Test queries
  * 3) OpenSearch connection URL
  * 4) Other database connection URLs
+ * 
*/ public class TestConfig { @@ -37,9 +38,7 @@ public class TestConfig { private final String openSearchHostUrl; - /** - * Test against some database rather than OpenSearch via our JDBC driver - */ + /** Test against some database rather than OpenSearch via our JDBC driver */ private final String dbConnectionUrl; private final Map otherDbConnectionNameAndUrls = new HashMap<>(); @@ -75,12 +74,14 @@ public Map getOtherDbConnectionNameAndUrls() { private TestDataSet[] buildDefaultTestDataSet() { return new TestDataSet[] { - new TestDataSet("opensearch_dashboards_sample_data_flights", - readFile("opensearch_dashboards_sample_data_flights.json"), - readFile("opensearch_dashboards_sample_data_flights.csv")), - new TestDataSet("opensearch_dashboards_sample_data_ecommerce", - readFile("opensearch_dashboards_sample_data_ecommerce.json"), - readFile("opensearch_dashboards_sample_data_ecommerce.csv")), + new TestDataSet( + "opensearch_dashboards_sample_data_flights", + readFile("opensearch_dashboards_sample_data_flights.json"), + readFile("opensearch_dashboards_sample_data_flights.csv")), + new TestDataSet( + "opensearch_dashboards_sample_data_ecommerce", + readFile("opensearch_dashboards_sample_data_ecommerce.json"), + readFile("opensearch_dashboards_sample_data_ecommerce.csv")), }; } @@ -118,31 +119,33 @@ private static String readFile(String relativePath) { @Override public String toString() { return "\n=================================\n" - + "Tested Database : " + openSearchHostUrlToString() + '\n' - + "Other Databases :\n" + otherDbConnectionInfoToString() + '\n' - + "Test data set(s) :\n" + testDataSetsToString() + '\n' - + "Test query set : " + testQuerySet + '\n' - + "=================================\n"; + + "Tested Database : " + + openSearchHostUrlToString() + + "\nOther Databases :\n" + + otherDbConnectionInfoToString() + + "\nTest data set(s) :\n" + + testDataSetsToString() + + "\nTest query set : " + + testQuerySet + + "\n=================================\n"; } 
private String testDataSetsToString() { - return Arrays.stream(testDataSets). - map(TestDataSet::toString). - collect(joining("\n")); + return Arrays.stream(testDataSets).map(TestDataSet::toString).collect(joining("\n")); } private String openSearchHostUrlToString() { if (!dbConnectionUrl.isEmpty()) { return dbConnectionUrl; } - return openSearchHostUrl.isEmpty() ? "(Use internal OpenSearch in workspace)" : - openSearchHostUrl; + return openSearchHostUrl.isEmpty() + ? "(Use internal OpenSearch in workspace)" + : openSearchHostUrl; } private String otherDbConnectionInfoToString() { - return otherDbConnectionNameAndUrls.entrySet().stream(). - map(e -> StringUtils.format(" %s = %s", e.getKey(), e.getValue())). - collect(joining("\n")); + return otherDbConnectionNameAndUrls.entrySet().stream() + .map(e -> StringUtils.format(" %s = %s", e.getKey(), e.getValue())) + .collect(joining("\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java index 62cd9b3fbe..8ec996e660 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.SUCCESS; @@ -12,9 +11,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Report for successful test case result. - */ +/** Report for successful test case result. 
*/ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java index 1a6285c52e..7567e9cd6a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.SUCCESS; @@ -12,22 +11,19 @@ import lombok.Getter; import lombok.ToString; -/** - * Base class for different test result. - */ +/** Base class for different test result. */ @EqualsAndHashCode @ToString public abstract class TestCaseReport { public enum TestResult { - SUCCESS, FAILURE; + SUCCESS, + FAILURE; } - @Getter - private final int id; + @Getter private final int id; - @Getter - private final String sql; + @Getter private final String sql; private final TestResult result; @@ -40,5 +36,4 @@ public TestCaseReport(int id, String sql, TestResult result) { public String getResult() { return result == SUCCESS ? "Success" : "Failed"; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java index 88b23ccd5b..9b9b3b7a23 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import java.util.ArrayList; @@ -12,9 +11,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Test report class to generate JSON report. - */ +/** Test report class to generate JSON report. 
*/ @EqualsAndHashCode @ToString @Getter @@ -37,5 +34,4 @@ public void addTestCase(TestCaseReport testCase) { summary.addFailure(); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java index 90767582b5..bbd4385460 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; -/** - * Test summary section. - */ +/** Test summary section. */ @EqualsAndHashCode @ToString @Getter @@ -33,5 +30,4 @@ public void addFailure() { failure++; total++; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java index 23cc0e3347..d626f75ccb 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java @@ -3,25 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import lombok.Data; -/** - * Column type in schema - */ +/** Column type in schema */ @Data public class Type { - /** - * Column name - */ + /** Column name */ private final String name; - /** - * Column type - */ + /** Column type */ private final String type; - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java index 1abe6ea109..daf084d371 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Collections.emptyMap; @@ -18,9 +17,7 @@ import org.junit.Test; import org.opensearch.sql.correctness.TestConfig; -/** - * Tests for {@link TestConfig} - */ +/** Tests for {@link TestConfig} */ public class TestConfigTest { @Test @@ -31,9 +28,7 @@ public void testDefaultConfig() { config.getOtherDbConnectionNameAndUrls(), allOf( hasEntry("H2", "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"), - hasEntry("SQLite", "jdbc:sqlite::memory:") - ) - ); + hasEntry("SQLite", "jdbc:sqlite::memory:"))); } @Test @@ -45,18 +40,16 @@ public void testCustomESUrls() { @Test public void testCustomDbUrls() { - Map args = ImmutableMap.of("otherDbUrls", - "H2=jdbc:h2:mem:test;DB_CLOSE_DELAY=-1," - + "Derby=jdbc:derby:memory:myDb;create=true"); + Map args = + ImmutableMap.of( + "otherDbUrls", + "H2=jdbc:h2:mem:test;DB_CLOSE_DELAY=-1,Derby=jdbc:derby:memory:myDb;create=true"); TestConfig config = new TestConfig(args); assertThat( config.getOtherDbConnectionNameAndUrls(), allOf( hasEntry("H2", "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"), - hasEntry("Derby", "jdbc:derby:memory:myDb;create=true") - ) - ); + hasEntry("Derby", "jdbc:derby:memory:myDb;create=true"))); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java index 3967d96658..7411df6a54 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.hamcrest.MatcherAssert.assertThat; @@ -13,9 +12,7 @@ import org.junit.Test; import 
org.opensearch.sql.correctness.testset.TestDataSet; -/** - * Tests for {@link TestDataSet} - */ +/** Tests for {@link TestDataSet} */ public class TestDataSetTest { @Test @@ -40,9 +37,7 @@ public void testDataSetWithSingleColumnData() { new Object[] {"field"}, new Object[] {"hello"}, new Object[] {"world"}, - new Object[] {"123"} - ) - ); + new Object[] {"123"})); } @Test @@ -61,16 +56,13 @@ public void testDataSetWithMultiColumnsData() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field1,field2\nhello,123\nworld,456"); + TestDataSet dataSet = new TestDataSet("test", mappings, "field1,field2\nhello,123\nworld,456"); assertThat( dataSet.getDataRows(), contains( new Object[] {"field1", "field2"}, new Object[] {"hello", 123}, - new Object[] {"world", 456} - ) - ); + new Object[] {"world", 456})); } @Test @@ -86,17 +78,15 @@ public void testDataSetWithEscapedComma() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field\n\"hello,world,123\"\n123\n\"[abc,def,ghi]\""); + TestDataSet dataSet = + new TestDataSet("test", mappings, "field\n\"hello,world,123\"\n123\n\"[abc,def,ghi]\""); assertThat( dataSet.getDataRows(), contains( new Object[] {"field"}, new Object[] {"hello,world,123"}, new Object[] {"123"}, - new Object[] {"[abc,def,ghi]"} - ) - ); + new Object[] {"[abc,def,ghi]"})); } @Test @@ -115,17 +105,13 @@ public void testDataSetWithNullData() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field1,field2\n,123\nworld,\n,"); + TestDataSet dataSet = new TestDataSet("test", mappings, "field1,field2\n,123\nworld,\n,"); assertThat( dataSet.getDataRows(), contains( new Object[] {"field1", "field2"}, new Object[] {null, 123}, new Object[] {"world", null}, - new Object[] {null, null} - ) - ); + new Object[] {null, null})); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java 
b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java index 1c97f743f4..08d360dfc7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.hamcrest.MatcherAssert.assertThat; @@ -12,22 +11,13 @@ import org.junit.Test; import org.opensearch.sql.correctness.testset.TestQuerySet; -/** - * Tests for {@link TestQuerySet} - */ +/** Tests for {@link TestQuerySet} */ public class TestQuerySetTest { @Test public void testQuerySet() { TestQuerySet querySet = new TestQuerySet("SELECT * FROM accounts\nSELECT * FROM accounts LIMIT 5"); - assertThat( - querySet, - contains( - "SELECT * FROM accounts", - "SELECT * FROM accounts LIMIT 5" - ) - ); + assertThat(querySet, contains("SELECT * FROM accounts", "SELECT * FROM accounts LIMIT 5")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java index 35b64fd5d6..9ac5151b21 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Arrays.asList; @@ -20,9 +19,7 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Test for {@link TestReport} - */ +/** Test for {@link TestReport} */ public class TestReportTest { private TestReport report = new TestReport(); @@ -31,22 +28,22 @@ public class TestReportTest { public void testSuccessReport() { report.addTestCase(new SuccessTestCase(1, "SELECT * FROM 
accounts")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 1," + - " \"failure\": 0" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Success'," + - " \"sql\": \"SELECT * FROM accounts\"," + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 1," + + " \"failure\": 0" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Success'," + + " \"sql\": \"SELECT * FROM accounts\"," + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); @@ -55,54 +52,63 @@ public void testSuccessReport() { @Test public void testFailedReport() { - report.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", asList( - new DBResult("OpenSearch", singleton(new Type("firstName", "text")), - singleton(new Row(asList("hello")))), - new DBResult("H2", singleton(new Type("firstName", "text")), - singleton(new Row(asList("world"))))), - "[SQLITE_ERROR] SQL error or missing database;" - )); + report.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList( + new DBResult( + "OpenSearch", + singleton(new Type("firstName", "text")), + singleton(new Row(asList("hello")))), + new DBResult( + "H2", + singleton(new Type("firstName", "text")), + singleton(new Row(asList("world"))))), + "[SQLITE_ERROR] SQL error or missing database;")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 0," + - " \"failure\": 1" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Failed'," + - " \"sql\": \"SELECT * FROM accounts\"," + - " \"explain\": \"Data row at [0] is different: this=[Row(values=[world])], other=[Row(values=[hello])]\"," + - " \"errors\": 
\"[SQLITE_ERROR] SQL error or missing database;\"," + - " \"resultSets\": [" + - " {" + - " \"database\": \"H2\"," + - " \"schema\": [" + - " {" + - " \"name\": \"firstName\"," + - " \"type\": \"text\"" + - " }" + - " ]," + - " \"dataRows\": [[\"world\"]]" + - " }," + - " {" + - " \"database\": \"OpenSearch\"," + - " \"schema\": [" + - " {" + - " \"name\": \"firstName\"," + - " \"type\": \"text\"" + - " }" + - " ]," + - " \"dataRows\": [[\"hello\"]]" + - " }" + - " ]" + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 0," + + " \"failure\": 1" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Failed'," + + " \"sql\": \"SELECT * FROM accounts\"," + + " \"explain\": \"Data row at [0] is different: " + + "this=[Row(values=[world])], " + + "other=[Row(values=[hello])]\"," + + " \"errors\": \"[SQLITE_ERROR] SQL error or missing database;\"," + + " \"resultSets\": [" + + " {" + + " \"database\": \"H2\"," + + " \"schema\": [" + + " {" + + " \"name\": \"firstName\"," + + " \"type\": \"text\"" + + " }" + + " ]," + + " \"dataRows\": [[\"world\"]]" + + " }," + + " {" + + " \"database\": \"OpenSearch\"," + + " \"schema\": [" + + " {" + + " \"name\": \"firstName\"," + + " \"type\": \"text\"" + + " }" + + " ]," + + " \"dataRows\": [[\"hello\"]]" + + " }" + + " ]" + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); @@ -113,27 +119,26 @@ public void testFailedReport() { public void testErrorReport() { report.addTestCase(new ErrorTestCase(1, "SELECT * FROM", "Missing table name in query")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 0," + - " \"failure\": 1" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Failed'," + - " \"sql\": \"SELECT * FROM\"," + - " 
\"reason\": \"Missing table name in query\"," + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 0," + + " \"failure\": 1" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Failed'," + + " \"sql\": \"SELECT * FROM\"," + + " \"reason\": \"Missing table name in query\"," + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java index 0bc5456069..367e2e10bf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import org.junit.runner.RunWith; @@ -11,15 +10,14 @@ @RunWith(Suite.class) @Suite.SuiteClasses({ - ComparisonTestTest.class, - TestConfigTest.class, - TestDataSetTest.class, - TestQuerySetTest.class, - TestReportTest.class, - OpenSearchConnectionTest.class, - JDBCConnectionTest.class, - DBResultTest.class, - RowTest.class, + ComparisonTestTest.class, + TestConfigTest.class, + TestDataSetTest.class, + TestQuerySetTest.class, + TestReportTest.class, + OpenSearchConnectionTest.class, + JDBCConnectionTest.class, + DBResultTest.class, + RowTest.class, }) -public class UnitTests { -} +public class UnitTests {} diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java index 66fc7c88af..25a3f907cf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.testset; import static java.util.stream.Collectors.joining; @@ -15,9 +14,7 @@ import org.json.JSONObject; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Test data set - */ +/** Test data set */ public class TestDataSet { private final String tableName; @@ -42,9 +39,7 @@ public List getDataRows() { return dataRows; } - /** - * Split columns in each line by separator and ignore escaped separator(s) in quoted string. - */ + /** Split columns in each line by separator and ignore escaped separator(s) in quoted string. */ private List splitColumns(String content, char separator) { List result = new ArrayList<>(); for (String line : content.split("\\r?\\n")) { @@ -76,8 +71,8 @@ private List splitColumns(String content, char separator) { } /** - * Convert column string values (read from CSV file) to objects of its real type - * based on the type information in index mapping file. + * Convert column string values (read from CSV file) to objects of its real type based on the type + * information in index mapping file. 
*/ private List convertStringDataToActualType(List rows) { JSONObject types = new JSONObject(schema); @@ -93,7 +88,8 @@ private List convertStringDataToActualType(List rows) { return result; } - private Object[] convertStringArrayToObjectArray(JSONObject types, String[] columnNames, String[] row) { + private Object[] convertStringArrayToObjectArray( + JSONObject types, String[] columnNames, String[] row) { Object[] result = new Object[row.length]; for (int i = 0; i < row.length; i++) { String colName = columnNames[i]; @@ -126,22 +122,15 @@ private Object convertStringToObject(String type, String str) { case "boolean": return Boolean.valueOf(str); default: - throw new IllegalStateException(StringUtils.format( - "Data type %s is not supported yet for value: %s", type, str)); + throw new IllegalStateException( + StringUtils.format("Data type %s is not supported yet for value: %s", type, str)); } } @Override public String toString() { int total = dataRows.size(); - return "Test data set :\n" - + " Table name: " + tableName + '\n' - + " Schema: " + schema + '\n' - + " Data rows (first 5 in " + total + "):" - + dataRows.stream(). - limit(5). - map(Arrays::toString). 
- collect(joining("\n ", "\n ", "\n")); + return String.format("Test data set:\n Table name: %s\n Schema: %s\n Data rows (first 5 in %d):", tableName, schema, total) + + dataRows.stream().limit(5).map(Arrays::toString).collect(joining("\n ", "\n ", "\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java index 7eee2cde9f..161d314c1d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.testset; import static java.util.stream.Collectors.joining; @@ -12,9 +11,7 @@ import java.util.Iterator; import java.util.List; -/** - * Test query set including SQL queries for comparison testing. - */ +/** Test query set including SQL queries for comparison testing. */ public class TestQuerySet implements Iterable { private List queries; @@ -49,10 +46,9 @@ private List lines(String content) { @Override public String toString() { int total = queries.size(); - return "SQL queries (first 5 in " + total + "):" - + queries.stream(). - limit(5). 
- collect(joining("\n ", "\n ", "\n")); + return "SQL queries (first 5 in " + + total + + "):" + + queries.stream().limit(5).collect(joining("\n ", "\n ", "\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java index 0fd0fea7f7..c1d656628f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.both; @@ -38,9 +37,7 @@ public class SubqueryIT extends SQLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { @@ -51,50 +48,55 @@ protected void init() throws Exception { @Test public void testIN() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testINWithAlias() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT A.dog_name " + - "FROM %s A " + - "WHERE A.holdersName IN (SELECT B.firstname FROM %s B) " + - "AND A.dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + 
String.format( + Locale.ROOT, + "SELECT A.dog_name " + + "FROM %s A " + + "WHERE A.holdersName IN (SELECT B.firstname FROM %s B) " + + "AND A.dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testINSelectAll() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT * " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( @@ -105,39 +107,38 @@ public void testINSelectAll() throws IOException { .and(kvInt("/_source/A.age", is(4))), both(kvString("/_source/A.dog_name", is("gogo"))) .and(kvString("/_source/A.holdersName", is("Gabrielle"))) - .and(kvInt("/_source/A.age", is(6))) - ) - ); + .and(kvInt("/_source/A.age", is(6))))); } @Test public void testINWithInnerWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B WHERE age <> 36) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B WHERE age <> 36) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + assertThat(response, 
hitAll(kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testNotSupportedQuery() throws IOException { exceptionRule.expect(ResponseException.class); exceptionRule.expectMessage("Unsupported subquery"); - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName NOT IN (SELECT firstname FROM %s B WHERE age <> 36) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName NOT IN (SELECT firstname FROM %s B WHERE age <> 36) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); executeQuery(query); } @@ -145,100 +146,91 @@ public void testNotSupportedQuery() throws IOException { @Ignore @Test public void testINWithDuplicate() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B)", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name FROM %s A WHERE holdersName IN (SELECT firstname FROM %s B)", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("babala")) - ) - ); + kvString("/_source/A.dog_name", is("babala")))); } @Test public void nonCorrelatedExists() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p)", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name FROM %s as e WHERE EXISTS (SELECT * FROM e.projects as p)", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/name", is("Bob Smith")), - kvString("/_source/name", 
is("Jane Smith")) - ) - ); + kvString("/_source/name", is("Jane Smith")))); } @Test public void nonCorrelatedExistsWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Bob Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Bob Smith")))); } @Test public void nonCorrelatedExistsParentWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + - "AND e.name LIKE 'jane'", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + + "AND e.name LIKE 'jane'", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Jane Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Jane Smith")))); } @Test public void nonCorrelatedNotExists() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p)", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name FROM %s as e WHERE NOT EXISTS (SELECT * FROM e.projects as p)", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/name", is("Susan Smith")), 
- kvString("/_source/name", is("John Doe")) - ) - ); + kvString("/_source/name", is("John Doe")))); } @Test public void nonCorrelatedNotExistsWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( @@ -246,52 +238,55 @@ public void nonCorrelatedNotExistsWhere() throws IOException { hitAll( kvString("/_source/name", is("Susan Smith")), kvString("/_source/name", is("Jane Smith")), - kvString("/_source/name", is("John Doe")) - ) - ); + kvString("/_source/name", is("John Doe")))); } @Test public void nonCorrelatedNotExistsParentWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + - "AND e.name LIKE 'smith'", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + + "AND e.name LIKE 'smith'", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Susan Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Susan Smith")))); } @Test public void selectFromSubqueryWithCountShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count FROM (SELECT 
COUNT(*) as TEMP FROM %s) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(1000)); } @Test public void selectFromSubqueryWithWhereAndCountShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s WHERE age > 30) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s WHERE age > 30) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(502)); } @Test public void selectFromSubqueryWithCountAndGroupByShouldPass() throws Exception { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY gender) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY gender) t", + TEST_INDEX_ACCOUNT)); assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = (JSONObject) result.query("/aggregations/gender"); @@ -312,11 +307,12 @@ public void selectFromSubqueryWithCountAndGroupByShouldPass() throws Exception { @Test public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY age ORDER BY TEMP) t", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY age ORDER BY TEMP) t", + TEST_INDEX_ACCOUNT)); JSONArray buckets = (JSONArray) result.query("/aggregations/age/buckets"); List countList = new ArrayList<>(); for (int i = 0; i < buckets.length(); ++i) { @@ -328,44 +324,50 @@ public void 
selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws I @Test public void selectFromSubqueryWithCountAndGroupByAndHavingShouldPass() throws Exception { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.T1 as g, t.T2 as c " + - "FROM (SELECT gender as T1, COUNT(*) as T2 " + - " FROM %s " + - " GROUP BY gender " + - " HAVING T2 > 500) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.T1 as g, t.T2 as c " + + "FROM (SELECT gender as T1, COUNT(*) as T2 " + + " FROM %s " + + " GROUP BY gender " + + " HAVING T2 > 500) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/g/buckets/0/c/value"), equalTo(507)); } @Test public void selectFromSubqueryCountAndSum() throws IOException { - JSONObject result = executeQuery( - StringUtils.format( - "SELECT t.TEMP1 as count, t.TEMP2 as balance " + - "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + - " FROM %s) t", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP1 as count, t.TEMP2 as balance " + + "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + + " FROM %s) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(1000)); - assertThat(((BigDecimal) result.query("/aggregations/balance/value")).doubleValue(), + assertThat( + ((BigDecimal) result.query("/aggregations/balance/value")).doubleValue(), closeTo(25714837.0, 0.01)); } @Test public void selectFromSubqueryWithoutAliasShouldPass() throws IOException { - JSONObject response = executeJdbcRequest( - StringUtils.format( - "SELECT a.firstname AS my_first, a.lastname AS my_last, a.age AS my_age " + - "FROM (SELECT firstname, lastname, age " + - "FROM %s " + - "WHERE age = 40 and account_number = 291) AS a", - TEST_INDEX_ACCOUNT)); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + StringUtils.format( + "SELECT a.firstname AS my_first, a.lastname AS my_last, a.age AS 
my_age " + + "FROM (SELECT firstname, lastname, age " + + "FROM %s " + + "WHERE age = 40 and account_number = 291) AS a", + TEST_INDEX_ACCOUNT)); + + verifySchema( + response, schema("firstname", "my_first", "text"), schema("lastname", "my_last", "text"), schema("age", "my_age", "long")); - verifyDataRows(response, - rows("Lynn", "Pollard", 40)); + verifyDataRows(response, rows("Lynn", "Pollard", 40)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java index fcc9b048c9..6c33b2b242 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -17,7 +16,6 @@ import org.opensearch.client.ResponseException; import org.opensearch.core.rest.RestStatus; - public class TermQueryExplainIT extends SQLIntegTestCase { @Override @@ -35,13 +33,15 @@ protected void init() throws Exception { @Test public void testNonExistingIndex() throws IOException { try { - explainQuery("SELECT firstname, lastname " + - "FROM opensearch_sql_test_fake_index " + - "WHERE firstname = 'Leo'"); + explainQuery( + "SELECT firstname, lastname " + + "FROM opensearch_sql_test_fake_index " + + "WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("no such index")); @@ -52,13 +52,13 @@ public void testNonExistingIndex() throws IOException { @Test public void testNonResolvingIndexPattern() throws 
IOException { try { - explainQuery("SELECT * " + - "FROM opensearch_sql_test_blah_blah* " + - "WHERE firstname = 'Leo'"); + explainQuery( + "SELECT * FROM opensearch_sql_test_blah_blah* WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field [firstname] cannot be found or used here.")); @@ -68,10 +68,11 @@ public void testNonResolvingIndexPattern() throws IOException { @Test public void testNonResolvingIndexPatternWithExistingIndex() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch_sql_test_blah_blah*, opensearch-sql_test_index_bank " + - "WHERE state = 'DC'"); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch_sql_test_blah_blah*, opensearch-sql_test_index_bank " + + "WHERE state = 'DC'"); assertThat(result, containsString("\"term\":{\"state.keyword\"")); } @@ -79,12 +80,13 @@ public void testNonResolvingIndexPatternWithExistingIndex() throws IOException { public void testNonResolvingIndexPatternWithNonExistingIndex() throws IOException { try { explainQuery( - "SELECT firstname, lastname " + - "FROM opensearch_sql_test_blah_blah*, another_fake_index " + - "WHERE firstname = 'Leo'"); + "SELECT firstname, lastname " + + "FROM opensearch_sql_test_blah_blah*, another_fake_index " + + "WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, 
containsString("no such index")); @@ -95,11 +97,11 @@ public void testNonResolvingIndexPatternWithNonExistingIndex() throws IOExceptio @Test public void testNonCompatibleMappings() throws IOException { try { - explainQuery( - "SELECT * FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2"); + explainQuery("SELECT * FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field [holdersName] have conflict type")); @@ -108,14 +110,15 @@ public void testNonCompatibleMappings() throws IOException { } /** - * The dog_name field has same type in dog and dog2 index. - * But, the holdersName field has different type. + * The dog_name field has same type in dog and dog2 index. But, the holdersName field has + * different type. 
*/ @Test public void testNonCompatibleMappingsButTheFieldIsNotUsed() throws IOException { - String result = explainQuery( - "SELECT dog_name " + - "FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2 WHERE dog_name = 'dog'"); + String result = + explainQuery( + "SELECT dog_name FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2" + + " WHERE dog_name = 'dog'"); System.out.println(result); assertThat(result, containsString("dog_name")); assertThat(result, containsString("_source")); @@ -123,20 +126,21 @@ public void testNonCompatibleMappingsButTheFieldIsNotUsed() throws IOException { @Test public void testEqualFieldMappings() throws IOException { - String result = explainQuery( - "SELECT color " + - "FROM opensearch-sql_test_index_dog2, opensearch-sql_test_index_dog3"); + String result = + explainQuery( + "SELECT color " + + "FROM opensearch-sql_test_index_dog2, opensearch-sql_test_index_dog3"); assertThat(result, containsString("color")); assertThat(result, containsString("_source")); } @Test public void testIdenticalMappings() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank, opensearch-sql_test_index_bank_two " + - "WHERE state = 'WA' OR male = true" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank, opensearch-sql_test_index_bank_two " + + "WHERE state = 'WA' OR male = true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); @@ -144,24 +148,23 @@ public void testIdenticalMappings() throws IOException { @Test public void testIdenticalMappingsWithTypes() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two/account_two " + - "WHERE state = 'WA' OR male = true" - ); + 
String result = + explainQuery( + "SELECT firstname, birthdate, state FROM opensearch-sql_test_index_bank/account," + + " opensearch-sql_test_index_bank_two/account_two WHERE state = 'WA' OR male =" + + " true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); } - @Test public void testIdenticalMappingsWithPartialType() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two " + - "WHERE state = 'WA' OR male = true" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two " + + "WHERE state = 'WA' OR male = true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); @@ -170,22 +173,22 @@ public void testIdenticalMappingsWithPartialType() throws IOException { @Test public void testTextFieldOnly() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank " + - "WHERE firstname = 'Abbas'" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank " + + "WHERE firstname = 'Abbas'"); assertThat(result, containsString("term")); assertThat(result, not(containsString("firstname."))); } @Test public void testTextAndKeywordAppendsKeywordAlias() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank " + - "WHERE state = 'WA' OR lastname = 'Chen'" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank " + + "WHERE state = 'WA' OR lastname = 'Chen'"); assertThat(result, 
containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, not(containsString("lastname."))); @@ -194,8 +197,7 @@ public void testTextAndKeywordAppendsKeywordAlias() throws IOException { @Test public void testBooleanFieldNoKeywordAlias() throws IOException { - String result = - explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE male = false"); + String result = explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE male = false"); assertThat(result, containsString("term")); assertThat(result, not(containsString("male."))); } @@ -203,8 +205,8 @@ public void testBooleanFieldNoKeywordAlias() throws IOException { @Test public void testDateFieldNoKeywordAlias() throws IOException { - String result = explainQuery( - "SELECT * FROM opensearch-sql_test_index_bank WHERE birthdate = '2018-08-19'"); + String result = + explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE birthdate = '2018-08-19'"); assertThat(result, containsString("term")); assertThat(result, not(containsString("birthdate."))); } @@ -218,11 +220,11 @@ public void testNumberNoKeywordAlias() throws IOException { @Test public void inTestInWhere() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank " + - "WHERE state IN ('WA' , 'PA' , 'TN')" - ); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_bank " + + "WHERE state IN ('WA' , 'PA' , 'TN')"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @@ -230,53 +232,52 @@ public void inTestInWhere() throws IOException { @Test @Ignore // TODO: enable when subqueries are fixed public void inTestInWhereSubquery() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank/account WHERE " + - "state IN (SELECT state FROM opensearch-sql_test_index_bank WHERE city = 'Nicholson')" - ); + String result = + explainQuery( 
+ "SELECT * FROM opensearch-sql_test_index_bank/account WHERE state IN (SELECT state FROM" + + " opensearch-sql_test_index_bank WHERE city = 'Nicholson')"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasGroupBy() throws IOException { - String result = explainQuery( - "SELECT firstname, state " + - "FROM opensearch-sql_test_index_bank/account " + - "GROUP BY firstname, state"); + String result = + explainQuery( + "SELECT firstname, state " + + "FROM opensearch-sql_test_index_bank/account " + + "GROUP BY firstname, state"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasGroupByUsingTableAlias() throws IOException { - String result = explainQuery( - "SELECT a.firstname, a.state " + - "FROM opensearch-sql_test_index_bank/account a " + - "GROUP BY a.firstname, a.state"); + String result = + explainQuery( + "SELECT a.firstname, a.state " + + "FROM opensearch-sql_test_index_bank/account a " + + "GROUP BY a.firstname, a.state"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasOrderBy() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank " + - "ORDER BY state, lastname " - ); + String result = + explainQuery( + "SELECT * FROM opensearch-sql_test_index_bank ORDER BY state, lastname "); assertThat(result, containsString("\"state.keyword\":{\"order\":\"asc\"")); assertThat(result, containsString("\"lastname\":{\"order\":\"asc\"}")); } @Test public void testKeywordAliasOrderByUsingTableAlias() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank b " + - "ORDER BY b.state, b.lastname " - ); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_bank b " + + "ORDER BY b.state, b.lastname 
"); assertThat(result, containsString("\"state.keyword\":{\"order\":\"asc\"")); assertThat(result, containsString("\"lastname\":{\"order\":\"asc\"}")); } @@ -286,13 +287,13 @@ public void testKeywordAliasOrderByUsingTableAlias() throws IOException { public void testJoinWhere() throws IOException { String expectedOutput = TestUtils.fileToString("src/test/resources/expectedOutput/term_join_where", true); - String result = explainQuery( - "SELECT a.firstname, a.lastname , b.city " + - "FROM opensearch-sql_test_index_account a " + - "JOIN opensearch-sql_test_index_account b " + - "ON a.city = b.city " + - "WHERE a.city IN ('Nicholson', 'Yardville')" - ); + String result = + explainQuery( + "SELECT a.firstname, a.lastname , b.city " + + "FROM opensearch-sql_test_index_account a " + + "JOIN opensearch-sql_test_index_account b " + + "ON a.city = b.city " + + "WHERE a.city IN ('Nicholson', 'Yardville')"); assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @@ -301,57 +302,56 @@ public void testJoinWhere() throws IOException { public void testJoinAliasMissing() throws IOException { try { explainQuery( - "SELECT a.firstname, a.lastname , b.city " + - "FROM opensearch-sql_test_index_account a " + - "JOIN opensearch-sql_test_index_account b " + - "ON a.city = b.city " + - "WHERE city IN ('Nicholson', 'Yardville')" - ); + "SELECT a.firstname, a.lastname , b.city " + + "FROM opensearch-sql_test_index_account a " + + "JOIN opensearch-sql_test_index_account b " + + "ON a.city = b.city " + + "WHERE city IN ('Nicholson', 'Yardville')"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field name [city] is ambiguous")); 
assertThat(entity, containsString("\"type\": \"VerificationException\"")); } - } @Test public void testNestedSingleConditionAllFields() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'something' " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'something' "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"path\":\"projects\"")); } @Test public void testNestedMultipleCondition() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'something' and p.started_year = 1990 " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'something' and p.started_year = 1990 "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"term\":{\"projects.started_year\":{\"value\":1990")); assertThat(result, containsString("\"path\":\"projects\"")); } @Test public void testConditionsOnDifferentNestedDocs() throws IOException { - String result = explainQuery( - "SELECT p.name, c.likes " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p, e.comments c " + - "WHERE p.name = 'something' or c.likes = 56 " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT p.name, c.likes " + + 
"FROM opensearch-sql_test_index_employee_nested e, e.projects p, e.comments c " + + "WHERE p.name = 'something' or c.likes = 56 "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"term\":{\"comments.likes\":{\"value\":56")); assertThat(result, containsString("\"path\":\"projects\"")); assertThat(result, containsString("\"path\":\"comments\"")); @@ -359,11 +359,11 @@ public void testConditionsOnDifferentNestedDocs() throws IOException { @Test public void testNestedSingleConditionSpecificFields() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'hello' or p.name = 'world' " - ); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'hello' or p.name = 'world' "); assertThat(result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"hello\"")); assertThat(result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"world\"")); assertThat(result, containsString("\"path\":\"projects\"")); @@ -371,32 +371,33 @@ public void testNestedSingleConditionSpecificFields() throws IOException { @Test public void testNestedSingleGroupBy() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "GROUP BY p.name "); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "GROUP BY p.name "); assertThat(result, containsString("\"terms\":{\"field\":\"projects.name.keyword\"")); assertThat(result, containsString("\"nested\":{\"path\":\"projects\"")); } @Test public void testNestedSingleOrderBy() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM 
opensearch-sql_test_index_employee_nested e, e.projects p " + - "ORDER BY p.name " - ); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "ORDER BY p.name "); assertThat(result, containsString("\"sort\":[{\"projects.name.keyword\"")); assertThat(result, containsString("\"nested\":{\"path\":\"projects\"")); } @Test public void testNestedIsNotNullExplain() throws IOException { - String explain = explainQuery( - "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + - "WHERE p IS NOT NULL" - ); + String explain = + explainQuery( + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + + "WHERE p IS NOT NULL"); assertThat(explain, containsString("\"exists\":{\"field\":\"projects\"")); assertThat(explain, containsString("\"path\":\"projects\"")); @@ -407,14 +408,15 @@ public void testNestedIsNotNullExplain() throws IOException { public void testMultiQuery() throws IOException { String expectedOutput = TestUtils.fileToString("src/test/resources/expectedOutput/term_union_where", true); - String result = explainQuery( - "SELECT firstname " + - "FROM opensearch-sql_test_index_account/account " + - "WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT dog_name as firstname " + - "FROM opensearch-sql_test_index_dog/dog " + - "WHERE holdersName = 'Hattie' OR dog_name = 'rex'"); + String result = + explainQuery( + "SELECT firstname " + + "FROM opensearch-sql_test_index_account/account " + + "WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT dog_name as firstname " + + "FROM opensearch-sql_test_index_dog/dog " + + "WHERE holdersName = 'Hattie' OR dog_name = 'rex'"); assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java 
index 30cee86e15..1abc1d6183 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static com.google.common.base.Strings.isNullOrEmpty; @@ -36,14 +35,14 @@ public class TestUtils { - private final static String MAPPING_FILE_PATH = "src/test/resources/indexDefinitions/"; + private static final String MAPPING_FILE_PATH = "src/test/resources/indexDefinitions/"; /** * Create test index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); @@ -54,16 +53,16 @@ public static void createIndexByRestClient(RestClient client, String indexName, } /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. + * https://github.com/elastic/elasticsearch/pull/49959
+ * Deprecate creation of dot-prefixed index + * names except for hidden and system indices. Create hidden index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, - String mapping) { + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); JSONObject jsonObject = isNullOrEmpty(mapping) ? new JSONObject() : new JSONObject(mapping); jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); @@ -73,11 +72,10 @@ public static void createHiddenIndexByRestClient(RestClient client, String index } /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist - * 404 - one or more indices specified or aliases do not exist + * Check if index already exists by OpenSearch index exists API which returns: 200 - specified + * indices or aliases exist 404 - one or more indices specified or aliases do not exist * - * @param client client connection + * @param client client connection * @param indexName index name * @return true for index exist */ @@ -93,13 +91,13 @@ public static boolean isIndexExist(RestClient client, String indexName) { /** * Load test data set by REST client. 
* - * @param client client connection - * @param indexName index name + * @param client client connection + * @param indexName index name * @param dataSetFilePath file path of test data set * @throws IOException */ - public static void loadDataByRestClient(RestClient client, String indexName, - String dataSetFilePath) throws IOException { + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { Path path = Paths.get(getResourceFilePath(dataSetFilePath)); Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); request.setJsonEntity(new String(Files.readAllBytes(path))); @@ -109,7 +107,7 @@ public static void loadDataByRestClient(RestClient client, String indexName, /** * Perform a request by REST client. * - * @param client client connection + * @param client client connection * @param request request object */ public static Response performRequest(RestClient client, Request request) { @@ -177,7 +175,6 @@ public static String getEmployeeNestedTypeIndexMapping() { return getMappingFile(mappingFile); } - public static String getNestedTypeIndexMapping() { String mappingFile = "nested_type_index_mapping.json"; return getMappingFile(mappingFile); @@ -255,8 +252,8 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkRequest bulkRequest = new BulkRequest(); try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { while (true) { @@ -285,8 +282,11 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); if (bulkResponse.hasFailures()) { - throw new 
Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } System.out.println(bulkResponse.getItems().length + " documents loaded."); // ensure the documents are searchable @@ -312,8 +312,8 @@ public static String getResponseBody(Response response, boolean retainNewLines) final StringBuilder sb = new StringBuilder(); try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader( - new InputStreamReader(is, StandardCharsets.UTF_8))) { + final BufferedReader br = + new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { @@ -326,15 +326,14 @@ public static String getResponseBody(Response response, boolean retainNewLines) return sb.toString(); } - public static String fileToString(final String filePathFromProjectRoot, - final boolean removeNewLines) - throws IOException { + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { final String absolutePath = getResourceFilePath(filePathFromProjectRoot); try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { final StringBuilder stringBuilder = new StringBuilder(); String line = br.readLine(); @@ -388,12 +387,16 @@ public static List> getPermutations(final List items) { } final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - 
permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } return result; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java index 338be25a0c..29bc9813fa 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java @@ -3,66 +3,63 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; -/** - * Created by omershelef on 18/12/14. - */ +/** Created by omershelef on 18/12/14. */ public class TestsConstants { - public final static String PERSISTENT = "persistent"; - public final static String TRANSIENT = "transient"; + public static final String PERSISTENT = "persistent"; + public static final String TRANSIENT = "transient"; - public final static String TEST_INDEX = "opensearch-sql_test_index"; + public static final String TEST_INDEX = "opensearch-sql_test_index"; - public final static String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; - public final static String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; - public final static String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; - public final static String TEST_INDEX_DOG = TEST_INDEX + "_dog"; - public final static String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; - public final static String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; - public final static String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; - public final static String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; - public final static 
String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; - public final static String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; - public final static String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; - public final static String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; - public final static String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; - public final static String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; - public final static String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; - public final static String TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS = + public static final String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; + public static final String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; + public static final String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; + public static final String TEST_INDEX_DOG = TEST_INDEX + "_dog"; + public static final String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; + public static final String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; + public static final String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; + public static final String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; + public static final String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; + public static final String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; + public static final String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; + public static final String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; + public static final String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; + public static final String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; + public static final String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; + public static final String TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS = TEST_INDEX + "_nested_type_without_arrays"; - public final static String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; - public final static String TEST_INDEX_NESTED_WITH_QUOTES = + public static 
final String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; + public static final String TEST_INDEX_NESTED_WITH_QUOTES = TEST_INDEX + "_nested_type_with_quotes"; - public final static String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; - public final static String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; - public final static String TEST_INDEX_UNEXPANDED_OBJECT = TEST_INDEX + "_unexpanded_object"; - public final static String TEST_INDEX_BANK = TEST_INDEX + "_bank"; - public final static String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; - public final static String TEST_INDEX_BANK_WITH_NULL_VALUES = + public static final String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; + public static final String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; + public static final String TEST_INDEX_UNEXPANDED_OBJECT = TEST_INDEX + "_unexpanded_object"; + public static final String TEST_INDEX_BANK = TEST_INDEX + "_bank"; + public static final String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; + public static final String TEST_INDEX_BANK_WITH_NULL_VALUES = TEST_INDEX_BANK + "_with_null_values"; - public final static String TEST_INDEX_BANK_CSV_SANITIZE = TEST_INDEX_BANK + "_csv_sanitize"; - public final static String TEST_INDEX_BANK_RAW_SANITIZE = TEST_INDEX_BANK + "_raw_sanitize"; - public final static String TEST_INDEX_ORDER = TEST_INDEX + "_order"; - public final static String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; - public final static String TEST_INDEX_DATE = TEST_INDEX + "_date"; - public final static String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; - public final static String TEST_INDEX_DEEP_NESTED = TEST_INDEX + "_deep_nested"; - public final static String TEST_INDEX_STRINGS = TEST_INDEX + "_strings"; - public final static String TEST_INDEX_DATATYPE_NUMERIC = TEST_INDEX + "_datatypes_numeric"; - public final static String TEST_INDEX_DATATYPE_NONNUMERIC = TEST_INDEX + "_datatypes_nonnumeric"; - 
public final static String TEST_INDEX_BEER = TEST_INDEX + "_beer"; - public final static String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing"; - public final static String TEST_INDEX_CALCS = TEST_INDEX + "_calcs"; - public final static String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats"; - public final static String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard"; - public final static String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested"; - public final static String TEST_INDEX_NESTED_WITH_NULLS = TEST_INDEX + "_nested_with_nulls"; - public final static String DATASOURCES = ".ql-datasources"; + public static final String TEST_INDEX_BANK_CSV_SANITIZE = TEST_INDEX_BANK + "_csv_sanitize"; + public static final String TEST_INDEX_BANK_RAW_SANITIZE = TEST_INDEX_BANK + "_raw_sanitize"; + public static final String TEST_INDEX_ORDER = TEST_INDEX + "_order"; + public static final String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; + public static final String TEST_INDEX_DATE = TEST_INDEX + "_date"; + public static final String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; + public static final String TEST_INDEX_DEEP_NESTED = TEST_INDEX + "_deep_nested"; + public static final String TEST_INDEX_STRINGS = TEST_INDEX + "_strings"; + public static final String TEST_INDEX_DATATYPE_NUMERIC = TEST_INDEX + "_datatypes_numeric"; + public static final String TEST_INDEX_DATATYPE_NONNUMERIC = TEST_INDEX + "_datatypes_nonnumeric"; + public static final String TEST_INDEX_BEER = TEST_INDEX + "_beer"; + public static final String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing"; + public static final String TEST_INDEX_CALCS = TEST_INDEX + "_calcs"; + public static final String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats"; + public static final String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard"; + public static final String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested"; + public static final String TEST_INDEX_NESTED_WITH_NULLS 
= TEST_INDEX + "_nested_with_nulls"; + public static final String DATASOURCES = ".ql-datasources"; - public final static String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - public final static String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - public final static String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; + public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + public static final String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; + public static final String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java index 646a38b011..421aae9622 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.util.MatcherUtils.schema; @@ -26,8 +25,8 @@ protected void init() throws Exception { @Test public void testAbsWithIntFieldReturnsInt() { JSONObject response = - executeJdbcRequest("SELECT ABS(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 5"); + executeJdbcRequest( + "SELECT ABS(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY age LIMIT 5"); verifySchema(response, schema("ABS(age)", null, "long")); } @@ -35,8 +34,10 @@ public void testAbsWithIntFieldReturnsInt() { @Test public void testCeilWithLongFieldReturnsLong() { JSONObject response = - executeJdbcRequest("SELECT CEIL(balance) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance LIMIT 5"); + executeJdbcRequest( + "SELECT CEIL(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance LIMIT 5"); verifySchema(response, schema("CEIL(balance)", null, "long")); } @@ -46,8 +47,8 @@ public void testCeilWithLongFieldReturnsLong() { */ @Test public void 
testPiReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT PI() FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " LIMIT 1"); + JSONObject response = + executeJdbcRequest("SELECT PI() FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " LIMIT 1"); verifySchema(response, schema("PI()", null, "double")); } @@ -57,16 +58,22 @@ public void testPiReturnsDouble() { */ @Test public void testUpperWithStringFieldReturnsString() { - JSONObject response = executeJdbcRequest("SELECT UPPER(firstname) AS firstname_alias FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname_alias LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT UPPER(firstname) AS firstname_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname_alias LIMIT 2"); verifySchema(response, schema("UPPER(firstname)", "firstname_alias", "keyword")); } @Test public void testLowerWithTextFieldReturnsText() { - JSONObject response = executeJdbcRequest("SELECT LOWER(firstname) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT LOWER(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("LOWER(firstname)", null, "keyword")); } @@ -76,8 +83,11 @@ public void testLowerWithTextFieldReturnsText() { */ @Test public void testLengthWithTextFieldReturnsInt() { - JSONObject response = executeJdbcRequest("SELECT length(firstname) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT length(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("length(firstname)", null, "integer")); } @@ -85,8 +95,10 @@ public void testLengthWithTextFieldReturnsInt() { @Test public void testLengthWithGroupByExpr() { JSONObject response = - executeJdbcRequest("SELECT Length(firstname) FROM " + 
TestsConstants.TEST_INDEX_ACCOUNT + - " GROUP BY LENGTH(firstname) LIMIT 5"); + executeJdbcRequest( + "SELECT Length(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY LENGTH(firstname) LIMIT 5"); verifySchema(response, schema("Length(firstname)", null, "integer")); } @@ -96,16 +108,22 @@ public void testLengthWithGroupByExpr() { */ @Test public void testSinWithLongFieldReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT sin(balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT sin(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("sin(balance)", null, "double")); } @Test public void testRadiansWithLongFieldReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT radians(balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT radians(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("radians(balance)", null, "double")); } @@ -115,16 +133,22 @@ public void testRadiansWithLongFieldReturnsDouble() { */ @Test public void testAddWithIntReturnsInt() { - JSONObject response = executeJdbcRequest("SELECT (balance + 5) AS balance_add_five FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT (balance + 5) AS balance_add_five FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("(balance + 5)", "balance_add_five", "long")); } @Test public void testSubtractLongWithLongReturnsLong() { - JSONObject response = executeJdbcRequest("SELECT (balance - balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + 
"SELECT (balance - balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("(balance - balance)", null, "long")); } @@ -134,17 +158,18 @@ public void testSubtractLongWithLongReturnsLong() { */ @Test public void testDayOfWeekWithKeywordReturnsText() { - JSONObject response = executeJdbcRequest("SELECT DAYOFWEEK(insert_time) FROM " - + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT DAYOFWEEK(insert_time) FROM " + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); - verifySchema(response, - schema("DAYOFWEEK(insert_time)", null, "integer")); + verifySchema(response, schema("DAYOFWEEK(insert_time)", null, "integer")); } @Test public void testYearWithKeywordReturnsText() { - JSONObject response = executeJdbcRequest("SELECT YEAR(insert_time) FROM " - + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT YEAR(insert_time) FROM " + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); verifySchema(response, schema("YEAR(insert_time)", null, "integer")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java index 8ef8787597..f81e1b6615 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.datasource.model.DataSourceMetadata.defaultOpenSearchDataSourceMetadata; @@ -78,17 +77,21 @@ public class StandaloneIT extends PPLIntegTestCase { public void init() { RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); OpenSearchClient client = new OpenSearchRestClient(restClient); - DataSourceService dataSourceService = new DataSourceServiceImpl( - new ImmutableSet.Builder() - .add(new 
OpenSearchDataSourceFactory(client, defaultSettings())) - .build(), getDataSourceMetadataStorage(), getDataSourceUserRoleHelper()); + DataSourceService dataSourceService = + new DataSourceServiceImpl( + new ImmutableSet.Builder() + .add(new OpenSearchDataSourceFactory(client, defaultSettings())) + .build(), + getDataSourceMetadataStorage(), + getDataSourceUserRoleHelper()); dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); ModulesBuilder modules = new ModulesBuilder(); - modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); + modules.add( + new StandaloneModule( + new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); Injector injector = modules.createInjector(); - pplService = - SecurityAccess.doPrivileged(() -> injector.getInstance(PPLService.class)); + pplService = SecurityAccess.doPrivileged(() -> injector.getInstance(PPLService.class)); } @Test @@ -146,9 +149,8 @@ public void onFailure(Exception e) { private Settings defaultSettings() { return new Settings() { - private final Map defaultSettings = new ImmutableMap.Builder() - .put(Key.QUERY_SIZE_LIMIT, 200) - .build(); + private final Map defaultSettings = + new ImmutableMap.Builder().put(Key.QUERY_SIZE_LIMIT, 200).build(); @Override public T getSettingValue(Key key) { @@ -162,9 +164,7 @@ public List getSettings() { }; } - /** - * Internal RestHighLevelClient only for testing purpose. - */ + /** Internal RestHighLevelClient only for testing purpose. 
*/ static class InternalRestHighLevelClient extends RestHighLevelClient { public InternalRestHighLevelClient(RestClient restClient) { super(restClient, RestClient::close, Collections.emptyList()); @@ -197,8 +197,8 @@ public StorageEngine storageEngine(OpenSearchClient client) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } @@ -257,28 +257,20 @@ public Optional getDataSourceMetadata(String datasourceName) } @Override - public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { - - } + public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) {} @Override - public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { - - } + public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) {} @Override - public void deleteDataSourceMetadata(String datasourceName) { - - } + public void deleteDataSourceMetadata(String datasourceName) {} }; } public static DataSourceUserAuthorizationHelper getDataSourceUserRoleHelper() { return new DataSourceUserAuthorizationHelper() { @Override - public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) { - - } + public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) {} }; } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java index 5389f245a4..92b9e309b8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static 
org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -66,26 +65,23 @@ public void testStatsDistinctCount() throws IOException { verifySchema(response, schema("distinct_count(gender)", null, "integer")); verifyDataRows(response, rows(2)); - response = - executeQuery(String.format("source=%s | stats dc(age)", TEST_INDEX_ACCOUNT)); + response = executeQuery(String.format("source=%s | stats dc(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("dc(age)", null, "integer")); verifyDataRows(response, rows(21)); } @Test public void testStatsMin() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats min(age)", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format("source=%s | stats min(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("min(age)", null, "long")); verifyDataRows(response, rows(20)); } @Test public void testStatsMax() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats max(age)", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format("source=%s | stats max(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("max(age)", null, "long")); verifyDataRows(response, rows(40)); } @@ -93,8 +89,8 @@ public void testStatsMax() throws IOException { @Test public void testStatsNested() throws IOException { JSONObject response = - executeQuery(String.format("source=%s | stats avg(abs(age) * 2) as AGE", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format("source=%s | stats avg(abs(age) * 2) as AGE", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("AGE", null, "double")); verifyDataRows(response, rows(60.342)); } @@ -102,8 +98,7 @@ public void testStatsNested() throws IOException { @Test public void testStatsNestedDoubleValue() throws IOException { JSONObject response = - executeQuery(String.format("source=%s | stats avg(abs(age) * 2.0)", - TEST_INDEX_ACCOUNT)); + executeQuery(String.format("source=%s | stats 
avg(abs(age) * 2.0)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("avg(abs(age) * 2.0)", null, "double")); verifyDataRows(response, rows(60.342)); } @@ -111,88 +106,87 @@ public void testStatsNestedDoubleValue() throws IOException { @Test public void testStatsWhere() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats sum(balance) as a by state | where a > 780000", - TEST_INDEX_ACCOUNT)); - verifySchema(response, schema("a", null, "long"), - schema("state", null, "string")); + executeQuery( + String.format( + "source=%s | stats sum(balance) as a by state | where a > 780000", + TEST_INDEX_ACCOUNT)); + verifySchema(response, schema("a", null, "long"), schema("state", null, "string")); verifyDataRows(response, rows(782199, "TX")); } @Test public void testGroupByNullValue() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats avg(balance) as a by age", - TEST_INDEX_BANK_WITH_NULL_VALUES)); - verifySchema(response, schema("a", null, "double"), - schema("age", null, "integer")); - verifyDataRows(response, + executeQuery( + String.format( + "source=%s | stats avg(balance) as a by age", TEST_INDEX_BANK_WITH_NULL_VALUES)); + verifySchema(response, schema("a", null, "double"), schema("age", null, "integer")); + verifyDataRows( + response, rows(null, null), rows(32838D, 28), rows(39225D, 32), rows(4180D, 33), rows(48086D, 34), - rows(null, 36) - ); + rows(null, 36)); } - //Todo. The column of agg function is in random order. This is because we create the project + // Todo. The column of agg function is in random order. This is because we create the project // all operator from the symbol table which can't maintain the original column order. 
@Test public void testMultipleAggregationFunction() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats min(age), max(age)", - TEST_INDEX_ACCOUNT)); - verifySchema(response, schema("min(age)", null, "long"), - schema("max(age)", null, "long")); + JSONObject response = + executeQuery(String.format("source=%s | stats min(age), max(age)", TEST_INDEX_ACCOUNT)); + verifySchema(response, schema("min(age)", null, "long"), schema("max(age)", null, "long")); verifyDataRows(response, rows(20, 40)); } @Test public void testStatsWithNull() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats avg(age)", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + executeQuery(String.format("source=%s | stats avg(age)", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifySchema(response, schema("avg(age)", null, "double")); verifyDataRows(response, rows(33.166666666666664)); } @Test public void testStatsWithMissing() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats avg(balance)", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + JSONObject response = + executeQuery( + String.format("source=%s | stats avg(balance)", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifySchema(response, schema("avg(balance)", null, "double")); verifyDataRows(response, rows(31082.25)); } @Test public void testStatsBySpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(age,10)", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema("span(age,10)", null, "integer")); + JSONObject response = + executeQuery(String.format("source=%s | stats count() by span(age,10)", TEST_INDEX_BANK)); + verifySchema( + response, schema("count()", null, "integer"), schema("span(age,10)", null, "integer")); verifyDataRows(response, rows(1, 20), rows(6, 30)); } @Test public void testStatsTimeSpan() throws IOException { - JSONObject response = 
executeQuery(String.format( - "source=%s | stats count() by span(birthdate,1y)", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema( - "span(birthdate,1y)", null, "timestamp")); + JSONObject response = + executeQuery( + String.format("source=%s | stats count() by span(birthdate,1y)", TEST_INDEX_BANK)); + verifySchema( + response, + schema("count()", null, "integer"), + schema("span(birthdate,1y)", null, "timestamp")); verifyDataRows(response, rows(2, "2017-01-01 00:00:00"), rows(5, "2018-01-01 00:00:00")); } @Test public void testStatsAliasedSpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(age,10) as age_bucket", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema("age_bucket", null, "integer")); + JSONObject response = + executeQuery( + String.format( + "source=%s | stats count() by span(age,10) as age_bucket", TEST_INDEX_BANK)); + verifySchema( + response, schema("count()", null, "integer"), schema("age_bucket", null, "integer")); verifyDataRows(response, rows(1, 20), rows(6, 30)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java index de13aa5488..d2cd140e99 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java @@ -26,49 +26,63 @@ public void init() throws IOException { @Test public void typeof_sql_types() throws IOException { - JSONObject response = executeQuery(String.format("source=%s | eval " - + "`str` = typeof('pewpew'), `double` = typeof(1.0)," - + "`int` = typeof(12345), `long` = typeof(1234567891011), `interval` = typeof(INTERVAL 2 DAY)" - + " | fields `str`, `double`, `int`, `long`, `interval`", - TEST_INDEX_DATATYPE_NUMERIC)); + JSONObject response = + executeQuery( + String.format( + "source=%s | 
eval `str` = typeof('pewpew')," + + " `double` = typeof(1.0)," + + "`int` = typeof(12345)," + + " `long` = typeof(1234567891011)," + + " `interval` = typeof(INTERVAL 2 DAY)" + + " | fields `str`, `double`, `int`, `long`, `interval`", + TEST_INDEX_DATATYPE_NUMERIC)); // TODO: test null in PPL - verifyDataRows(response, - rows("KEYWORD", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); + verifyDataRows(response, rows("KEYWORD", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); - response = executeQuery(String.format("source=%s | eval " - + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," - + "`time` = typeof(CAST('09:07:00' AS TIME))," - + "`date` = typeof(CAST('1961-04-12' AS DATE))," - + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" - + " | fields `timestamp`, `time`, `date`, `datetime`", - TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + response = + executeQuery( + String.format( + "source=%s | eval " + + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + + "`time` = typeof(CAST('09:07:00' AS TIME))," + + "`date` = typeof(CAST('1961-04-12' AS DATE))," + + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" + + " | fields `timestamp`, `time`, `date`, `datetime`", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); } @Test public void typeof_opensearch_types() throws IOException { - JSONObject response = executeQuery(String.format("source=%s | eval " - + "`double` = typeof(double_number), `long` = typeof(long_number)," - + "`integer` = typeof(integer_number), `byte` = typeof(byte_number)," - + "`short` = typeof(short_number), `float` = typeof(float_number)," - + "`half_float` = typeof(half_float_number), `scaled_float` = typeof(scaled_float_number)" - + " | fields `double`, `long`, `integer`, `byte`, `short`, `float`, `half_float`, `scaled_float`", - TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - 
rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); + JSONObject response = + executeQuery( + String.format( + "source=%s | eval `double` = typeof(double_number), `long` =" + + " typeof(long_number),`integer` = typeof(integer_number), `byte` =" + + " typeof(byte_number),`short` = typeof(short_number), `float` =" + + " typeof(float_number),`half_float` = typeof(half_float_number)," + + " `scaled_float` = typeof(scaled_float_number) | fields `double`, `long`," + + " `integer`, `byte`, `short`, `float`, `half_float`, `scaled_float`", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows( + response, rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); - response = executeQuery(String.format("source=%s | eval " - + "`text` = typeof(text_value), `date` = typeof(date_value)," - + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," - + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," - + "`binary` = typeof(binary_value), `geo_point` = typeof(geo_point_value)" - // TODO activate this test once `ARRAY` type supported, see ExpressionAnalyzer::isTypeNotSupported - //+ ", `nested` = typeof(nested_value)" - + " | fields `text`, `date`, `boolean`, `object`, `keyword`, `ip`, `binary`, `geo_point`", - TEST_INDEX_DATATYPE_NONNUMERIC)); - verifyDataRows(response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", - "IP", "BINARY", "GEO_POINT")); + response = + executeQuery( + String.format( + "source=%s | eval " + + "`text` = typeof(text_value), `date` = typeof(date_value)," + + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," + + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," + + "`binary` = typeof(binary_value), `geo_point` = typeof(geo_point_value)" + // TODO activate this test once `ARRAY` type supported, see + // ExpressionAnalyzer::isTypeNotSupported + // + ", `nested` = typeof(nested_value)" + + " | fields `text`, `date`, `boolean`, `object`, 
`keyword`, `ip`, `binary`," + + " `geo_point`", + TEST_INDEX_DATATYPE_NONNUMERIC)); + verifyDataRows( + response, + rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java index 024f190bee..dc9f1d98d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_STRINGS; @@ -23,27 +22,45 @@ public void init() throws IOException { loadIndex(Index.BANK_WITH_STRING_VALUES); } - void verifyQuery(String command, String initialArgs, String additionalArgs, - String outputRow1, String outputRow2, String outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=%s(%sname%s) | fields f", TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); + void verifyQuery( + String command, + String initialArgs, + String additionalArgs, + String outputRow1, + String outputRow2, + String outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=%s(%sname%s) | fields f", + TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "string")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); } - void verifyQuery(String command, String initialArgs, String additionalArgs, - Integer outputRow1, Integer outputRow2, Integer outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=%s(%sname%s) | fields f", TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); + void verifyQuery( + String command, + String initialArgs, + String additionalArgs, + Integer 
outputRow1, + Integer outputRow2, + Integer outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=%s(%sname%s) | fields f", + TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "integer")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); } - void verifyRegexQuery(String pattern, Integer outputRow1, Integer outputRow2, Integer outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=name regexp '%s' | fields f", TEST_INDEX_STRINGS, pattern); + void verifyRegexQuery(String pattern, Integer outputRow1, Integer outputRow2, Integer outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=name regexp '%s' | fields f", TEST_INDEX_STRINGS, pattern); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "integer")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); @@ -55,7 +72,7 @@ public void testRegexp() throws IOException { verifyRegexQuery(".*", 1, 1, 1); } - @Test + @Test public void testSubstr() throws IOException { verifyQuery("substr", "", ", 2", "ello", "orld", "elloworld"); verifyQuery("substr", "", ", 2, 2", "el", "or", "el"); @@ -99,14 +116,19 @@ public void testLtrim() throws IOException { @Test public void testConcat() throws IOException { - verifyQuery("concat", "", ", 'there', 'all', '!'", - "hellothereall!", "worldthereall!", "helloworldthereall!"); + verifyQuery( + "concat", + "", + ", 'there', 'all', '!'", + "hellothereall!", + "worldthereall!", + "helloworldthereall!"); } @Test public void testConcat_ws() throws IOException { - verifyQuery("concat_ws", "',', ", ", 'there'", - "hello,there", "world,there", "helloworld,there"); + verifyQuery( + "concat_ws", "',', ", ", 'there'", "hello,there", "world,there", "helloworld,there"); } @Test @@ -137,7 +159,8 @@ public void testLocate() throws 
IOException { @Test public void testReplace() throws IOException { - verifyQuery("replace", "", ", 'world', ' opensearch'", "hello", " opensearch", "hello opensearch"); + verifyQuery( + "replace", "", ", 'world', ' opensearch'", "hello", " opensearch", "hello opensearch"); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java index 054ff303a1..f9587e4b63 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -14,7 +13,7 @@ import org.json.JSONObject; import org.junit.jupiter.api.Test; -public class TopCommandIT extends PPLIntegTestCase{ +public class TopCommandIT extends PPLIntegTestCase { @Override public void init() throws IOException { @@ -24,30 +23,20 @@ public void init() throws IOException { @Test public void testTopWithoutGroup() throws IOException { - JSONObject result = - executeQuery(String.format("source=%s | top gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("M"), - rows("F")); + JSONObject result = executeQuery(String.format("source=%s | top gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("M"), rows("F")); } @Test - public void testTopNWithoutGroup() throws IOException{ - JSONObject result = - executeQuery(String.format("source=%s | top 1 gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("M")); + public void testTopNWithoutGroup() throws IOException { + JSONObject result = executeQuery(String.format("source=%s | top 1 gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("M")); } @Test public void testTopNWithGroup() throws IOException { JSONObject result = executeQuery(String.format("source=%s | top 1 state by gender", 
TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("F", "TX"), - rows("M", "MD")); + verifyDataRows(result, rows("F", "TX"), rows("M", "MD")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java index d530b4140d..263ed502ed 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java @@ -24,8 +24,9 @@ public void init() throws IOException { @Test void format() throws IOException { - String result = executeVizQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, age", TEST_INDEX_BANK), true); + String result = + executeVizQuery( + String.format(Locale.ROOT, "source=%s | fields firstname, age", TEST_INDEX_BANK), true); assertEquals( "{\n" + " \"data\": {\n" @@ -67,8 +68,10 @@ void format() throws IOException { } private String executeVizQuery(String query, boolean pretty) throws IOException { - Request request = buildRequest(query, - QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&pretty=%b", pretty)); + Request request = + buildRequest( + query, + QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&pretty=%b", pretty)); Response response = client().performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response, true); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java index ba870732fd..d56f9ffe32 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; diff --git 
a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java index 4738d233bf..e884734c96 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java @@ -66,17 +66,19 @@ public class StandalonePaginationIT extends SQLIntegTestCase { public void init() { RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); client = new OpenSearchRestClient(restClient); - DataSourceService dataSourceService = new DataSourceServiceImpl( - new ImmutableSet.Builder() - .add(new OpenSearchDataSourceFactory(client, defaultSettings())) - .build(), - getDataSourceMetadataStorage(), - getDataSourceUserRoleHelper() - ); + DataSourceService dataSourceService = + new DataSourceServiceImpl( + new ImmutableSet.Builder() + .add(new OpenSearchDataSourceFactory(client, defaultSettings())) + .build(), + getDataSourceMetadataStorage(), + getDataSourceUserRoleHelper()); dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); ModulesBuilder modules = new ModulesBuilder(); - modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); + modules.add( + new StandaloneModule( + new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); Injector injector = modules.createInjector(); queryService = injector.getInstance(QueryService.class); @@ -85,10 +87,9 @@ public void init() { @Test public void test_pagination_whitebox() throws IOException { - class TestResponder - implements ResponseListener { - @Getter - Cursor cursor = Cursor.None; + class TestResponder implements ResponseListener { + @Getter Cursor cursor = Cursor.None; + @Override public void onResponse(ExecutionEngine.QueryResponse response) { cursor = response.getCursor(); @@ -113,13 +114,16 @@ public void onFailure(Exception e) { 
// act 1, asserts in firstResponder var t = new OpenSearchIndex(client, defaultSettings(), "test"); - LogicalPlan p = new LogicalPaginate(1, List.of( - new LogicalProject( - new LogicalRelation("test", t), List.of( - DSL.named("name", DSL.ref("name", ExprCoreType.STRING)), - DSL.named("age", DSL.ref("age", ExprCoreType.LONG))), - List.of() - ))); + LogicalPlan p = + new LogicalPaginate( + 1, + List.of( + new LogicalProject( + new LogicalRelation("test", t), + List.of( + DSL.named("name", DSL.ref("name", ExprCoreType.STRING)), + DSL.named("age", DSL.ref("age", ExprCoreType.LONG))), + List.of()))); var firstResponder = new TestResponder(); queryService.executePlan(p, PlanContext.emptyPlanContext(), firstResponder); @@ -139,24 +143,30 @@ public void test_explain_not_supported() { // Request should be rejected before index names are resolved request.setJsonEntity("{ \"query\": \"select * from something\", \"fetch_size\": 10 }"); var exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); - var response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); - assertEquals("`explain` feature for paginated requests is not implemented yet.", + var response = + new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals( + "`explain` feature for paginated requests is not implemented yet.", response.getJSONObject("error").getString("details")); // Request should be rejected before cursor parsed request.setJsonEntity("{ \"cursor\" : \"n:0000\" }"); exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); - response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); - assertEquals("Explain of a paged query continuation is not supported. 
Use `explain` for the initial query request.", + response = + new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals( + "Explain of a paged query continuation is not supported. Use `explain` for the initial" + + " query request.", response.getJSONObject("error").getString("details")); } private Settings defaultSettings() { return new Settings() { - private final Map defaultSettings = new ImmutableMap.Builder() - .put(Key.QUERY_SIZE_LIMIT, 200) - .put(Key.SQL_CURSOR_KEEP_ALIVE, TimeValue.timeValueMinutes(1)) - .build(); + private final Map defaultSettings = + new ImmutableMap.Builder() + .put(Key.QUERY_SIZE_LIMIT, 200) + .put(Key.SQL_CURSOR_KEEP_ALIVE, TimeValue.timeValueMinutes(1)) + .build(); @Override public T getSettingValue(Key key) { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java index e54000f80d..d98016d62b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -16,8 +15,6 @@ import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; - - public class StringLiteralIT extends SQLIntegTestCase { @Test public void testStringHelloSingleQuote() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java index 584cdd05dd..4b39e2925c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java @@ -24,37 +24,47 @@ protected void init() throws Exception { @Test public void typeof_sql_types() { - JSONObject response = 
executeJdbcRequest("SELECT typeof('pewpew'), typeof(NULL), typeof(1.0)," - + "typeof(12345), typeof(1234567891011), typeof(INTERVAL 2 DAY);"); - verifyDataRows(response, - rows("KEYWORD", "UNDEFINED", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); - - response = executeJdbcRequest("SELECT" - + " typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," - + " typeof(CAST('09:07:00' AS TIME))," - + " typeof(CAST('1961-04-12' AS DATE))," - + " typeof(DATETIME('1961-04-12 09:07:00'))"); - verifyDataRows(response, - rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + JSONObject response = + executeJdbcRequest( + "SELECT typeof('pewpew'), typeof(NULL), typeof(1.0)," + + "typeof(12345), typeof(1234567891011), typeof(INTERVAL 2 DAY);"); + verifyDataRows(response, rows("KEYWORD", "UNDEFINED", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); + + response = + executeJdbcRequest( + "SELECT" + + " typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + + " typeof(CAST('09:07:00' AS TIME))," + + " typeof(CAST('1961-04-12' AS DATE))," + + " typeof(DATETIME('1961-04-12 09:07:00'))"); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); } @Test public void typeof_opensearch_types() { - JSONObject response = executeJdbcRequest(String.format("SELECT typeof(double_number)," - + "typeof(long_number), typeof(integer_number), typeof(byte_number), typeof(short_number)," - + "typeof(float_number), typeof(half_float_number), typeof(scaled_float_number)" - + " from %s;", TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); - - response = executeJdbcRequest(String.format("SELECT typeof(text_value)," - + "typeof(date_value), typeof(boolean_value), typeof(object_value), typeof(keyword_value)," - + "typeof(ip_value), typeof(binary_value), typeof(geo_point_value)" - // TODO activate this test once `ARRAY` type supported, see ExpressionAnalyzer::isTypeNotSupported - //+ ", typeof(nested_value)" - + " from %s;", 
TEST_INDEX_DATATYPE_NONNUMERIC)); - verifyDataRows(response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", - "IP", "BINARY", "GEO_POINT")); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT typeof(double_number),typeof(long_number), typeof(integer_number)," + + " typeof(byte_number), typeof(short_number),typeof(float_number)," + + " typeof(half_float_number), typeof(scaled_float_number) from %s;", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows( + response, rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); + + response = + executeJdbcRequest( + String.format( + "SELECT typeof(text_value),typeof(date_value), typeof(boolean_value)," + + " typeof(object_value), typeof(keyword_value),typeof(ip_value)," + + " typeof(binary_value), typeof(geo_point_value)" + // TODO activate this test once `ARRAY` type supported, see + // ExpressionAnalyzer::isTypeNotSupported + // + ", typeof(nested_value)" + + " from %s;", + TEST_INDEX_DATATYPE_NONNUMERIC)); + verifyDataRows( + response, + rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java index 94677354e4..314132fed0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; @@ -43,8 +42,7 @@ void verifyQuery(String query, String type, Integer output) throws IOException { void verifyQueryWithNullOutput(String query, String type) throws IOException { JSONObject result = executeQuery("select 'test null'," + query); - verifySchema(result, schema(query, null, type), - schema("'test null'", null, type)); + 
verifySchema(result, schema(query, null, type), schema("'test null'", null, type)); verifyDataRows(result, rows("test null", null)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java index 030c07c5fa..8123f887f2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; @@ -25,11 +24,17 @@ protected void init() throws Exception { public void test_wildcard_query_asterisk_function() throws IOException { String expected = "test wildcard"; - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 't*') LIMIT 1"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 't*') LIMIT 1"; JSONObject result1 = executeJdbcRequest(query1); verifyDataRows(result1, rows(expected)); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcardquery(KeywordBody, 't*') LIMIT 1"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcardquery(KeywordBody, 't*') LIMIT 1"; JSONObject result2 = executeJdbcRequest(query2); verifyDataRows(result2, rows(expected)); } @@ -38,11 +43,17 @@ public void test_wildcard_query_asterisk_function() throws IOException { public void test_wildcard_query_question_mark_function() throws IOException { String expected = "test wildcard"; - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild??rd')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild??rd')"; JSONObject result1 = executeJdbcRequest(query1); 
verifyDataRows(result1, rows(expected)); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcardquery(KeywordBody, 'test wild??rd')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcardquery(KeywordBody, 'test wild??rd')"; JSONObject result2 = executeJdbcRequest(query2); verifyDataRows(result2, rows(expected)); } @@ -50,11 +61,17 @@ public void test_wildcard_query_question_mark_function() throws IOException { // SQL uses ? as a wildcard which is converted to * in WildcardQuery.java @Test public void test_wildcard_query_sql_wildcard_percent_conversion() throws IOException { - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test%')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test%')"; JSONObject result1 = executeJdbcRequest(query1); assertEquals(8, result1.getInt("total")); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test*')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test*')"; JSONObject result2 = executeJdbcRequest(query2); assertEquals(result1.getInt("total"), result2.getInt("total")); } @@ -62,27 +79,41 @@ public void test_wildcard_query_sql_wildcard_percent_conversion() throws IOExcep // SQL uses _ as a wildcard which is converted to ? 
in WildcardQuery.java @Test public void test_wildcard_query_sql_wildcard_underscore_conversion() throws IOException { - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild_ard*')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild_ard*')"; JSONObject result1 = executeJdbcRequest(query1); assertEquals(7, result1.getInt("total")); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild?ard*')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild?ard*')"; JSONObject result2 = executeJdbcRequest(query2); assertEquals(result1.getInt("total"), result2.getInt("total")); } @Test public void test_escaping_wildcard_percent_in_the_beginning_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '\\\\%*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '\\\\%*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_escaping_wildcard_percent_in_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%%')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%%')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("test wildcard in % the middle of the text"), + verifyDataRows( + result, + rows("test wildcard in % the middle of the text"), rows("test wildcard %% beside each other"), rows("test wildcard in the end of the text%"), rows("%test wildcard in the beginning of the text")); @@ -90,30 +121,44 @@ public void 
test_escaping_wildcard_percent_in_text() throws IOException { @Test public void test_escaping_wildcard_percent_in_the_end_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard in the end of the text%")); } @Test public void test_double_escaped_wildcard_percent() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%\\\\%*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%\\\\%*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard %% beside each other")); } @Test public void test_escaping_wildcard_underscore_in_the_beginning_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '\\\\_*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test public void test_escaping_wildcard_underscore_in_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_*')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("test wildcard in _ the middle of the text"), + verifyDataRows( + result, + rows("test wildcard in _ the middle of the text"), rows("test wildcard __ beside each other"), 
rows("test wildcard in the end of the text_"), rows("_test wildcard in the beginning of the text"), @@ -122,60 +167,77 @@ public void test_escaping_wildcard_underscore_in_text() throws IOException { @Test public void test_escaping_wildcard_underscore_in_the_end_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("test wildcard in the end of the text_"), - rows("test backslash wildcard \\_")); + verifyDataRows( + result, rows("test wildcard in the end of the text_"), rows("test backslash wildcard \\_")); } @Test public void test_double_escaped_wildcard_underscore() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_\\\\_*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard __ beside each other")); } @Test public void test_backslash_wildcard() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test backslash wildcard \\_")); } @Test public void all_params_test() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD - + " WHERE wildcard_query(KeywordBody, 'test*', boost = 0.9," - + " case_insensitive=true, rewrite='constant_score')"; + String query = + "SELECT 
KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test*', boost = 0.9," + + " case_insensitive=true, rewrite='constant_score')"; JSONObject result = executeJdbcRequest(query); assertEquals(8, result.getInt("total")); } @Test public void test_wildcard_query_on_text_field_with_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test*')"; JSONObject result = executeJdbcRequest(query); assertEquals(9, result.getInt("total")); } @Test public void test_wildcard_query_on_text_keyword_field_with_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test*')"; JSONObject result = executeJdbcRequest(query); assertEquals(9, result.getInt("total")); } @Test public void test_wildcard_query_on_text_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test wild*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test wild*')"; JSONObject result = executeJdbcRequest(query); assertEquals(0, result.getInt("total")); } @Test - public void test_wildcard_query_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test wild*')"; + public void test_wildcard_query_on_text_keyword_field_with_greater_than_one_word() + throws IOException { + String query = + "SELECT * FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(TextKeywordBody, 'test wild*')"; JSONObject result = executeJdbcRequest(query); assertEquals(0, result.getInt("total")); } 
diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java index b586125af3..86257e6a22 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -25,11 +24,16 @@ protected void init() throws Exception { @Test public void testOrderByNullFirst() { - JSONObject response = new JSONObject( - executeQuery("SELECT age, ROW_NUMBER() OVER(ORDER BY age DESC NULLS FIRST) " - + "FROM " + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + "SELECT age, ROW_NUMBER() OVER(ORDER BY age DESC NULLS FIRST) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + "jdbc")); - verifyDataRows(response, + verifyDataRows( + response, rows(null, 1), rows(36, 2), rows(36, 3), @@ -41,11 +45,16 @@ public void testOrderByNullFirst() { @Test public void testOrderByNullLast() { - JSONObject response = new JSONObject( - executeQuery("SELECT age, ROW_NUMBER() OVER(ORDER BY age NULLS LAST) " - + "FROM " + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + "SELECT age, ROW_NUMBER() OVER(ORDER BY age NULLS LAST) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + "jdbc")); - verifyDataRows(response, + verifyDataRows( + response, rows(28, 1), rows(32, 2), rows(33, 3), @@ -57,10 +66,15 @@ public void testOrderByNullLast() { @Test public void testDistinctCountOverNull() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER() " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRows(response, + JSONObject response = + new JSONObject( + 
executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER() " + + "FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRows( + response, rows("Duke Willmington", 2), rows("Bond", 2), rows("Bates", 2), @@ -72,10 +86,15 @@ public void testDistinctCountOverNull() { @Test public void testDistinctCountOver() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER(ORDER BY lastname) " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRowsInOrder(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER(ORDER BY lastname) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRowsInOrder( + response, rows("Adams", 1), rows("Ayala", 2), rows("Bates", 2), @@ -87,10 +106,15 @@ public void testDistinctCountOver() { @Test public void testDistinctCountPartition() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER(PARTITION BY gender ORDER BY lastname) " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRowsInOrder(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER(PARTITION BY gender ORDER BY" + + " lastname) FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRowsInOrder( + response, rows("Ayala", 1), rows("Bates", 1), rows("Mcpherson", 1), @@ -99,5 +123,4 @@ public void testDistinctCountPartition() { rows("Duke Willmington", 1), rows("Ratliff", 1)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java index ad8afc47ca..5d6f0b5a55 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java @@ -37,8 +37,8 @@ import org.opensearch.sql.storage.StorageEngine; /** - * A 
utility class which registers SQL engine singletons as `OpenSearchPluginModule` does. - * It is needed to get access to those instances in test and validate their behavior. + * A utility class which registers SQL engine singletons as `OpenSearchPluginModule` does. It is + * needed to get access to those instances in test and validate their behavior. */ @RequiredArgsConstructor public class StandaloneModule extends AbstractModule { @@ -53,8 +53,7 @@ public class StandaloneModule extends AbstractModule { BuiltinFunctionRepository.getInstance(); @Override - protected void configure() { - } + protected void configure() {} @Provides public OpenSearchClient openSearchClient() { @@ -67,8 +66,8 @@ public StorageEngine storageEngine(OpenSearchClient client) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java index 3281c172cb..ac5cee118c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.util; import static com.google.common.base.Strings.isNullOrEmpty; @@ -44,9 +43,9 @@ public class TestUtils { /** * Create test index by REST client. 
* - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); @@ -57,16 +56,16 @@ public static void createIndexByRestClient(RestClient client, String indexName, } /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. + * https://github.com/elastic/elasticsearch/pull/49959
+ * Deprecate creation of dot-prefixed index + * names except for hidden and system indices. Create hidden index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, - String mapping) { + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); JSONObject jsonObject = isNullOrEmpty(mapping) ? new JSONObject() : new JSONObject(mapping); jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); @@ -76,11 +75,11 @@ public static void createHiddenIndexByRestClient(RestClient client, String index } /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist + * Check if index already exists by OpenSearch index exists API which returns:
+ * 200 - specified indices or aliases exist
* 404 - one or more indices specified or aliases do not exist * - * @param client client connection + * @param client client connection * @param indexName index name * @return true for index exist */ @@ -96,13 +95,13 @@ public static boolean isIndexExist(RestClient client, String indexName) { /** * Load test data set by REST client. * - * @param client client connection - * @param indexName index name + * @param client client connection + * @param indexName index name * @param dataSetFilePath file path of test data set * @throws IOException */ - public static void loadDataByRestClient(RestClient client, String indexName, - String dataSetFilePath) throws IOException { + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { Path path = Paths.get(getResourceFilePath(dataSetFilePath)); Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); request.setJsonEntity(new String(Files.readAllBytes(path))); @@ -112,7 +111,7 @@ public static void loadDataByRestClient(RestClient client, String indexName, /** * Perform a request by REST client. 
* - * @param client client connection + * @param client client connection * @param request request object */ public static Response performRequest(RestClient client, Request request) { @@ -129,566 +128,567 @@ public static Response performRequest(RestClient client, Request request) { } public static String getAccountIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"state\": {\n" + + " \"type\": \"text\",\n" 
+ + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; } public static String getPhraseIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"phrase\": {\n" + - " \"type\": \"text\",\n" + - " \"store\": true\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"phrase\": {\n" + + " \"type\": \"text\",\n" + + " \"store\": true\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogs2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogs3IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"color\": {\n" + - " \"type\": \"text\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"color\": {\n" + + " \"type\": \"text\"\n" + + " }" + + " }" + + " }" + + "}"; } public 
static String getPeople2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"keyword\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; } public static String getGameOfThronesIndexMapping() { - return "{ \"mappings\": { " + - " \"properties\": {\n" + - " \"nickname\": {\n" + - " \"type\":\"text\", " + - " \"fielddata\":true" + - " },\n" + - " \"name\": {\n" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"ofHerName\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"ofHisName\": {\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"house\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }" + - "} } }"; + return "{ \"mappings\": { " + + " \"properties\": {\n" + + " \"nickname\": {\n" + + " \"type\":\"text\", " + + " \"fielddata\":true" + + " },\n" + + " \"name\": {\n" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"ofHerName\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"ofHisName\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"house\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " 
\"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }" + + "} } }"; } // System public static String getOdbcIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"odbc_time\":{\n" + - "\t\t\t\t\"type\":\"date\",\n" + - "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + - "\t\t\t},\n" + - "\t\t\t\"docCount\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"odbc_time\":{\n" + + "\t\t\t\t\"type\":\"date\",\n" + + "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + + "\t\t\t},\n" + + "\t\t\t\"docCount\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; } public static String getLocationIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"place\":{\n" + - "\t\t\t\t\"type\":\"geo_shape\"\n" + - //"\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in OpenSearch - //"\t\t\t\t\"precision\": \"10km\"\n" + - "\t\t\t},\n" + - "\t\t\t\"center\":{\n" + - "\t\t\t\t\"type\":\"geo_point\"\n" + - "\t\t\t},\n" + - "\t\t\t\"description\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"place\":{\n" + + "\t\t\t\t\"type\":\"geo_shape\"\n" + + + // "\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in + // OpenSearch + // "\t\t\t\t\"precision\": \"10km\"\n" + + "\t\t\t},\n" + + "\t\t\t\"center\":{\n" + + "\t\t\t\t\"type\":\"geo_point\"\n" + + "\t\t\t},\n" + + "\t\t\t\"description\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; } public static String getEmployeeNestedTypeIndexMapping() 
{ - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"comments\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"date\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"message\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"city\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"title\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}\n"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"comments\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"date\": 
{\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"message\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"city\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"title\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; } - public static String getNestedTypeIndexMapping() { - return "{ \"mappings\": {\n" + - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"info\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"author\": {\n" + - " \"type\": 
\"keyword\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\" : 256\n" + - " }\n" + - " },\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"comment\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"data\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"myNum\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"someField\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }}"; + return "{ \"mappings\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"info\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"author\": {\n" + + " \"type\": \"keyword\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\" : 256\n" + + " }\n" + + " },\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"comment\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"data\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"myNum\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"someField\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }}"; } public static String getJoinTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"join_field\": {\n" + - " \"type\": \"join\",\n" + - " \"relations\": {\n" + - " \"parentType\": \"childrenType\"\n" + - " }\n" + - " 
},\n" + - " \"parentTile\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"author\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"info\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"join_field\": {\n" + + " \"type\": \"join\",\n" + + " \"relations\": {\n" + + " \"parentType\": \"childrenType\"\n" + + " }\n" + + " },\n" + + " \"parentTile\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"author\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"info\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getBankIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " 
\"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getBankWithNullValuesIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" 
+ + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getOrderIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getWeblogsIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"host\": {\n" + - " \"type\": \"ip\"\n" + - " },\n" + - " \"method\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"url\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"response\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"bytes\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"host\": {\n" + + " \"type\": \"ip\"\n" + + " },\n" + + " \"method\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"url\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"response\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"bytes\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + 
+ " }\n" + + " }\n" + + "}"; } public static String getDateIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"date_keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"date_keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDateTimeIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"birthday\": {" + - " \"type\": \"date\"" + - " }" + - " }" + - " }" + - "}"; + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"birthday\": {" + + " \"type\": \"date\"" + + " }" + + " }" + + " }" + + "}"; } public static String getNestedSimpleIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"address\": {" + - " \"type\": \"nested\"," + - " \"properties\": {" + - " \"city\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }," + - " \"state\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }," + - " \"age\": {" + - " \"type\": \"long\"" + - " }," + - " \"id\": {" + - " \"type\": \"long\"" + - " }," + - " \"name\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"address\": {" + + " \"type\": \"nested\"," + + " \"properties\": {" + + " \"city\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }," + + " \"state\": {" + + " 
\"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }," + + " \"age\": {" + + " \"type\": \"long\"" + + " }," + + " \"id\": {" + + " \"type\": \"long\"" + + " }," + + " \"name\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; } public static void loadBulk(Client client, String jsonPath, String defaultIndex) @@ -698,8 +698,8 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkRequest bulkRequest = new BulkRequest(); try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { while (true) { @@ -728,8 +728,11 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); if (bulkResponse.hasFailures()) { - throw new Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } System.out.println(bulkResponse.getItems().length + " documents loaded."); // ensure the documents are searchable @@ -755,8 +758,8 @@ public static String getResponseBody(Response response, boolean retainNewLines) final StringBuilder sb = new StringBuilder(); try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader( - new InputStreamReader(is, StandardCharsets.UTF_8))) { + final BufferedReader br = 
+ new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { @@ -769,15 +772,14 @@ public static String getResponseBody(Response response, boolean retainNewLines) return sb.toString(); } - public static String fileToString(final String filePathFromProjectRoot, - final boolean removeNewLines) - throws IOException { + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { final String absolutePath = getResourceFilePath(filePathFromProjectRoot); try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { final StringBuilder stringBuilder = new StringBuilder(); String line = br.readLine(); @@ -831,36 +833,41 @@ public static List> getPermutations(final List items) { } final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } return result; } public static void verifyIsV1Cursor(JSONObject response) { - var legacyCursorPrefixes = Arrays.stream(CursorType.values()) - .map(c -> c.getId() + ":").collect(Collectors.toList()); + var legacyCursorPrefixes = + 
Arrays.stream(CursorType.values()).map(c -> c.getId() + ":").collect(Collectors.toList()); verifyCursor(response, legacyCursorPrefixes, "v1"); } - public static void verifyIsV2Cursor(JSONObject response) { verifyCursor(response, List.of(CURSOR_PREFIX), "v2"); } - private static void verifyCursor(JSONObject response, List validCursorPrefix, String engineName) { - assertTrue("'cursor' property does not exist", response.has("cursor")); + private static void verifyCursor( + JSONObject response, List validCursorPrefix, String engineName) { + assertTrue("'cursor' property does not exist", response.has("cursor")); - var cursor = response.getString("cursor"); - assertFalse("'cursor' property is empty", cursor.isEmpty()); - assertTrue("The cursor '" + cursor.substring(0, 50) + "...' is not from " + engineName + " engine.", - validCursorPrefix.stream().anyMatch(cursor::startsWith)); - } + var cursor = response.getString("cursor"); + assertFalse("'cursor' property is empty", cursor.isEmpty()); + assertTrue( + "The cursor '" + cursor.substring(0, 50) + "...' is not from " + engineName + " engine.", + validCursorPrefix.stream().anyMatch(cursor::startsWith)); + } public static void verifyNoCursor(JSONObject response) { assertTrue(!response.has("cursor")); From b035b13111af3a87d7d70018068582e12f45f541 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 16 Aug 2023 14:47:00 -0700 Subject: [PATCH 23/42] [Spotless] Applying Google Code Format for sql files #11 (#1968) * [Spotless] Applying Google Code Format for sql files #11 (#330) * Spotless apply, ignore checkstyle, manual java doc fixes. 
Signed-off-by: Mitchell Gale * Spotless apply Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Guian Gumpac --- build.gradle | 5 +- sql/build.gradle | 5 + .../org/opensearch/sql/sql/SQLService.java | 24 +- .../sql/sql/antlr/AnonymizerListener.java | 22 +- .../sql/sql/antlr/SQLSyntaxParser.java | 10 +- .../sql/sql/domain/SQLQueryRequest.java | 72 +- .../sql/sql/parser/AstAggregationBuilder.java | 46 +- .../opensearch/sql/sql/parser/AstBuilder.java | 46 +- .../sql/sql/parser/AstExpressionBuilder.java | 316 ++++---- .../sql/parser/AstHavingFilterBuilder.java | 9 +- .../sql/sql/parser/AstSortBuilder.java | 14 +- .../sql/sql/parser/ParserUtils.java | 25 +- .../sql/parser/context/ParsingContext.java | 13 +- .../parser/context/QuerySpecification.java | 45 +- .../common/antlr/SyntaxParserTestBase.java | 9 +- .../opensearch/sql/sql/SQLServiceTest.java | 38 +- .../sql/sql/antlr/BracketedTimestampTest.java | 1 - .../sql/sql/antlr/HighlightTest.java | 15 +- .../sql/antlr/MatchBoolPrefixParserTest.java | 5 +- .../sql/sql/antlr/SQLParserTest.java | 1 - .../sql/sql/antlr/SQLSyntaxParserTest.java | 606 +++++++-------- .../sql/sql/domain/SQLQueryRequestTest.java | 156 ++-- .../sql/parser/AnonymizerListenerTest.java | 47 +- .../sql/parser/AstAggregationBuilderTest.java | 122 ++-- .../sql/sql/parser/AstBuilderTest.java | 327 +++------ .../sql/sql/parser/AstBuilderTestBase.java | 4 +- .../sql/parser/AstExpressionBuilderTest.java | 690 ++++++++---------- .../parser/AstHavingFilterBuilderTest.java | 4 +- .../sql/parser/AstNowLikeFunctionTest.java | 65 +- .../parser/AstQualifiedNameBuilderTest.java | 9 +- .../sql/sql/parser/AstSortBuilderTest.java | 47 +- .../context/QuerySpecificationTest.java | 61 +- 32 files changed, 1221 insertions(+), 1638 deletions(-) diff --git a/build.gradle 
b/build.gradle index c0f82c502b..a256fd1d22 100644 --- a/build.gradle +++ b/build.gradle @@ -84,9 +84,10 @@ repositories { spotless { java { target fileTree('.') { - include 'common/**/*.java', - 'datasources/**/*.java', + include 'datasources/**/*.java', 'core/**/*.java', + 'sql/**/*.java', + 'common/**/*.java', 'ppl/**/*.java' exclude '**/build/**', '**/build-*/**' } diff --git a/sql/build.gradle b/sql/build.gradle index 44dc37cf0f..d85cc4ca74 100644 --- a/sql/build.gradle +++ b/sql/build.gradle @@ -58,6 +58,11 @@ dependencies { testImplementation(testFixtures(project(":core"))) } +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + test { useJUnitPlatform() testLogging { diff --git a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java index 91ec00cdd5..e1ca778453 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java +++ b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import java.util.Optional; @@ -21,9 +20,7 @@ import org.opensearch.sql.sql.parser.AstBuilder; import org.opensearch.sql.sql.parser.AstStatementBuilder; -/** - * SQL service. - */ +/** SQL service. */ @RequiredArgsConstructor public class SQLService { @@ -69,15 +66,19 @@ private AbstractPlan plan( if (request.getCursor().isPresent()) { // Handle v2 cursor here -- legacy cursor was handled earlier. if (isExplainRequest) { - throw new UnsupportedOperationException("Explain of a paged query continuation " - + "is not supported. Use `explain` for the initial query request."); + throw new UnsupportedOperationException( + "Explain of a paged query continuation " + + "is not supported. 
Use `explain` for the initial query request."); } if (request.isCursorCloseRequest()) { - return queryExecutionFactory.createCloseCursor(request.getCursor().get(), - queryListener.orElse(null)); + return queryExecutionFactory.createCloseCursor( + request.getCursor().get(), queryListener.orElse(null)); } - return queryExecutionFactory.create(request.getCursor().get(), - isExplainRequest, queryListener.orElse(null), explainListener.orElse(null)); + return queryExecutionFactory.create( + request.getCursor().get(), + isExplainRequest, + queryListener.orElse(null), + explainListener.orElse(null)); } else { // 1.Parse query and convert parse tree (CST) to abstract syntax tree (AST) ParseTree cst = parser.parse(request.getQuery()); @@ -90,8 +91,7 @@ private AbstractPlan plan( .fetchSize(request.getFetchSize()) .build())); - return queryExecutionFactory.create( - statement, queryListener, explainListener); + return queryExecutionFactory.create(statement, queryListener, explainListener); } } } diff --git a/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java b/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java index bd7b5cbedf..0d1b89f7a9 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java +++ b/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLLexer.BACKTICK_QUOTE_ID; @@ -31,21 +30,17 @@ import org.antlr.v4.runtime.tree.ParseTreeListener; import org.antlr.v4.runtime.tree.TerminalNode; -/** - * Parse tree listener for anonymizing SQL requests. - */ +/** Parse tree listener for anonymizing SQL requests. 
*/ public class AnonymizerListener implements ParseTreeListener { private String anonymizedQueryString = ""; private static final int NO_TYPE = -1; private int previousType = NO_TYPE; @Override - public void enterEveryRule(ParserRuleContext ctx) { - } + public void enterEveryRule(ParserRuleContext ctx) {} @Override - public void exitEveryRule(ParserRuleContext ctx) { - } + public void exitEveryRule(ParserRuleContext ctx) {} @Override public void visitTerminal(TerminalNode node) { @@ -57,10 +52,11 @@ public void visitTerminal(TerminalNode node) { int token = node.getSymbol().getType(); boolean isDotIdentifiers = token == DOT || previousType == DOT; boolean isComma = token == COMMA; - boolean isEqualComparison = ((token == EQUAL_SYMBOL) + boolean isEqualComparison = + ((token == EQUAL_SYMBOL) && (previousType == LESS_SYMBOL - || previousType == GREATER_SYMBOL - || previousType == EXCLAMATION_SYMBOL)); + || previousType == GREATER_SYMBOL + || previousType == EXCLAMATION_SYMBOL)); boolean isNotEqualComparisonAlternative = previousType == LESS_SYMBOL && token == GREATER_SYMBOL; if (!isDotIdentifiers && !isComma && !isEqualComparison && !isNotEqualComparisonAlternative) { @@ -103,9 +99,7 @@ public void visitTerminal(TerminalNode node) { } @Override - public void visitErrorNode(ErrorNode node) { - - } + public void visitErrorNode(ErrorNode node) {} public String getAnonymizedQueryString() { return "(" + anonymizedQueryString + ")"; diff --git a/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java b/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java index 4f7b925718..d1a6adc236 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java +++ b/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import org.antlr.v4.runtime.CommonTokenStream; @@ -16,16 +15,15 @@ import 
org.opensearch.sql.sql.antlr.parser.OpenSearchSQLLexer; import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser; -/** - * SQL syntax parser which encapsulates an ANTLR parser. - */ +/** SQL syntax parser which encapsulates an ANTLR parser. */ public class SQLSyntaxParser implements Parser { private static final Logger LOG = LogManager.getLogger(SQLSyntaxParser.class); /** * Parse a SQL query by ANTLR parser. - * @param query a SQL query - * @return parse tree root + * + * @param query a SQL query + * @return parse tree root */ @Override public ParseTree parse(String query) { diff --git a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java index c9321f5775..4e902cb67d 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java +++ b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.domain; import java.util.Collections; @@ -20,43 +19,30 @@ import org.json.JSONObject; import org.opensearch.sql.protocol.response.format.Format; -/** - * SQL query request. - */ +/** SQL query request. */ @ToString @EqualsAndHashCode @RequiredArgsConstructor public class SQLQueryRequest { private static final String QUERY_FIELD_CURSOR = "cursor"; - private static final Set SUPPORTED_FIELDS = Set.of( - "query", "fetch_size", "parameters", QUERY_FIELD_CURSOR); + private static final Set SUPPORTED_FIELDS = + Set.of("query", "fetch_size", "parameters", QUERY_FIELD_CURSOR); private static final String QUERY_PARAMS_FORMAT = "format"; private static final String QUERY_PARAMS_SANITIZE = "sanitize"; - /** - * JSON payload in REST request. - */ + /** JSON payload in REST request. */ private final JSONObject jsonContent; - /** - * SQL query. - */ - @Getter - private final String query; + /** SQL query. */ + @Getter private final String query; - /** - * Request path. 
- */ + /** Request path. */ private final String path; - /** - * Request format. - */ + /** Request format. */ private final String format; - /** - * Request params. - */ + /** Request params. */ private Map params = Collections.emptyMap(); @Getter @@ -65,11 +51,13 @@ public class SQLQueryRequest { private String cursor; - /** - * Constructor of SQLQueryRequest that passes request params. - */ - public SQLQueryRequest(JSONObject jsonContent, String query, String path, - Map params, String cursor) { + /** Constructor of SQLQueryRequest that passes request params. */ + public SQLQueryRequest( + JSONObject jsonContent, + String query, + String path, + Map params, + String cursor) { this.jsonContent = jsonContent; this.query = query; this.path = path; @@ -80,24 +68,30 @@ public SQLQueryRequest(JSONObject jsonContent, String query, String path, } /** + * + * + *
    * Pre-check if the request can be supported by meeting ALL the following criteria:
    *  1.Only supported fields present in request body, ex. "filter" and "cursor" are not supported
    *  2.Response format is default or can be supported.
+   * 
* * @return true if supported. */ public boolean isSupported() { var noCursor = !isCursor(); var noQuery = query == null; - var noUnsupportedParams = params.isEmpty() - || (params.size() == 1 && params.containsKey(QUERY_PARAMS_FORMAT)); + var noUnsupportedParams = + params.isEmpty() || (params.size() == 1 && params.containsKey(QUERY_PARAMS_FORMAT)); var noContent = jsonContent == null || jsonContent.isEmpty(); - return ((!noCursor && noQuery - && noUnsupportedParams && noContent) // if cursor is given, but other things - || (noCursor && !noQuery)) // or if cursor is not given, but query - && isOnlySupportedFieldInPayload() // and request has supported fields only - && isSupportedFormat(); // and request is in supported format + return ((!noCursor + && noQuery + && noUnsupportedParams + && noContent) // if cursor is given, but other things + || (noCursor && !noQuery)) // or if cursor is not given, but query + && isOnlySupportedFieldInPayload() // and request has supported fields only + && isSupportedFormat(); // and request is in supported format } private boolean isCursor() { @@ -106,6 +100,7 @@ private boolean isCursor() { /** * Check if request is to explain rather than execute the query. + * * @return true if it is an explain request */ public boolean isExplainRequest() { @@ -116,16 +111,14 @@ public boolean isCursorCloseRequest() { return path.endsWith("/close"); } - /** - * Decide on the formatter by the requested format. - */ + /** Decide on the formatter by the requested format. 
*/ public Format format() { Optional optionalFormat = Format.of(format); if (optionalFormat.isPresent()) { return optionalFormat.get(); } else { throw new IllegalArgumentException( - String.format(Locale.ROOT,"response in %s format is not supported.", format)); + String.format(Locale.ROOT, "response in %s format is not supported.", format)); } } @@ -155,5 +148,4 @@ private boolean shouldSanitize(Map params) { } return true; } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java index bd4464d00e..e46147b7a3 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static java.util.Collections.emptyList; @@ -27,6 +26,8 @@ import org.opensearch.sql.sql.parser.context.QuerySpecification; /** + * + * *
SelectExpressionAnalyzerTest
  * AST aggregation builder that builds AST aggregation node for the following scenarios:
  *
@@ -59,9 +60,7 @@
 @RequiredArgsConstructor
 public class AstAggregationBuilder extends OpenSearchSQLParserBaseVisitor {
 
-  /**
-   * Query specification that contains info collected beforehand.
-   */
+  /** Query specification that contains info collected beforehand. */
   private final QuerySpecification querySpec;
 
   @Override
@@ -78,10 +77,7 @@ public UnresolvedPlan visit(ParseTree groupByClause) {
 
   private UnresolvedPlan buildExplicitAggregation() {
     List groupByItems = replaceGroupByItemIfAliasOrOrdinal();
-    return new Aggregation(
-        new ArrayList<>(querySpec.getAggregators()),
-        emptyList(),
-        groupByItems);
+    return new Aggregation(new ArrayList<>(querySpec.getAggregators()), emptyList(), groupByItems);
   }
 
   private UnresolvedPlan buildImplicitAggregation() {
@@ -89,33 +85,32 @@ private UnresolvedPlan buildImplicitAggregation() {
 
     if (invalidSelectItem.isPresent()) {
       // Report semantic error to avoid fall back to old engine again
-      throw new SemanticCheckException(StringUtils.format(
-          "Explicit GROUP BY clause is required because expression [%s] "
-              + "contains non-aggregated column", invalidSelectItem.get()));
+      throw new SemanticCheckException(
+          StringUtils.format(
+              "Explicit GROUP BY clause is required because expression [%s] "
+                  + "contains non-aggregated column",
+              invalidSelectItem.get()));
     }
 
     return new Aggregation(
-        new ArrayList<>(querySpec.getAggregators()),
-        emptyList(),
-        querySpec.getGroupByItems());
+        new ArrayList<>(querySpec.getAggregators()), emptyList(), querySpec.getGroupByItems());
   }
 
   private List replaceGroupByItemIfAliasOrOrdinal() {
-    return querySpec.getGroupByItems()
-                    .stream()
-                    .map(querySpec::replaceIfAliasOrOrdinal)
-                    .map(expr -> new Alias(expr.toString(), expr))
-                    .collect(Collectors.toList());
+    return querySpec.getGroupByItems().stream()
+        .map(querySpec::replaceIfAliasOrOrdinal)
+        .map(expr -> new Alias(expr.toString(), expr))
+        .collect(Collectors.toList());
   }
 
   /**
-   * Find non-aggregate item in SELECT clause. Note that literal is special which is not required
-   * to be applied by aggregate function.
+   * Find non-aggregate item in SELECT clause. Note that literal is special which is not required to
+   * be applied by aggregate function.
    */
   private Optional findNonAggregatedItemInSelect() {
     return querySpec.getSelectItems().stream()
-                                     .filter(this::isNonAggregateOrLiteralExpression)
-                                     .findFirst();
+        .filter(this::isNonAggregateOrLiteralExpression)
+        .findFirst();
   }
 
   private boolean isAggregatorNotFoundAnywhere() {
@@ -132,8 +127,7 @@ private boolean isNonAggregateOrLiteralExpression(UnresolvedExpression expr) {
     }
 
     List children = expr.getChild();
-    return children.stream().anyMatch(child ->
-        isNonAggregateOrLiteralExpression((UnresolvedExpression) child));
+    return children.stream()
+        .anyMatch(child -> isNonAggregateOrLiteralExpression((UnresolvedExpression) child));
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
index 020889c082..ab96f16263 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static java.util.Collections.emptyList;
@@ -43,22 +42,18 @@
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParserBaseVisitor;
 import org.opensearch.sql.sql.parser.context.ParsingContext;
 
-/**
- * Abstract syntax tree (AST) builder.
- */
+/** Abstract syntax tree (AST) builder. */
 @RequiredArgsConstructor
 public class AstBuilder extends OpenSearchSQLParserBaseVisitor {
 
   private final AstExpressionBuilder expressionBuilder = new AstExpressionBuilder();
 
-  /**
-   * Parsing context stack that contains context for current query parsing.
-   */
+  /** Parsing context stack that contains context for current query parsing. */
   private final ParsingContext context = new ParsingContext();
 
   /**
-   * SQL query to get original token text. This is necessary because token.getText() returns
-   * text without whitespaces or other characters discarded by lexer.
+   * SQL query to get original token text. This is necessary because token.getText() returns text
+   * without whitespaces or other characters discarded by lexer.
    */
   private final String query;
 
@@ -91,8 +86,7 @@ public UnresolvedPlan visitQuerySpecification(QuerySpecificationContext queryCon
 
     if (queryContext.fromClause() == null) {
       Optional allFields =
-          project.getProjectList().stream().filter(node -> node instanceof AllFields)
-              .findFirst();
+          project.getProjectList().stream().filter(node -> node instanceof AllFields).findFirst();
       if (allFields.isPresent()) {
         throw new SyntaxCheckException("No FROM clause found for select all");
       }
@@ -119,9 +113,8 @@ public UnresolvedPlan visitQuerySpecification(QuerySpecificationContext queryCon
 
   @Override
   public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) {
-    ImmutableList.Builder builder =
-        new ImmutableList.Builder<>();
-    if (ctx.selectElements().star != null) { //TODO: project operator should be required?
+    ImmutableList.Builder builder = new ImmutableList.Builder<>();
+    if (ctx.selectElements().star != null) { // TODO: project operator should be required?
       builder.add(AllFields.of());
     }
     ctx.selectElements().selectElement().forEach(field -> builder.add(visitSelectItem(field)));
@@ -132,8 +125,7 @@ public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) {
   public UnresolvedPlan visitLimitClause(OpenSearchSQLParser.LimitClauseContext ctx) {
     return new Limit(
         Integer.parseInt(ctx.limit.getText()),
-        ctx.offset == null ? 0 : Integer.parseInt(ctx.offset.getText())
-    );
+        ctx.offset == null ? 0 : Integer.parseInt(ctx.offset.getText()));
   }
 
   @Override
@@ -165,29 +157,26 @@ public UnresolvedPlan visitFromClause(FromClauseContext ctx) {
   }
 
   /**
-   * Ensure NESTED function is not used in HAVING clause and fallback to legacy engine.
-   * Can remove when support is added for NESTED function in HAVING clause.
+   * Ensure NESTED function is not used in HAVING clause and fallback to legacy engine. Can remove
+   * when support is added for NESTED function in HAVING clause.
+   *
    * @param func : Function in HAVING clause
    */
   private void verifySupportsCondition(UnresolvedExpression func) {
     if (func instanceof Function) {
-      if (((Function) func).getFuncName().equalsIgnoreCase(
-          BuiltinFunctionName.NESTED.name()
-      )) {
+      if (((Function) func).getFuncName().equalsIgnoreCase(BuiltinFunctionName.NESTED.name())) {
         throw new SyntaxCheckException(
-            "Falling back to legacy engine. Nested function is not supported in the HAVING clause."
-        );
+            "Falling back to legacy engine. Nested function is not supported in the HAVING"
+                + " clause.");
       }
-      ((Function)func).getFuncArgs().stream()
-          .forEach(e -> verifySupportsCondition(e)
-      );
+      ((Function) func).getFuncArgs().stream().forEach(e -> verifySupportsCondition(e));
     }
   }
 
   @Override
   public UnresolvedPlan visitTableAsRelation(TableAsRelationContext ctx) {
-    String tableAlias = (ctx.alias() == null) ? null
-        : StringUtils.unquoteIdentifier(ctx.alias().getText());
+    String tableAlias =
+        (ctx.alias() == null) ? null : StringUtils.unquoteIdentifier(ctx.alias().getText());
     return new Relation(visitAstExpression(ctx.tableName()), tableAlias);
   }
 
@@ -228,5 +217,4 @@ private UnresolvedExpression visitSelectItem(SelectElementContext ctx) {
       return new Alias(name, expr, alias);
     }
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
index 192514250b..6dd1e02a1d 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.opensearch.sql.ast.dsl.AstDSL.between;
@@ -113,9 +112,7 @@
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.TableNameContext;
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParserBaseVisitor;
 
-/**
- * Expression builder to parse text to expression in AST.
- */
+/** Expression builder to parse text to expression in AST. */
 public class AstExpressionBuilder extends OpenSearchSQLParserBaseVisitor {
 
   @Override
@@ -141,9 +138,7 @@ public UnresolvedExpression visitQualifiedName(QualifiedNameContext ctx) {
   @Override
   public UnresolvedExpression visitMathExpressionAtom(MathExpressionAtomContext ctx) {
     return new Function(
-        ctx.mathOperator.getText(),
-        Arrays.asList(visit(ctx.left), visit(ctx.right))
-    );
+        ctx.mathOperator.getText(), Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
@@ -152,11 +147,8 @@ public UnresolvedExpression visitNestedExpressionAtom(NestedExpressionAtomContex
   }
 
   @Override
-  public UnresolvedExpression visitNestedAllFunctionCall(
-      NestedAllFunctionCallContext ctx) {
-    return new NestedAllTupleFields(
-        visitQualifiedName(ctx.allTupleFields().path).toString()
-    );
+  public UnresolvedExpression visitNestedAllFunctionCall(NestedAllFunctionCallContext ctx) {
+    return new NestedAllTupleFields(visitQualifiedName(ctx.allTupleFields().path).toString());
   }
 
   @Override
@@ -167,39 +159,36 @@ public UnresolvedExpression visitScalarFunctionCall(ScalarFunctionCallContext ct
   @Override
   public UnresolvedExpression visitGetFormatFunctionCall(GetFormatFunctionCallContext ctx) {
     return new Function(
-        ctx.getFormatFunction().GET_FORMAT().toString(),
-        getFormatFunctionArguments(ctx));
+        ctx.getFormatFunction().GET_FORMAT().toString(), getFormatFunctionArguments(ctx));
   }
 
   @Override
-  public UnresolvedExpression visitHighlightFunctionCall(
-      HighlightFunctionCallContext ctx) {
+  public UnresolvedExpression visitHighlightFunctionCall(HighlightFunctionCallContext ctx) {
     ImmutableMap.Builder builder = ImmutableMap.builder();
-    ctx.highlightFunction().highlightArg().forEach(v -> builder.put(
-        v.highlightArgName().getText().toLowerCase(),
-        new Literal(StringUtils.unquoteText(v.highlightArgValue().getText()),
-            DataType.STRING))
-    );
+    ctx.highlightFunction()
+        .highlightArg()
+        .forEach(
+            v ->
+                builder.put(
+                    v.highlightArgName().getText().toLowerCase(),
+                    new Literal(
+                        StringUtils.unquoteText(v.highlightArgValue().getText()),
+                        DataType.STRING)));
 
-    return new HighlightFunction(visit(ctx.highlightFunction().relevanceField()),
-        builder.build());
+    return new HighlightFunction(visit(ctx.highlightFunction().relevanceField()), builder.build());
   }
 
-
   @Override
   public UnresolvedExpression visitTimestampFunctionCall(TimestampFunctionCallContext ctx) {
     return new Function(
-        ctx.timestampFunction().timestampFunctionName().getText(),
-        timestampFunctionArguments(ctx));
+        ctx.timestampFunction().timestampFunctionName().getText(), timestampFunctionArguments(ctx));
   }
 
   @Override
-  public UnresolvedExpression visitPositionFunction(
-          PositionFunctionContext ctx) {
+  public UnresolvedExpression visitPositionFunction(PositionFunctionContext ctx) {
     return new Function(
-            POSITION.getName().getFunctionName(),
-            Arrays.asList(visitFunctionArg(ctx.functionArg(0)),
-                visitFunctionArg(ctx.functionArg(1))));
+        POSITION.getName().getFunctionName(),
+        Arrays.asList(visitFunctionArg(ctx.functionArg(0)), visitFunctionArg(ctx.functionArg(1))));
   }
 
   @Override
@@ -217,8 +206,7 @@ public UnresolvedExpression visitColumnFilter(ColumnFilterContext ctx) {
   }
 
   @Override
-  public UnresolvedExpression visitShowDescribePattern(
-      ShowDescribePatternContext ctx) {
+  public UnresolvedExpression visitShowDescribePattern(ShowDescribePatternContext ctx) {
     return visit(ctx.stringLiteral());
   }
 
@@ -235,21 +223,18 @@ public UnresolvedExpression visitWindowFunctionClause(WindowFunctionClauseContex
 
     List partitionByList = Collections.emptyList();
     if (overClause.partitionByClause() != null) {
-      partitionByList = overClause.partitionByClause()
-                                  .expression()
-                                  .stream()
-                                  .map(this::visit)
-                                  .collect(Collectors.toList());
+      partitionByList =
+          overClause.partitionByClause().expression().stream()
+              .map(this::visit)
+              .collect(Collectors.toList());
     }
 
     List> sortList = Collections.emptyList();
     if (overClause.orderByClause() != null) {
-      sortList = overClause.orderByClause()
-                           .orderByElement()
-                           .stream()
-                           .map(item -> ImmutablePair.of(
-                               createSortOption(item), visit(item.expression())))
-                           .collect(Collectors.toList());
+      sortList =
+          overClause.orderByClause().orderByElement().stream()
+              .map(item -> ImmutablePair.of(createSortOption(item), visit(item.expression())))
+              .collect(Collectors.toList());
     }
     return new WindowFunction(visit(ctx.function), partitionByList, sortList);
   }
@@ -262,17 +247,12 @@ public UnresolvedExpression visitScalarWindowFunction(ScalarWindowFunctionContex
   @Override
   public UnresolvedExpression visitRegularAggregateFunctionCall(
       RegularAggregateFunctionCallContext ctx) {
-    return new AggregateFunction(
-        ctx.functionName.getText(),
-        visitFunctionArg(ctx.functionArg()));
+    return new AggregateFunction(ctx.functionName.getText(), visitFunctionArg(ctx.functionArg()));
   }
 
   @Override
   public UnresolvedExpression visitDistinctCountFunctionCall(DistinctCountFunctionCallContext ctx) {
-    return new AggregateFunction(
-        ctx.COUNT().getText(),
-        visitFunctionArg(ctx.functionArg()),
-        true);
+    return new AggregateFunction(ctx.COUNT().getText(), visitFunctionArg(ctx.functionArg()), true);
   }
 
   @Override
@@ -288,18 +268,16 @@ public UnresolvedExpression visitFilterClause(FilterClauseContext ctx) {
   @Override
   public UnresolvedExpression visitIsNullPredicate(IsNullPredicateContext ctx) {
     return new Function(
-        ctx.nullNotnull().NOT() == null ? IS_NULL.getName().getFunctionName() :
-            IS_NOT_NULL.getName().getFunctionName(),
+        ctx.nullNotnull().NOT() == null
+            ? IS_NULL.getName().getFunctionName()
+            : IS_NOT_NULL.getName().getFunctionName(),
         Arrays.asList(visit(ctx.predicate())));
   }
 
   @Override
   public UnresolvedExpression visitBetweenPredicate(BetweenPredicateContext ctx) {
     UnresolvedExpression func =
-        between(
-            visit(ctx.predicate(0)),
-            visit(ctx.predicate(1)),
-            visit(ctx.predicate(2)));
+        between(visit(ctx.predicate(0)), visit(ctx.predicate(1)), visit(ctx.predicate(2)));
 
     if (ctx.NOT() != null) {
       func = not(func);
@@ -310,26 +288,21 @@ public UnresolvedExpression visitBetweenPredicate(BetweenPredicateContext ctx) {
   @Override
   public UnresolvedExpression visitLikePredicate(LikePredicateContext ctx) {
     return new Function(
-        ctx.NOT() == null ? LIKE.getName().getFunctionName() :
-            NOT_LIKE.getName().getFunctionName(),
+        ctx.NOT() == null ? LIKE.getName().getFunctionName() : NOT_LIKE.getName().getFunctionName(),
         Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitRegexpPredicate(RegexpPredicateContext ctx) {
-    return new Function(REGEXP.getName().getFunctionName(),
-            Arrays.asList(visit(ctx.left), visit(ctx.right)));
+    return new Function(
+        REGEXP.getName().getFunctionName(), Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitInPredicate(InPredicateContext ctx) {
     UnresolvedExpression field = visit(ctx.predicate());
-    List inLists = ctx
-        .expressions()
-        .expression()
-        .stream()
-        .map(this::visit)
-        .collect(Collectors.toList());
+    List inLists =
+        ctx.expressions().expression().stream().map(this::visit).collect(Collectors.toList());
     UnresolvedExpression in = AstDSL.in(field, inLists);
     return ctx.NOT() != null ? AstDSL.not(in) : in;
   }
@@ -394,34 +367,30 @@ public UnresolvedExpression visitTimeLiteral(TimeLiteralContext ctx) {
   }
 
   @Override
-  public UnresolvedExpression visitTimestampLiteral(
-      TimestampLiteralContext ctx) {
+  public UnresolvedExpression visitTimestampLiteral(TimestampLiteralContext ctx) {
     return AstDSL.timestampLiteral(StringUtils.unquoteText(ctx.timestamp.getText()));
   }
 
   @Override
   public UnresolvedExpression visitIntervalLiteral(IntervalLiteralContext ctx) {
-    return new Interval(
-        visit(ctx.expression()), IntervalUnit.of(ctx.intervalUnit().getText()));
+    return new Interval(visit(ctx.expression()), IntervalUnit.of(ctx.intervalUnit().getText()));
   }
 
   @Override
-  public UnresolvedExpression visitBinaryComparisonPredicate(
-      BinaryComparisonPredicateContext ctx) {
+  public UnresolvedExpression visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) {
     String functionName = ctx.comparisonOperator().getText();
     return new Function(
         functionName.equals("<>") ? "!=" : functionName,
-        Arrays.asList(visit(ctx.left), visit(ctx.right))
-    );
+        Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitCaseFunctionCall(CaseFunctionCallContext ctx) {
     UnresolvedExpression caseValue = (ctx.expression() == null) ? null : visit(ctx.expression());
-    List whenStatements = ctx.caseFuncAlternative()
-                                   .stream()
-                                   .map(when -> (When) visit(when))
-                                   .collect(Collectors.toList());
+    List whenStatements =
+        ctx.caseFuncAlternative().stream()
+            .map(when -> (When) visit(when))
+            .collect(Collectors.toList());
     UnresolvedExpression elseStatement = (ctx.elseArg == null) ? null : visit(ctx.elseArg);
 
     return new Case(caseValue, whenStatements, elseStatement);
@@ -433,23 +402,19 @@ public UnresolvedExpression visitCaseFuncAlternative(CaseFuncAlternativeContext
   }
 
   @Override
-  public UnresolvedExpression visitDataTypeFunctionCall(
-      DataTypeFunctionCallContext ctx) {
+  public UnresolvedExpression visitDataTypeFunctionCall(DataTypeFunctionCallContext ctx) {
     return new Cast(visit(ctx.expression()), visit(ctx.convertedDataType()));
   }
 
   @Override
-  public UnresolvedExpression visitConvertedDataType(
-      ConvertedDataTypeContext ctx) {
+  public UnresolvedExpression visitConvertedDataType(ConvertedDataTypeContext ctx) {
     return AstDSL.stringLiteral(ctx.getText());
   }
 
   @Override
-  public UnresolvedExpression visitNoFieldRelevanceFunction(
-          NoFieldRelevanceFunctionContext ctx) {
+  public UnresolvedExpression visitNoFieldRelevanceFunction(NoFieldRelevanceFunctionContext ctx) {
     return new Function(
-            ctx.noFieldRelevanceFunctionName().getText().toLowerCase(),
-            noFieldRelevanceArguments(ctx));
+        ctx.noFieldRelevanceFunctionName().getText().toLowerCase(), noFieldRelevanceArguments(ctx));
   }
 
   @Override
@@ -475,10 +440,9 @@ public UnresolvedExpression visitMultiFieldRelevanceFunction(
     // 'MULTI_MATCH('query'='query_val', 'fields'='*fields_val')'
     String funcName = StringUtils.unquoteText(ctx.multiFieldRelevanceFunctionName().getText());
     if ((funcName.equalsIgnoreCase(BuiltinFunctionName.MULTI_MATCH.toString())
-        || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCH.toString())
-        || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCHQUERY.toString()))
-        && !ctx.getRuleContexts(AlternateMultiMatchQueryContext.class)
-        .isEmpty()) {
+            || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCH.toString())
+            || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCHQUERY.toString()))
+        && !ctx.getRuleContexts(AlternateMultiMatchQueryContext.class).isEmpty()) {
       return new Function(
           ctx.multiFieldRelevanceFunctionName().getText().toLowerCase(),
           alternateMultiMatchArguments(ctx));
@@ -511,78 +475,81 @@ public UnresolvedExpression visitScoreRelevanceFunction(ScoreRelevanceFunctionCo
     return new ScoreFunction(visit(ctx.relevanceFunction()), weight);
   }
 
-  private Function buildFunction(String functionName,
-                                 List arg) {
+  private Function buildFunction(String functionName, List arg) {
     return new Function(
-        functionName,
-        arg
-            .stream()
-            .map(this::visitFunctionArg)
-            .collect(Collectors.toList())
-    );
+        functionName, arg.stream().map(this::visitFunctionArg).collect(Collectors.toList()));
   }
 
   @Override
   public UnresolvedExpression visitExtractFunctionCall(ExtractFunctionCallContext ctx) {
     return new Function(
-        ctx.extractFunction().EXTRACT().toString(),
-        getExtractFunctionArguments(ctx));
+        ctx.extractFunction().EXTRACT().toString(), getExtractFunctionArguments(ctx));
   }
 
-
   private QualifiedName visitIdentifiers(List identifiers) {
     return new QualifiedName(
         identifiers.stream()
-                   .map(RuleContext::getText)
-                   .map(StringUtils::unquoteIdentifier)
-                   .collect(Collectors.toList()));
+            .map(RuleContext::getText)
+            .map(StringUtils::unquoteIdentifier)
+            .collect(Collectors.toList()));
   }
 
-  private void fillRelevanceArgs(List args,
-                                 ImmutableList.Builder builder) {
+  private void fillRelevanceArgs(
+      List args, ImmutableList.Builder builder) {
     // To support old syntax we must support argument keys as quoted strings.
-    args.forEach(v -> builder.add(v.argName == null
-        ? new UnresolvedArgument(v.relevanceArgName().getText().toLowerCase(),
-            new Literal(StringUtils.unquoteText(v.relevanceArgValue().getText()),
-            DataType.STRING))
-        : new UnresolvedArgument(StringUtils.unquoteText(v.argName.getText()).toLowerCase(),
-            new Literal(StringUtils.unquoteText(v.argVal.getText()), DataType.STRING))));
+    args.forEach(
+        v ->
+            builder.add(
+                v.argName == null
+                    ? new UnresolvedArgument(
+                        v.relevanceArgName().getText().toLowerCase(),
+                        new Literal(
+                            StringUtils.unquoteText(v.relevanceArgValue().getText()),
+                            DataType.STRING))
+                    : new UnresolvedArgument(
+                        StringUtils.unquoteText(v.argName.getText()).toLowerCase(),
+                        new Literal(
+                            StringUtils.unquoteText(v.argVal.getText()), DataType.STRING))));
   }
 
   private List noFieldRelevanceArguments(
-          NoFieldRelevanceFunctionContext ctx) {
+      NoFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("query",
-            new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
   private List singleFieldRelevanceArguments(
-        SingleFieldRelevanceFunctionContext ctx) {
+      SingleFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("field",
-        new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
-
   private List altSingleFieldRelevanceFunctionArguments(
       AltSingleFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("field",
-        new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
@@ -592,43 +559,45 @@ private List multiFieldRelevanceArguments(
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    var fields = new RelevanceFieldList(ctx
-        .getRuleContexts(RelevanceFieldAndWeightContext.class)
-        .stream()
-        .collect(Collectors.toMap(
-            f -> StringUtils.unquoteText(f.field.getText()),
-            f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText()))));
+    var fields =
+        new RelevanceFieldList(
+            ctx.getRuleContexts(RelevanceFieldAndWeightContext.class).stream()
+                .collect(
+                    Collectors.toMap(
+                        f -> StringUtils.unquoteText(f.field.getText()),
+                        f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText()))));
     builder.add(new UnresolvedArgument("fields", fields));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
-  private List getFormatFunctionArguments(
-      GetFormatFunctionCallContext ctx) {
-    List args = Arrays.asList(
-        new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING),
-        visitFunctionArg(ctx.getFormatFunction().functionArg())
-    );
+  private List getFormatFunctionArguments(GetFormatFunctionCallContext ctx) {
+    List args =
+        Arrays.asList(
+            new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING),
+            visitFunctionArg(ctx.getFormatFunction().functionArg()));
     return args;
   }
 
-  private List timestampFunctionArguments(
-      TimestampFunctionCallContext ctx) {
-    List args = Arrays.asList(
-        new Literal(
-            ctx.timestampFunction().simpleDateTimePart().getText(),
-            DataType.STRING),
-        visitFunctionArg(ctx.timestampFunction().firstArg),
-        visitFunctionArg(ctx.timestampFunction().secondArg)
-    );
+  private List timestampFunctionArguments(TimestampFunctionCallContext ctx) {
+    List args =
+        Arrays.asList(
+            new Literal(ctx.timestampFunction().simpleDateTimePart().getText(), DataType.STRING),
+            visitFunctionArg(ctx.timestampFunction().firstArg),
+            visitFunctionArg(ctx.timestampFunction().secondArg));
     return args;
   }
 
   /**
+   *
+   *
+   * 
    * Adds support for multi_match alternate syntax like
    * MULTI_MATCH('query'='Dale', 'fields'='*name').
+   * 
* * @param ctx : Context for multi field relevance function. * @return : Returns list of all arguments for relevance function. @@ -640,25 +609,32 @@ private List alternateMultiMatchArguments( ImmutableList.Builder builder = ImmutableList.builder(); Map fieldAndWeightMap = new HashMap<>(); - String[] fieldAndWeights = StringUtils.unquoteText( - ctx.getRuleContexts(AlternateMultiMatchFieldContext.class) - .stream().findFirst().get().argVal.getText()).split(","); + String[] fieldAndWeights = + StringUtils.unquoteText( + ctx.getRuleContexts(AlternateMultiMatchFieldContext.class).stream() + .findFirst() + .get() + .argVal + .getText()) + .split(","); for (var fieldAndWeight : fieldAndWeights) { String[] splitFieldAndWeights = fieldAndWeight.split("\\^"); - fieldAndWeightMap.put(splitFieldAndWeights[0], + fieldAndWeightMap.put( + splitFieldAndWeights[0], splitFieldAndWeights.length > 1 ? Float.parseFloat(splitFieldAndWeights[1]) : 1F); } - builder.add(new UnresolvedArgument("fields", - new RelevanceFieldList(fieldAndWeightMap))); - - ctx.getRuleContexts(AlternateMultiMatchQueryContext.class) - .stream().findFirst().ifPresent( - arg -> - builder.add(new UnresolvedArgument("query", + builder.add(new UnresolvedArgument("fields", new RelevanceFieldList(fieldAndWeightMap))); + + ctx.getRuleContexts(AlternateMultiMatchQueryContext.class).stream() + .findFirst() + .ifPresent( + arg -> + builder.add( + new UnresolvedArgument( + "query", new Literal( - StringUtils.unquoteText(arg.argVal.getText()), DataType.STRING))) - ); + StringUtils.unquoteText(arg.argVal.getText()), DataType.STRING)))); fillRelevanceArgs(ctx.relevanceArg(), builder); @@ -674,18 +650,18 @@ private List altMultiFieldRelevanceFunctionArguments( ImmutableList.Builder builder = ImmutableList.builder(); var fields = new RelevanceFieldList(map); builder.add(new UnresolvedArgument("fields", fields)); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), 
DataType.STRING))); + builder.add( + new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); fillRelevanceArgs(ctx.relevanceArg(), builder); return builder.build(); } - private List getExtractFunctionArguments( - ExtractFunctionCallContext ctx) { - List args = Arrays.asList( - new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING), - visitFunctionArg(ctx.extractFunction().functionArg()) - ); + private List getExtractFunctionArguments(ExtractFunctionCallContext ctx) { + List args = + Arrays.asList( + new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING), + visitFunctionArg(ctx.extractFunction().functionArg())); return args; } } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java index f90ea2f991..94c11d05af 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.QualifiedNameContext; @@ -13,10 +12,9 @@ import org.opensearch.sql.sql.parser.context.QuerySpecification; /** - * AST Having filter builder that builds HAVING clause condition expressions - * and replace alias by original expression in SELECT clause. - * The reason for this is it's hard to replace afterwards since UnresolvedExpression - * is immutable. + * AST Having filter builder that builds HAVING clause condition expressions and replace alias by + * original expression in SELECT clause. The reason for this is it's hard to replace afterwards + * since UnresolvedExpression is immutable. 
*/ @RequiredArgsConstructor public class AstHavingFilterBuilder extends AstExpressionBuilder { @@ -34,5 +32,4 @@ private UnresolvedExpression replaceAlias(UnresolvedExpression expr) { } return expr; } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java index 1b872dce54..2594709f4f 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.opensearch.sql.ast.dsl.AstDSL.booleanLiteral; @@ -27,8 +26,8 @@ import org.opensearch.sql.sql.parser.context.QuerySpecification; /** - * AST sort builder that builds Sort AST node from ORDER BY clause. During this process, the item - * in order by may be replaced by item in project list if it's an alias or ordinal. This is same as + * AST sort builder that builds Sort AST node from ORDER BY clause. During this process, the item in + * order by may be replaced by item in project list if it's an alias or ordinal. This is same as * GROUP BY building process. */ @RequiredArgsConstructor @@ -38,9 +37,7 @@ public class AstSortBuilder extends OpenSearchSQLParserBaseVisitor createSortFields() { @@ -57,8 +54,8 @@ private List createSortFields() { } /** - * Argument "asc" is required. - * Argument "nullFirst" is optional and determined by Analyzer later if absent. + * Argument "asc" is required. Argument "nullFirst" is optional and determined by Analyzer later + * if absent. 
*/ private List createSortArguments(SortOption option) { SortOrder sortOrder = option.getSortOrder(); @@ -71,5 +68,4 @@ private List createSortArguments(SortOption option) { } return args.build(); } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java b/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java index 947dca51b9..3c60d43733 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.opensearch.sql.ast.tree.Sort.NullOrder; @@ -16,33 +15,24 @@ import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.TerminalNode; -/** - * Parser Utils Class. - */ +/** Parser Utils Class. */ @UtilityClass public class ParserUtils { - /** - * Get original text in query. - */ + /** Get original text in query. */ public static String getTextInQuery(ParserRuleContext ctx, String queryString) { Token start = ctx.getStart(); Token stop = ctx.getStop(); return queryString.substring(start.getStartIndex(), stop.getStopIndex() + 1); } - /** - * Create sort option from syntax tree node. - */ + /** Create sort option from syntax tree node. */ public static SortOption createSortOption(OrderByElementContext orderBy) { return new SortOption( - createSortOrder(orderBy.order), - createNullOrder(orderBy.FIRST(), orderBy.LAST())); + createSortOrder(orderBy.order), createNullOrder(orderBy.FIRST(), orderBy.LAST())); } - /** - * Create sort order for sort option use from ASC/DESC token. - */ + /** Create sort order for sort option use from ASC/DESC token. */ public static SortOrder createSortOrder(Token ctx) { if (ctx == null) { return null; @@ -50,9 +40,7 @@ public static SortOrder createSortOrder(Token ctx) { return SortOrder.valueOf(ctx.getText().toUpperCase()); } - /** - * Create null order for sort option use from FIRST/LAST token. 
- */ + /** Create null order for sort option use from FIRST/LAST token. */ public static NullOrder createNullOrder(TerminalNode first, TerminalNode last) { if (first != null) { return NullOrder.NULL_FIRST; @@ -62,5 +50,4 @@ public static NullOrder createNullOrder(TerminalNode first, TerminalNode last) { return null; } } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java b/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java index 33b313367d..297fdfd749 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java @@ -3,21 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser.context; import java.util.ArrayDeque; import java.util.Deque; /** - * SQL parsing context that maintains stack of query specifications for nested queries. - * Currently this is just a thin wrapper by a stack. + * SQL parsing context that maintains stack of query specifications for nested queries. Currently + * this is just a thin wrapper by a stack. */ public class ParsingContext { /** - * Use stack rather than linked query specification because there is no need - * to look up through the stack. + * Use stack rather than linked query specification because there is no need to look up through + * the stack. */ private final Deque contexts = new ArrayDeque<>(); @@ -31,10 +30,10 @@ public QuerySpecification peek() { /** * Pop up query context. - * @return query context after popup. + * + * @return query context after popup. 
*/ public QuerySpecification pop() { return contexts.pop(); } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java b/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java index 21dddde2b9..5625371f05 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser.context; import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.FilteredAggregationFunctionCallContext; @@ -42,6 +41,7 @@ /** * Query specification domain that collects basic info for a simple query. + * *
  * (I) What is the impact of this new abstraction?
  *  This abstraction and collecting process turns AST building process into two phases:
@@ -61,10 +61,9 @@
 @ToString
 public class QuerySpecification {
 
-  /**
-   * Items in SELECT clause and mapping from alias to select item.
-   */
+  /** Items in SELECT clause and mapping from alias to select item. */
   private final List selectItems = new ArrayList<>();
+
   private final Map selectItemsByAlias = new HashMap<>();
 
   /**
@@ -74,31 +73,36 @@ public class QuerySpecification {
   private final Set aggregators = new LinkedHashSet<>();
 
   /**
+   *
+   *
+   * 
    * Items in GROUP BY clause that may be:
    *  1) Simple field name
    *  2) Field nested in scalar function call
    *  3) Ordinal that points to expression in SELECT
    *  4) Alias that points to expression in SELECT.
+   *  
*/ private final List groupByItems = new ArrayList<>(); - /** - * Items in ORDER BY clause that may be different forms as above and its options. - */ + /** Items in ORDER BY clause that may be different forms as above and its options. */ private final List orderByItems = new ArrayList<>(); + private final List orderByOptions = new ArrayList<>(); /** * Collect all query information in the parse tree excluding info in sub-query). - * @param query query spec node in parse tree + * + * @param query query spec node in parse tree */ public void collect(QuerySpecificationContext query, String queryString) { query.accept(new QuerySpecificationCollector(queryString)); } /** - * Replace unresolved expression if it's an alias or ordinal that represents - * an actual expression in SELECT list. + * Replace unresolved expression if it's an alias or ordinal that represents an actual expression + * in SELECT list. + * * @param expr item to be replaced * @return select item that the given expr represents */ @@ -118,8 +122,8 @@ private boolean isIntegerLiteral(UnresolvedExpression expr) { } if (((Literal) expr).getType() != DataType.INTEGER) { - throw new SemanticCheckException(StringUtils.format( - "Non-integer constant [%s] found in ordinal", expr)); + throw new SemanticCheckException( + StringUtils.format("Non-integer constant [%s] found in ordinal", expr)); } return true; } @@ -127,25 +131,26 @@ private boolean isIntegerLiteral(UnresolvedExpression expr) { private UnresolvedExpression getSelectItemByOrdinal(UnresolvedExpression expr) { int ordinal = (Integer) ((Literal) expr).getValue(); if (ordinal <= 0 || ordinal > selectItems.size()) { - throw new SemanticCheckException(StringUtils.format( - "Ordinal [%d] is out of bound of select item list", ordinal)); + throw new SemanticCheckException( + StringUtils.format("Ordinal [%d] is out of bound of select item list", ordinal)); } return selectItems.get(ordinal - 1); } /** * Check if an expression is a select alias. 
- * @param expr expression + * + * @param expr expression * @return true if it's an alias */ public boolean isSelectAlias(UnresolvedExpression expr) { - return (expr instanceof QualifiedName) - && (selectItemsByAlias.containsKey(expr.toString())); + return (expr instanceof QualifiedName) && (selectItemsByAlias.containsKey(expr.toString())); } /** * Get original expression aliased in SELECT clause. - * @param expr alias + * + * @param expr alias * @return expression in SELECT */ public UnresolvedExpression getSelectItemByAlias(UnresolvedExpression expr) { @@ -223,8 +228,7 @@ public Void visitAggregateFunctionCall(AggregateFunctionCallContext ctx) { @Override public Void visitFilteredAggregationFunctionCall(FilteredAggregationFunctionCallContext ctx) { UnresolvedExpression aggregateFunction = visitAstExpression(ctx); - aggregators.add( - AstDSL.alias(getTextInQuery(ctx, queryString), aggregateFunction)); + aggregators.add(AstDSL.alias(getTextInQuery(ctx, queryString), aggregateFunction)); return super.visitFilteredAggregationFunctionCall(ctx); } @@ -236,5 +240,4 @@ private UnresolvedExpression visitAstExpression(ParseTree tree) { return expressionBuilder.visit(tree); } } - } diff --git a/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java b/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java index 63d7666c62..87f2083774 100644 --- a/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java +++ b/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java @@ -7,16 +7,14 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * A base class for tests for SQL or PPL parser. - */ +/** A base class for tests for SQL or PPL parser. 
*/ @RequiredArgsConstructor(access = AccessLevel.PROTECTED) public abstract class SyntaxParserTestBase { - @Getter - private final Parser parser; + @Getter private final Parser parser; /** * A helper function that fails a test if the parser rejects a given query. + * * @param query Query to test. */ protected void acceptQuery(String query) { @@ -25,6 +23,7 @@ protected void acceptQuery(String query) { /** * A helper function that fails a test if the parser accepts a given query. + * * @param query Query to test. */ protected void rejectQuery(String query) { diff --git a/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java b/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java index f4342d877d..8cb2994dc3 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -45,14 +44,13 @@ class SQLServiceTest { private DefaultQueryManager queryManager; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; @BeforeEach public void setUp() { queryManager = DefaultQueryManager.defaultQueryManager(); - sqlService = new SQLService(new SQLSyntaxParser(), queryManager, - new QueryPlanFactory(queryService)); + sqlService = + new SQLService(new SQLSyntaxParser(), queryManager, new QueryPlanFactory(queryService)); } @AfterEach @@ -97,8 +95,8 @@ public void onFailure(Exception e) { @Test public void can_execute_close_cursor_query() { sqlService.execute( - new SQLQueryRequest(new JSONObject(), null, QUERY + "/close", - Map.of("format", "jdbc"), "n:cursor"), + new SQLQueryRequest( + new JSONObject(), null, QUERY + "/close", Map.of("format", "jdbc"), "n:cursor"), new ResponseListener<>() { @Override public void onResponse(QueryResponse response) { @@ -131,13 +129,17 @@ public void onFailure(Exception e) { 
@Test public void can_explain_sql_query() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new ExplainResponse(new ExplainResponseNode("Test"))); - return null; - }).when(queryService).explain(any(), any()); + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new ExplainResponse(new ExplainResponseNode("Test"))); + return null; + }) + .when(queryService) + .explain(any(), any()); - sqlService.explain(new SQLQueryRequest(new JSONObject(), "SELECT 123", EXPLAIN, "csv"), + sqlService.explain( + new SQLQueryRequest(new JSONObject(), "SELECT 123", EXPLAIN, "csv"), new ResponseListener() { @Override public void onResponse(ExplainResponse response) { @@ -153,8 +155,8 @@ public void onFailure(Exception e) { @Test public void cannot_explain_cursor_query() { - sqlService.explain(new SQLQueryRequest(new JSONObject(), null, EXPLAIN, - Map.of("format", "jdbc"), "n:cursor"), + sqlService.explain( + new SQLQueryRequest(new JSONObject(), null, EXPLAIN, Map.of("format", "jdbc"), "n:cursor"), new ResponseListener() { @Override public void onResponse(ExplainResponse response) { @@ -163,8 +165,10 @@ public void onResponse(ExplainResponse response) { @Override public void onFailure(Exception e) { - assertEquals("Explain of a paged query continuation is not supported." - + " Use `explain` for the initial query request.", e.getMessage()); + assertEquals( + "Explain of a paged query continuation is not supported." 
+ + " Use `explain` for the initial query request.", + e.getMessage()); } }); } diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java index 0f7a284aa7..120cd233fc 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import org.junit.jupiter.api.Test; diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java index 6826a37c0b..a0dbc2fc02 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java @@ -15,14 +15,14 @@ void single_field_test() { @Test void multiple_highlights_test() { - acceptQuery("SELECT HIGHLIGHT(Tags), HIGHLIGHT(Body) FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + acceptQuery( + "SELECT HIGHLIGHT(Tags), HIGHLIGHT(Body) FROM Index " + + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); } @Test void wildcard_test() { - acceptQuery("SELECT HIGHLIGHT('T*') FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + acceptQuery("SELECT HIGHLIGHT('T*') FROM Index WHERE MULTI_MATCH([Tags, Body], 'Time')"); } @Test @@ -33,13 +33,12 @@ void highlight_all_test() { @Test void multiple_parameters_failure_test() { - rejectQuery("SELECT HIGHLIGHT(Tags1, Tags2) FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + rejectQuery( + "SELECT HIGHLIGHT(Tags1, Tags2) FROM Index WHERE MULTI_MATCH([Tags, Body], 'Time')"); } @Test void no_parameters_failure_test() { - rejectQuery("SELECT HIGHLIGHT() FROM Index " - + "WHERE MULTI_MATCH([Tags, Body], 'Time')"); + rejectQuery("SELECT HIGHLIGHT() FROM Index WHERE MULTI_MATCH([Tags, Body], 'Time')"); } } diff 
--git a/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java index 66c4d5be9d..db5ce18edb 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java @@ -25,14 +25,13 @@ static Stream generateValidArguments() { new String("max_expansions=50"), new String("fuzzy_transpositions=true"), new String("fuzzy_rewrite=constant_score"), - new String("boost=1") - ); + new String("boost=1")); } @ParameterizedTest @MethodSource("generateValidArguments") public void testValidArguments(String arg) { - acceptQuery("SELECT * FROM T WHERE MATCH_BOOL_PREFIX(message, 'query', " + arg + ")"); + acceptQuery("SELECT * FROM T WHERE MATCH_BOOL_PREFIX(message, 'query', " + arg + ")"); } @Test diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java index 3f323725ab..db091a4932 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import org.opensearch.sql.common.antlr.SyntaxParserTestBase; diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java index ade4983f58..f68c27deea 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import static org.junit.jupiter.api.Assertions.assertAll; @@ -73,8 +72,7 @@ public void canParseHiddenIndexName() { @Test public void 
canNotParseIndexNameWithSpecialChar() { - assertThrows(SyntaxCheckException.class, - () -> parser.parse("SELECT * FROM hello+world")); + assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM hello+world")); } @Test @@ -84,14 +82,12 @@ public void canParseIndexNameWithSpecialCharQuoted() { @Test public void canNotParseIndexNameStartingWithNumber() { - assertThrows(SyntaxCheckException.class, - () -> parser.parse("SELECT * FROM 123test")); + assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM 123test")); } @Test public void canNotParseIndexNameSingleQuoted() { - assertThrows(SyntaxCheckException.class, - () -> parser.parse("SELECT * FROM 'test'")); + assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM 'test'")); } @Test @@ -101,14 +97,15 @@ public void canParseWhereClause() { @Test public void canParseSelectClauseWithLogicalOperator() { - assertNotNull(parser.parse( - "SELECT age = 10 AND name = 'John' OR NOT (balance > 1000) FROM test")); + assertNotNull( + parser.parse("SELECT age = 10 AND name = 'John' OR NOT (balance > 1000) FROM test")); } @Test public void canParseWhereClauseWithLogicalOperator() { - assertNotNull(parser.parse("SELECT name FROM test " - + "WHERE age = 10 AND name = 'John' OR NOT (balance > 1000)")); + assertNotNull( + parser.parse( + "SELECT name FROM test WHERE age = 10 AND name = 'John' OR NOT (balance > 1000)")); } @Test @@ -128,9 +125,11 @@ public void canParseDistinctClause() { @Test public void canParseCaseStatement() { assertNotNull(parser.parse("SELECT CASE WHEN age > 30 THEN 'age1' ELSE 'age2' END FROM test")); - assertNotNull(parser.parse("SELECT CASE WHEN age > 30 THEN 'age1' " - + " WHEN age < 50 THEN 'age2' " - + " ELSE 'age3' END FROM test")); + assertNotNull( + parser.parse( + "SELECT CASE WHEN age > 30 THEN 'age1' " + + " WHEN age < 50 THEN 'age2' " + + " ELSE 'age3' END FROM test")); assertNotNull(parser.parse("SELECT CASE age WHEN 30 THEN 'age1' ELSE 'age2' END 
FROM test")); assertNotNull(parser.parse("SELECT CASE age WHEN 30 THEN 'age1' END FROM test")); } @@ -147,10 +146,11 @@ public void canNotParseAggregateFunctionWithWrongArgument() { public void canParseOrderByClause() { assertNotNull(parser.parse("SELECT name, age FROM test ORDER BY name, age")); assertNotNull(parser.parse("SELECT name, age FROM test ORDER BY name ASC, age DESC")); - assertNotNull(parser.parse( - "SELECT name, age FROM test ORDER BY name NULLS LAST, age NULLS FIRST")); - assertNotNull(parser.parse( - "SELECT name, age FROM test ORDER BY name ASC NULLS FIRST, age DESC NULLS LAST")); + assertNotNull( + parser.parse("SELECT name, age FROM test ORDER BY name NULLS LAST, age NULLS FIRST")); + assertNotNull( + parser.parse( + "SELECT name, age FROM test ORDER BY name ASC NULLS FIRST, age DESC NULLS LAST")); } @Test @@ -171,8 +171,7 @@ private static Stream nowLikeFunctionsData() { Arguments.of("current_date", false, true), Arguments.of("utc_date", false, true), Arguments.of("utc_time", false, true), - Arguments.of("utc_timestamp", false, true) - ); + Arguments.of("utc_timestamp", false, true)); } private static Stream getPartForExtractFunction() { @@ -196,8 +195,7 @@ private static Stream getPartForExtractFunction() { Arguments.of("DAY_SECOND"), Arguments.of("DAY_MINUTE"), Arguments.of("DAY_HOUR"), - Arguments.of("YEAR_MONTH") - ); + Arguments.of("YEAR_MONTH")); } @ParameterizedTest(name = "{0}") @@ -207,11 +205,7 @@ public void can_parse_extract_function(String part) { } private static Stream getInvalidPartForExtractFunction() { - return Stream.of( - Arguments.of("INVALID"), - Arguments.of("\"SECOND\""), - Arguments.of("123") - ); + return Stream.of(Arguments.of("INVALID"), Arguments.of("\"SECOND\""), Arguments.of("123")); } @ParameterizedTest(name = "{0}") @@ -231,9 +225,12 @@ public void can_parse_weekday_function() { @ParameterizedTest(name = "{0}") @MethodSource("nowLikeFunctionsData") public void can_parse_now_like_functions(String name, Boolean 
hasFsp, Boolean hasShortcut) { - var calls = new ArrayList() {{ - add(name + "()"); - }}; + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; if (hasShortcut) { calls.add(name); } @@ -270,8 +267,7 @@ public void can_parse_get_format_function(String type, String format) { @Test public void cannot_parse_get_format_function_with_bad_arg() { assertThrows( - SyntaxCheckException.class, - () -> parser.parse("GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); + SyntaxCheckException.class, () -> parser.parse("GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); } @Test @@ -326,53 +322,55 @@ public void can_parse_month_of_year_function() { assertNotNull(parser.parse("SELECT month(timestamp('2022-11-18 00:00:00'))")); assertNotNull(parser.parse("SELECT month_of_year(timestamp('2022-11-18 00:00:00'))")); - } @Test public void can_parse_multi_match_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multimatch(\"fields\"=\"field\", query=\"query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multimatchquery(fields=\"field\", \"query\"=\"query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(\"fields\"=\"field\", \"query\"=\"query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(\'fields\'=\'field\', \'query\'=\'query\')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(fields=\'field\', query=\'query\')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address', 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match([\"*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match([\"address\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match([`address`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE 
multi_match([address], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address', 'notes' 3], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," - + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = \"AUTO\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multimatch(\"fields\"=\"field\", query=\"query\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multimatchquery(fields=\"field\", \"query\"=\"query\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match(\"fields\"=\"field\", \"query\"=\"query\")")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match(\'fields\'=\'field\', \'query\'=\'query\')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(fields=\'field\', query=\'query\')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match(['address'], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([\"*\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([\"address\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE 
multi_match([`address`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([address], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes' 3], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + " analyzer=keyword,operator='AND', tie_breaker=0.3, type = \"most_fields\"," + + " fuzziness = \"AUTO\")")); } @Test @@ -385,160 +383,137 @@ public void can_parse_second_functions() { @Test public void can_parse_simple_query_string_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address', 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([\"*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([\"address\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([`address`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string([address], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - 
assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address', 'notes' 3], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," - + "flags='AND', quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string(['address'], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address', 'notes'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE simple_query_string([\"*\"], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string([\"address\"], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string([`address`], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE simple_query_string([address], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address', 'notes' 3], 'query')")); + assertNotNull( + 
parser.parse( + "SELECT id FROM test WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query', analyzer=keyword,flags='AND', quote_field_suffix=\".exact\"," + + " fuzzy_prefix_length = 4)")); } @Test public void can_parse_str_to_date() { - assertNotNull(parser.parse( - "SELECT STR_TO_DATE('01,5,2013','%d,%m,%Y')" - )); + assertNotNull(parser.parse("SELECT STR_TO_DATE('01,5,2013','%d,%m,%Y')")); - assertNotNull(parser.parse( - "SELECT STR_TO_DATE('a09:30:17','a%h:%i:%s')" - )); + assertNotNull(parser.parse("SELECT STR_TO_DATE('a09:30:17','a%h:%i:%s')")); - assertNotNull(parser.parse( - "SELECT STR_TO_DATE('abc','abc');" - )); + assertNotNull(parser.parse("SELECT STR_TO_DATE('abc','abc');")); } @Test public void can_parse_query_string_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['*'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['add*'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['*ess'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address', 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"address\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"ad*\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([\"*s\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE 
query_string([\"address\", \"notes\"], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`*`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`address`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`ad*`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`*ss`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([`address`, `notes`], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([address], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([addr*], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([*ss], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string([address, notes], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address', 'notes' 3], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE" - + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," - + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = 4)")); - } + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['*'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test 
WHERE query_string(['address'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['add*'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['*ess'], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string(['address', 'notes'], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"*\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"address\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"ad*\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"*s\"], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string([\"address\", \"notes\"], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`*`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`address`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`ad*`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`*ss`], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string([`address`, `notes`], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([address], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([addr*], 'query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([*ss], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string([address, notes], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE 
query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query_string(['address', 'notes' 3], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotNull( + parser.parse( + "SELECT id FROM test WHERE" + + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword," + + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = 4)")); + } @Test public void can_parse_query_relevance_function() { - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:query OR notes:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:query OR notes:query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:query`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:query OR notes:query`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('*:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"*:query\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`*:query`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:*uery OR notes:?uery')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:*uery OR notes:?uery\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:*uery OR notes:?uery`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:qu*ry OR notes:qu?ry')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:qu*ry OR 
notes:qu?ry\")")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(`address:qu*ry OR notes:qu?ry`)")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query('address:query notes:query')")); - assertNotNull(parser.parse( - "SELECT id FROM test WHERE query(\"address:query notes:query\")")); - assertNotNull(parser.parse( + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query OR notes:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"address:query\")")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query(\"address:query OR notes:query\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:query`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:query OR notes:query`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('*:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"*:query\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`*:query`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:*uery OR notes:?uery')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query(\"address:*uery OR notes:?uery\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:*uery OR notes:?uery`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:qu*ry OR notes:qu?ry')")); + assertNotNull( + parser.parse("SELECT id FROM test WHERE query(\"address:qu*ry OR notes:qu?ry\")")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:qu*ry OR notes:qu?ry`)")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query notes:query')")); + assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"address:query notes:query\")")); + assertNotNull( + parser.parse( "SELECT id FROM test WHERE " - + 
"query(\"Body:\'taste beer\' Tags:\'taste beer\' Title:\'taste beer\'\")")); + + "query(\"Body:\'taste beer\' Tags:\'taste beer\' Title:\'taste beer\'\")")); } - @Test public void can_parse_match_relevance_function() { assertNotNull(parser.parse("SELECT * FROM test WHERE match(column, \"this is a test\")")); @@ -552,19 +527,18 @@ public void can_parse_match_relevance_function() { public void can_parse_matchquery_relevance_function() { assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, 'this is a test')")); - assertNotNull(parser.parse( - "SELECT * FROM test WHERE matchquery(`column`, \"this is a test\")")); + assertNotNull( + parser.parse("SELECT * FROM test WHERE matchquery(`column`, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(`column`, 'this is a test')")); assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, 100500)")); } @Test public void can_parse_match_query_relevance_function() { - assertNotNull(parser.parse( - "SELECT * FROM test WHERE match_query(column, \"this is a test\")")); + assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, 'this is a test')")); - assertNotNull(parser.parse( - "SELECT * FROM test WHERE match_query(`column`, \"this is a test\")")); + assertNotNull( + parser.parse("SELECT * FROM test WHERE match_query(`column`, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(`column`, 'this is a test')")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, 100500)")); } @@ -572,21 +546,21 @@ public void can_parse_match_query_relevance_function() { @Test public void can_parse_match_phrase_relevance_function() { assertNotNull( - parser.parse("SELECT * FROM test WHERE match_phrase(column, \"this is a 
test\")")); + parser.parse("SELECT * FROM test WHERE match_phrase(column, \"this is a test\")")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_phrase(column, 'this is a test')")); assertNotNull( - parser.parse("SELECT * FROM test WHERE match_phrase(`column`, \"this is a test\")")); + parser.parse("SELECT * FROM test WHERE match_phrase(`column`, \"this is a test\")")); assertNotNull( - parser.parse("SELECT * FROM test WHERE match_phrase(`column`, 'this is a test')")); + parser.parse("SELECT * FROM test WHERE match_phrase(`column`, 'this is a test')")); assertNotNull(parser.parse("SELECT * FROM test WHERE match_phrase(column, 100500)")); } @Test public void can_parse_minute_of_day_function() { assertNotNull(parser.parse("SELECT minute_of_day(\"12:23:34\");")); - assertNotNull(parser.parse("SELECT minute_of_day('12:23:34');"));; - assertNotNull(parser.parse("SELECT minute_of_day(\"2022-12-14 12:23:34\");"));; - assertNotNull(parser.parse("SELECT minute_of_day('2022-12-14 12:23:34');"));; + assertNotNull(parser.parse("SELECT minute_of_day('12:23:34');")); + assertNotNull(parser.parse("SELECT minute_of_day(\"2022-12-14 12:23:34\");")); + assertNotNull(parser.parse("SELECT minute_of_day('2022-12-14 12:23:34');")); } @Test @@ -631,35 +605,20 @@ public void can_parse_wildcard_query_relevance_function() { assertNotNull( parser.parse("SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*')")); assertNotNull( - parser.parse("SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*', " - + "boost=1.5, case_insensitive=true, rewrite=\"scoring_boolean\")")); + parser.parse( + "SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*', " + + "boost=1.5, case_insensitive=true, rewrite=\"scoring_boolean\")")); } @Test public void can_parse_nested_function() { - assertNotNull( - parser.parse("SELECT NESTED(PATH.INNER_FIELD) FROM TEST")); - assertNotNull( - parser.parse("SELECT NESTED('PATH.INNER_FIELD') FROM TEST")); - assertNotNull( - 
parser.parse("SELECT SUM(NESTED(PATH.INNER_FIELD)) FROM TEST")); - assertNotNull( - parser.parse("SELECT NESTED(PATH.INNER_FIELD, PATH) FROM TEST")); - assertNotNull( - parser.parse( - "SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS) = 'A'" - ) - ); - assertNotNull( - parser.parse( - "SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS, PATH) = 'A'" - ) - ); - assertNotNull( - parser.parse( - "SELECT FIELD FROM TEST ORDER BY nested(PATH.INNER_FIELD, PATH)" - ) - ); + assertNotNull(parser.parse("SELECT NESTED(PATH.INNER_FIELD) FROM TEST")); + assertNotNull(parser.parse("SELECT NESTED('PATH.INNER_FIELD') FROM TEST")); + assertNotNull(parser.parse("SELECT SUM(NESTED(PATH.INNER_FIELD)) FROM TEST")); + assertNotNull(parser.parse("SELECT NESTED(PATH.INNER_FIELD, PATH) FROM TEST")); + assertNotNull(parser.parse("SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS) = 'A'")); + assertNotNull(parser.parse("SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS, PATH) = 'A'")); + assertNotNull(parser.parse("SELECT FIELD FROM TEST ORDER BY nested(PATH.INNER_FIELD, PATH)")); } @Test @@ -671,68 +630,69 @@ public void can_parse_yearweek_function() { @Test public void describe_request_accepts_only_quoted_string_literals() { assertAll( - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE bank")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE %bank%")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE `bank`")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE %bank% COLUMNS LIKE %status%")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("DESCRIBE TABLES LIKE 'bank' COLUMNS LIKE status")), + () -> + assertThrows( + SyntaxCheckException.class, () -> parser.parse("DESCRIBE TABLES LIKE bank")), + () -> + assertThrows( + SyntaxCheckException.class, () -> parser.parse("DESCRIBE TABLES LIKE %bank%")), + 
() -> + assertThrows( + SyntaxCheckException.class, () -> parser.parse("DESCRIBE TABLES LIKE `bank`")), + () -> + assertThrows( + SyntaxCheckException.class, + () -> parser.parse("DESCRIBE TABLES LIKE %bank% COLUMNS LIKE %status%")), + () -> + assertThrows( + SyntaxCheckException.class, + () -> parser.parse("DESCRIBE TABLES LIKE 'bank' COLUMNS LIKE status")), () -> assertNotNull(parser.parse("DESCRIBE TABLES LIKE 'bank' COLUMNS LIKE \"status\"")), - () -> assertNotNull(parser.parse("DESCRIBE TABLES LIKE \"bank\" COLUMNS LIKE 'status'")) - ); + () -> assertNotNull(parser.parse("DESCRIBE TABLES LIKE \"bank\" COLUMNS LIKE 'status'"))); } @Test public void show_request_accepts_only_quoted_string_literals() { assertAll( - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("SHOW TABLES LIKE bank")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("SHOW TABLES LIKE %bank%")), - () -> assertThrows(SyntaxCheckException.class, - () -> parser.parse("SHOW TABLES LIKE `bank`")), + () -> assertThrows(SyntaxCheckException.class, () -> parser.parse("SHOW TABLES LIKE bank")), + () -> + assertThrows(SyntaxCheckException.class, () -> parser.parse("SHOW TABLES LIKE %bank%")), + () -> + assertThrows(SyntaxCheckException.class, () -> parser.parse("SHOW TABLES LIKE `bank`")), () -> assertNotNull(parser.parse("SHOW TABLES LIKE 'bank'")), - () -> assertNotNull(parser.parse("SHOW TABLES LIKE \"bank\"")) - ); + () -> assertNotNull(parser.parse("SHOW TABLES LIKE \"bank\""))); } @ParameterizedTest @MethodSource({ - "matchPhraseComplexQueries", - "matchPhraseGeneratedQueries", - "generateMatchPhraseQueries", - "matchPhraseQueryComplexQueries" + "matchPhraseComplexQueries", + "matchPhraseGeneratedQueries", + "generateMatchPhraseQueries", + "matchPhraseQueryComplexQueries" }) public void canParseComplexMatchPhraseArgsTest(String query) { assertNotNull(parser.parse(query)); } @ParameterizedTest - @MethodSource({ - "generateMatchPhrasePrefixQueries" - }) + 
@MethodSource({"generateMatchPhrasePrefixQueries"}) public void canParseComplexMatchPhrasePrefixQueries(String query) { assertNotNull(parser.parse(query)); } private static Stream matchPhraseComplexQueries() { return Stream.of( - "SELECT * FROM t WHERE match_phrase(c, 3)", - "SELECT * FROM t WHERE match_phrase(c, 3, fuzziness=AUTO)", - "SELECT * FROM t WHERE match_phrase(c, 3, zero_terms_query=\"all\")", - "SELECT * FROM t WHERE match_phrase(c, 3, lenient=true)", - "SELECT * FROM t WHERE match_phrase(c, 3, lenient='true')", - "SELECT * FROM t WHERE match_phrase(c, 3, operator=xor)", - "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04)", - "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04, analyzer = english, " - + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')", - "SELECT * FROM t WHERE match_phrase(c, 3, minimum_should_match='2<-25% 9<-3')", - "SELECT * FROM t WHERE match_phrase(c, 3, operator='AUTO')" - ); + "SELECT * FROM t WHERE match_phrase(c, 3)", + "SELECT * FROM t WHERE match_phrase(c, 3, fuzziness=AUTO)", + "SELECT * FROM t WHERE match_phrase(c, 3, zero_terms_query=\"all\")", + "SELECT * FROM t WHERE match_phrase(c, 3, lenient=true)", + "SELECT * FROM t WHERE match_phrase(c, 3, lenient='true')", + "SELECT * FROM t WHERE match_phrase(c, 3, operator=xor)", + "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04)", + "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04, analyzer = english, " + + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')", + "SELECT * FROM t WHERE match_phrase(c, 3, minimum_should_match='2<-25% 9<-3')", + "SELECT * FROM t WHERE match_phrase(c, 3, operator='AUTO')"); } @Test @@ -771,50 +731,51 @@ private static Stream matchPhraseQueryComplexQueries() { "SELECT * FROM t WHERE matchphrasequery(c, 3, cutoff_frequency=0.04, analyzer = english, " + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')", "SELECT * FROM t WHERE 
matchphrasequery(c, 3, minimum_should_match='2<-25% 9<-3')", - "SELECT * FROM t WHERE matchphrasequery(c, 3, operator='AUTO')" - ); + "SELECT * FROM t WHERE matchphrasequery(c, 3, operator='AUTO')"); } private static Stream matchPhraseGeneratedQueries() { var matchArgs = new HashMap(); - matchArgs.put("fuzziness", new String[]{ "AUTO", "AUTO:1,5", "1" }); - matchArgs.put("fuzzy_transpositions", new Boolean[]{ true, false }); - matchArgs.put("operator", new String[]{ "and", "or" }); - matchArgs.put("minimum_should_match", - new String[]{ "3", "-2", "75%", "-25%", "3<90%", "2<-25% 9<-3" }); - matchArgs.put("analyzer", new String[]{ "standard", "stop", "english" }); - matchArgs.put("zero_terms_query", new String[]{ "none", "all" }); - matchArgs.put("lenient", new Boolean[]{ true, false }); + matchArgs.put("fuzziness", new String[] {"AUTO", "AUTO:1,5", "1"}); + matchArgs.put("fuzzy_transpositions", new Boolean[] {true, false}); + matchArgs.put("operator", new String[] {"and", "or"}); + matchArgs.put( + "minimum_should_match", new String[] {"3", "-2", "75%", "-25%", "3<90%", "2<-25% 9<-3"}); + matchArgs.put("analyzer", new String[] {"standard", "stop", "english"}); + matchArgs.put("zero_terms_query", new String[] {"none", "all"}); + matchArgs.put("lenient", new Boolean[] {true, false}); // deprecated - matchArgs.put("cutoff_frequency", new Double[]{ .0, 0.001, 1., 42. 
}); - matchArgs.put("prefix_length", new Integer[]{ 0, 2, 5 }); - matchArgs.put("max_expansions", new Integer[]{ 0, 5, 20 }); - matchArgs.put("boost", new Double[]{ .5, 1., 2.3 }); + matchArgs.put("cutoff_frequency", new Double[] {.0, 0.001, 1., 42.}); + matchArgs.put("prefix_length", new Integer[] {0, 2, 5}); + matchArgs.put("max_expansions", new Integer[] {0, 5, 20}); + matchArgs.put("boost", new Double[] {.5, 1., 2.3}); return generateQueries("match", matchArgs); } private static Stream generateMatchPhraseQueries() { var matchPhraseArgs = new HashMap(); - matchPhraseArgs.put("analyzer", new String[]{ "standard", "stop", "english" }); - matchPhraseArgs.put("max_expansions", new Integer[]{ 0, 5, 20 }); - matchPhraseArgs.put("slop", new Integer[]{ 0, 1, 2 }); + matchPhraseArgs.put("analyzer", new String[] {"standard", "stop", "english"}); + matchPhraseArgs.put("max_expansions", new Integer[] {0, 5, 20}); + matchPhraseArgs.put("slop", new Integer[] {0, 1, 2}); return generateQueries("match_phrase", matchPhraseArgs); } private static Stream generateMatchPhrasePrefixQueries() { - return generateQueries("match_phrase_prefix", ImmutableMap.builder() - .put("analyzer", new String[] {"standard", "stop", "english"}) - .put("slop", new Integer[] {0, 1, 2}) - .put("max_expansions", new Integer[] {0, 3, 10}) - .put("zero_terms_query", new String[] {"NONE", "ALL", "NULL"}) - .put("boost", new Float[] {-0.5f, 1.0f, 1.2f}) - .build()); - } - - private static Stream generateQueries(String function, - Map functionArgs) { + return generateQueries( + "match_phrase_prefix", + ImmutableMap.builder() + .put("analyzer", new String[] {"standard", "stop", "english"}) + .put("slop", new Integer[] {0, 1, 2}) + .put("max_expansions", new Integer[] {0, 3, 10}) + .put("zero_terms_query", new String[] {"NONE", "ALL", "NULL"}) + .put("boost", new Float[] {-0.5f, 1.0f, 1.2f}) + .build()); + } + + private static Stream generateQueries( + String function, Map functionArgs) { var rand = new 
Random(0); class QueryGenerator implements Iterator { @@ -822,7 +783,7 @@ class QueryGenerator implements Iterator { private int currentQuery = 0; private String randomIdentifier() { - return RandomStringUtils.random(10, 0, 0,true, false, null, rand); + return RandomStringUtils.random(10, 0, 0, true, false, null, rand); } @Override @@ -836,16 +797,17 @@ public String next() { currentQuery += 1; StringBuilder query = new StringBuilder(); - query.append(String.format("SELECT * FROM test WHERE %s(%s, %s", function, - randomIdentifier(), - randomIdentifier())); + query.append( + String.format( + "SELECT * FROM test WHERE %s(%s, %s", + function, randomIdentifier(), randomIdentifier())); var args = new ArrayList(); for (var pair : functionArgs.entrySet()) { if (rand.nextBoolean()) { var arg = new StringBuilder(); arg.append(rand.nextBoolean() ? "," : ", "); - arg.append(rand.nextBoolean() ? pair.getKey().toLowerCase() - : pair.getKey().toUpperCase()); + arg.append( + rand.nextBoolean() ? pair.getKey().toLowerCase() : pair.getKey().toUpperCase()); arg.append(rand.nextBoolean() ? "=" : " = "); if (pair.getValue() instanceof String[] || rand.nextBoolean()) { var quoteSymbol = rand.nextBoolean() ? 
'\'' : '"'; diff --git a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java index 1ffa4f0fa8..2b64b13b35 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.domain; import static org.junit.jupiter.api.Assertions.assertAll; @@ -32,21 +31,15 @@ public void should_support_query() { @Test public void should_support_query_with_JDBC_format() { - SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") - .format("jdbc") - .build(); + SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1").format("jdbc").build(); assertAll( - () -> assertTrue(request.isSupported()), - () -> assertEquals(request.format(), Format.JDBC) - ); + () -> assertTrue(request.isSupported()), () -> assertEquals(request.format(), Format.JDBC)); } @Test public void should_support_query_with_query_field_only() { SQLQueryRequest request = - SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"query\": \"SELECT 1\"}") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").jsonContent("{\"query\": \"SELECT 1\"}").build(); assertTrue(request.isSupported()); } @@ -57,21 +50,16 @@ public void should_support_query_with_parameters() { .jsonContent("{\"query\": \"SELECT 1\", \"parameters\":[]}") .build(); SQLQueryRequest requestWithParams = - SQLQueryRequestBuilder.request("SELECT 1") - .params(Map.of("one", "two")) - .build(); + SQLQueryRequestBuilder.request("SELECT 1").params(Map.of("one", "two")).build(); assertAll( () -> assertTrue(requestWithContent.isSupported()), - () -> assertTrue(requestWithParams.isSupported()) - ); + () -> assertTrue(requestWithParams.isSupported())); } @Test public void should_support_query_without_parameters() { SQLQueryRequest requestWithNoParams = - 
SQLQueryRequestBuilder.request("SELECT 1") - .params(Map.of()) - .build(); + SQLQueryRequestBuilder.request("SELECT 1").params(Map.of()).build(); assertTrue(requestWithNoParams.isSupported()); } @@ -79,8 +67,8 @@ public void should_support_query_without_parameters() { public void should_support_query_with_zero_fetch_size() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0}") - .build(); + .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0}") + .build(); assertTrue(request.isSupported()); } @@ -96,52 +84,37 @@ public void should_support_query_with_parameters_and_zero_fetch_size() { @Test public void should_support_explain() { SQLQueryRequest explainRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .path("_plugins/_sql/_explain") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").path("_plugins/_sql/_explain").build(); assertAll( () -> assertTrue(explainRequest.isExplainRequest()), - () -> assertTrue(explainRequest.isSupported()) - ); + () -> assertTrue(explainRequest.isSupported())); } @Test public void should_support_cursor_request() { SQLQueryRequest fetchSizeRequest = SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 5}") - .build(); + .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 5}") + .build(); SQLQueryRequest cursorRequest = - SQLQueryRequestBuilder.request(null) - .cursor("abcdefgh...") - .build(); + SQLQueryRequestBuilder.request(null).cursor("abcdefgh...").build(); assertAll( () -> assertTrue(fetchSizeRequest.isSupported()), - () -> assertTrue(cursorRequest.isSupported()) - ); + () -> assertTrue(cursorRequest.isSupported())); } @Test public void should_support_cursor_close_request() { SQLQueryRequest closeRequest = - SQLQueryRequestBuilder.request(null) - .cursor("pewpew") - .path("_plugins/_sql/close") - .build(); + 
SQLQueryRequestBuilder.request(null).cursor("pewpew").path("_plugins/_sql/close").build(); SQLQueryRequest emptyCloseRequest = - SQLQueryRequestBuilder.request(null) - .cursor("") - .path("_plugins/_sql/close") - .build(); + SQLQueryRequestBuilder.request(null).cursor("").path("_plugins/_sql/close").build(); - SQLQueryRequest pagingRequest = - SQLQueryRequestBuilder.request(null) - .cursor("pewpew") - .build(); + SQLQueryRequest pagingRequest = SQLQueryRequestBuilder.request(null).cursor("pewpew").build(); assertAll( () -> assertTrue(closeRequest.isSupported()), @@ -149,71 +122,52 @@ public void should_support_cursor_close_request() { () -> assertTrue(pagingRequest.isSupported()), () -> assertFalse(pagingRequest.isCursorCloseRequest()), () -> assertFalse(emptyCloseRequest.isSupported()), - () -> assertTrue(emptyCloseRequest.isCursorCloseRequest()) - ); + () -> assertTrue(emptyCloseRequest.isCursorCloseRequest())); } @Test public void should_not_support_request_with_empty_cursor() { SQLQueryRequest requestWithEmptyCursor = - SQLQueryRequestBuilder.request(null) - .cursor("") - .build(); + SQLQueryRequestBuilder.request(null).cursor("").build(); SQLQueryRequest requestWithNullCursor = - SQLQueryRequestBuilder.request(null) - .cursor(null) - .build(); + SQLQueryRequestBuilder.request(null).cursor(null).build(); assertAll( () -> assertFalse(requestWithEmptyCursor.isSupported()), - () -> assertFalse(requestWithNullCursor.isSupported()) - ); + () -> assertFalse(requestWithNullCursor.isSupported())); } @Test public void should_not_support_request_with_unknown_field() { SQLQueryRequest request = - SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"pewpew\": 42}") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").jsonContent("{\"pewpew\": 42}").build(); assertFalse(request.isSupported()); } @Test public void should_not_support_request_with_cursor_and_something_else() { SQLQueryRequest requestWithQuery = - SQLQueryRequestBuilder.request("SELECT 1") - 
.cursor("n:12356") - .build(); + SQLQueryRequestBuilder.request("SELECT 1").cursor("n:12356").build(); SQLQueryRequest requestWithParams = - SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .params(Map.of("one", "two")) - .build(); + SQLQueryRequestBuilder.request(null).cursor("n:12356").params(Map.of("one", "two")).build(); SQLQueryRequest requestWithParamsWithFormat = SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .params(Map.of("format", "jdbc")) - .build(); + .cursor("n:12356") + .params(Map.of("format", "jdbc")) + .build(); SQLQueryRequest requestWithParamsWithFormatAnd = SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .params(Map.of("format", "jdbc", "something", "else")) - .build(); + .cursor("n:12356") + .params(Map.of("format", "jdbc", "something", "else")) + .build(); SQLQueryRequest requestWithFetchSize = SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .jsonContent("{\"fetch_size\": 5}") - .build(); + .cursor("n:12356") + .jsonContent("{\"fetch_size\": 5}") + .build(); SQLQueryRequest requestWithNoParams = - SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .params(Map.of()) - .build(); + SQLQueryRequestBuilder.request(null).cursor("n:12356").params(Map.of()).build(); SQLQueryRequest requestWithNoContent = - SQLQueryRequestBuilder.request(null) - .cursor("n:12356") - .jsonContent("{}") - .build(); + SQLQueryRequestBuilder.request(null).cursor("n:12356").jsonContent("{}").build(); assertAll( () -> assertFalse(requestWithQuery.isSupported()), () -> assertFalse(requestWithParams.isSupported()), @@ -221,8 +175,7 @@ public void should_not_support_request_with_cursor_and_something_else() { () -> assertTrue(requestWithNoParams.isSupported()), () -> assertTrue(requestWithParamsWithFormat.isSupported()), () -> assertFalse(requestWithParamsWithFormatAnd.isSupported()), - () -> assertTrue(requestWithNoContent.isSupported()) - ); + () -> assertTrue(requestWithNoContent.isSupported())); } @Test @@ -234,15 
+187,11 @@ public void should_use_JDBC_format_by_default() { @Test public void should_support_CSV_format_and_sanitize() { - SQLQueryRequest csvRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .format("csv") - .build(); + SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("csv").build(); assertAll( () -> assertTrue(csvRequest.isSupported()), () -> assertEquals(csvRequest.format(), Format.CSV), - () -> assertTrue(csvRequest.sanitize()) - ); + () -> assertTrue(csvRequest.sanitize())); } @Test @@ -252,36 +201,28 @@ public void should_skip_sanitize_if_set_false() { SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").params(params).build(); assertAll( () -> assertEquals(csvRequest.format(), Format.CSV), - () -> assertFalse(csvRequest.sanitize()) - ); + () -> assertFalse(csvRequest.sanitize())); } @Test public void should_not_support_other_format() { - SQLQueryRequest csvRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .format("other") - .build(); + SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("other").build(); assertAll( () -> assertFalse(csvRequest.isSupported()), - () -> assertEquals("response in other format is not supported.", - assertThrows(IllegalArgumentException.class, csvRequest::format).getMessage()) - ); + () -> + assertEquals( + "response in other format is not supported.", + assertThrows(IllegalArgumentException.class, csvRequest::format).getMessage())); } @Test public void should_support_raw_format() { - SQLQueryRequest csvRequest = - SQLQueryRequestBuilder.request("SELECT 1") - .format("raw") - .build(); + SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("raw").build(); assertTrue(csvRequest.isSupported()); } - /** - * SQL query request build helper to improve test data setup readability. - */ + /** SQL query request build helper to improve test data setup readability. 
*/ private static class SQLQueryRequestBuilder { private String jsonContent; private String query; @@ -325,9 +266,8 @@ SQLQueryRequest build() { if (format != null) { params.put("format", format); } - return new SQLQueryRequest(jsonContent == null ? null : new JSONObject(jsonContent), - query, path, params, cursor); + return new SQLQueryRequest( + jsonContent == null ? null : new JSONObject(jsonContent), query, path, params, cursor); } } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java index 59d723e3a2..4d2addf3d3 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -23,6 +22,7 @@ public class AnonymizerListenerTest { /** * Helper function to parse SQl queries for testing purposes. + * * @param query SQL query to be anonymized. 
*/ private void parse(String query) { @@ -36,8 +36,9 @@ private void parse(String query) { @Test public void queriesShouldHaveAnonymousFieldAndIndex() { String query = "SELECT ABS(balance) FROM accounts WHERE age > 30 GROUP BY ABS(balance)"; - String expectedQuery = "( SELECT ABS ( identifier ) FROM table " - + "WHERE identifier > number GROUP BY ABS ( identifier ) )"; + String expectedQuery = + "( SELECT ABS ( identifier ) FROM table " + + "WHERE identifier > number GROUP BY ABS ( identifier ) )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -92,12 +93,13 @@ public void queriesWithAggregatesShouldAnonymizeSensitiveData() { @Test public void queriesWithSubqueriesShouldAnonymizeSensitiveData() { - String query = "SELECT a.f, a.l, a.a FROM " - + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; + String query = + "SELECT a.f, a.l, a.a FROM " + + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; String expectedQuery = - "( SELECT identifier.identifier, identifier.identifier, identifier.identifier FROM " - + "( SELECT identifier AS identifier, identifier AS identifier, identifier AS identifier " - + "FROM table WHERE identifier > number ) identifier )"; + "( SELECT identifier.identifier, identifier.identifier, identifier.identifier FROM ( SELECT" + + " identifier AS identifier, identifier AS identifier, identifier AS identifier FROM" + + " table WHERE identifier > number ) identifier )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -121,8 +123,9 @@ public void queriesWithOrderByShouldAnonymizeSensitiveData() { @Test public void queriesWithHavingShouldAnonymizeSensitiveData() { String query = "SELECT SUM(balance) FROM accounts GROUP BY lastname HAVING COUNT(balance) > 2"; - String expectedQuery = "( SELECT SUM ( identifier ) FROM table " - + "GROUP BY identifier HAVING COUNT ( identifier ) > number )"; + 
String expectedQuery = + "( SELECT SUM ( identifier ) FROM table " + + "GROUP BY identifier HAVING COUNT ( identifier ) > number )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -130,8 +133,9 @@ public void queriesWithHavingShouldAnonymizeSensitiveData() { @Test public void queriesWithHighlightShouldAnonymizeSensitiveData() { String query = "SELECT HIGHLIGHT(str0) FROM CALCS WHERE QUERY_STRING(['str0'], 'FURNITURE')"; - String expectedQuery = "( SELECT HIGHLIGHT ( identifier ) FROM table WHERE " - + "QUERY_STRING ( [ 'string_literal' ], 'string_literal' ) )"; + String expectedQuery = + "( SELECT HIGHLIGHT ( identifier ) FROM table WHERE " + + "QUERY_STRING ( [ 'string_literal' ], 'string_literal' ) )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -139,8 +143,8 @@ public void queriesWithHighlightShouldAnonymizeSensitiveData() { @Test public void queriesWithMatchShouldAnonymizeSensitiveData() { String query = "SELECT str0 FROM CALCS WHERE MATCH(str0, 'FURNITURE')"; - String expectedQuery = "( SELECT identifier FROM table " - + "WHERE MATCH ( identifier, 'string_literal' ) )"; + String expectedQuery = + "( SELECT identifier FROM table " + "WHERE MATCH ( identifier, 'string_literal' ) )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -155,10 +159,12 @@ public void queriesWithPositionShouldAnonymizeSensitiveData() { @Test public void queriesWithMatch_Bool_Prefix_ShouldAnonymizeSensitiveData() { - String query = "SELECT firstname, address FROM accounts WHERE " - + "match_bool_prefix(address, 'Bristol Street', minimum_should_match=2)"; - String expectedQuery = "( SELECT identifier, identifier FROM table WHERE MATCH_BOOL_PREFIX " - + "( identifier, 'string_literal', MINIMUM_SHOULD_MATCH = number ) )"; + String query = + "SELECT firstname, address FROM accounts WHERE " + + "match_bool_prefix(address, 'Bristol Street', 
minimum_should_match=2)"; + String expectedQuery = + "( SELECT identifier, identifier FROM table WHERE MATCH_BOOL_PREFIX " + + "( identifier, 'string_literal', MINIMUM_SHOULD_MATCH = number ) )"; parse(query); assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } @@ -195,10 +201,7 @@ public void queriesWithNotEqualAlternateShouldAnonymizeSensitiveData() { assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString()); } - - /** - * Test added for coverage, but the errorNode will not be hit normally. - */ + /** Test added for coverage, but the errorNode will not be hit normally. */ @Test public void enterErrorNote() { ErrorNode node = mock(ErrorNode.class); diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java index fff789de44..95188e20b6 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static java.util.Collections.emptyList; @@ -59,10 +58,9 @@ void can_build_group_by_clause_with_scalar_expression() { buildAggregation("SELECT ABS(age + 1) FROM test GROUP BY ABS(age + 1)"), allOf( hasGroupByItems( - alias("ABS(+(age, 1))", function("ABS", - function("+", - qualifiedName("age"), - intLiteral(1))))), + alias( + "ABS(+(age, 1))", + function("ABS", function("+", qualifiedName("age"), intLiteral(1))))), hasAggregators())); } @@ -79,9 +77,7 @@ void can_build_group_by_clause_with_complicated_aggregators() { void can_build_group_by_clause_without_aggregators() { assertThat( buildAggregation("SELECT state FROM test GROUP BY state"), - allOf( - hasGroupByItems(alias("state", qualifiedName("state"))), - hasAggregators())); + allOf(hasGroupByItems(alias("state", qualifiedName("state"))), 
hasAggregators())); } @Test @@ -101,50 +97,43 @@ void can_build_implicit_group_by_for_aggregator_in_having_clause() { buildAggregation("SELECT true FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( - buildAggregation("SELECT PI() FROM test HAVING AVG(age) > 30"), - allOf( - hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + buildAggregation("SELECT PI() FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( - buildAggregation("SELECT ABS(1.5) FROM test HAVING AVG(age) > 30"), - allOf( - hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + buildAggregation("SELECT ABS(1.5) FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( - buildAggregation("SELECT ABS(ABS(1.5)) FROM test HAVING AVG(age) > 30"), - allOf( - hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + buildAggregation("SELECT ABS(ABS(1.5)) FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( buildAggregation("SELECT INTERVAL 1 DAY FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); assertThat( buildAggregation("SELECT CAST(1 AS LONG) FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", 
aggregate("AVG", qualifiedName("age")))))); assertThat( buildAggregation("SELECT CASE WHEN true THEN 1 ELSE 2 END FROM test HAVING AVG(age) > 30"), allOf( hasGroupByItems(), - hasAggregators( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); } @Test @@ -154,8 +143,7 @@ void can_build_distinct_aggregator() { allOf( hasGroupByItems(alias("age", qualifiedName("age"))), hasAggregators( - alias("COUNT(DISTINCT name)", distinctAggregate("COUNT", qualifiedName( - "name")))))); + alias("COUNT(DISTINCT name)", distinctAggregate("COUNT", qualifiedName("name")))))); } @Test @@ -167,8 +155,8 @@ void should_build_nothing_if_no_group_by_and_no_aggregators_in_select() { void should_replace_group_by_alias_by_expression_in_select_clause() { assertThat( buildAggregation("SELECT state AS s, name FROM test GROUP BY s, name"), - hasGroupByItems(alias("state", qualifiedName("state")), - alias("name", qualifiedName("name")))); + hasGroupByItems( + alias("state", qualifiedName("state")), alias("name", qualifiedName("name")))); assertThat( buildAggregation("SELECT ABS(age) AS a FROM test GROUP BY a"), @@ -190,25 +178,30 @@ void should_replace_group_by_ordinal_by_expression_in_select_clause() { @Test void should_report_error_for_non_integer_ordinal_in_group_by() { - SemanticCheckException error = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT state AS s FROM test GROUP BY 1.5")); - assertEquals( - "Non-integer constant [1.5] found in ordinal", - error.getMessage()); + SemanticCheckException error = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT state AS s FROM test GROUP BY 1.5")); + assertEquals("Non-integer constant [1.5] found in ordinal", error.getMessage()); } - @Disabled("This validation is supposed to be in analyzing phase. This test should be enabled " + @Disabled( + "This validation is supposed to be in analyzing phase. 
This test should be enabled " + "once https://github.com/opensearch-project/sql/issues/910 has been resolved") @Test void should_report_error_for_mismatch_between_select_and_group_by_items() { - SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT name FROM test GROUP BY state")); + SemanticCheckException error1 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT name FROM test GROUP BY state")); assertEquals( "Expression [name] that contains non-aggregated column is not present in group by clause", error1.getMessage()); - SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT ABS(name + 1) FROM test GROUP BY name")); + SemanticCheckException error2 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT ABS(name + 1) FROM test GROUP BY name")); assertEquals( "Expression [Function(funcName=ABS, funcArgs=[Function(funcName=+, " + "funcArgs=[name, Literal(value=1, type=INTEGER)])])] that contains " @@ -218,15 +211,19 @@ void should_report_error_for_mismatch_between_select_and_group_by_items() { @Test void should_report_error_for_non_aggregated_item_in_select_if_no_group_by() { - SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age, AVG(balance) FROM tests")); + SemanticCheckException error1 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age, AVG(balance) FROM tests")); assertEquals( "Explicit GROUP BY clause is required because expression [age] " + "contains non-aggregated column", error1.getMessage()); - SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT ABS(age + 1), AVG(balance) FROM tests")); + SemanticCheckException error2 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT ABS(age + 1), AVG(balance) FROM tests")); 
assertEquals( "Explicit GROUP BY clause is required because expression [ABS(+(age, 1))] " + "contains non-aggregated column", @@ -235,19 +232,25 @@ void should_report_error_for_non_aggregated_item_in_select_if_no_group_by() { @Test void should_report_error_for_group_by_ordinal_out_of_bound_of_select_list() { - SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 0")); + SemanticCheckException error1 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 0")); assertEquals("Ordinal [0] is out of bound of select item list", error1.getMessage()); - SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 3")); + SemanticCheckException error2 = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 3")); assertEquals("Ordinal [3] is out of bound of select item list", error2.getMessage()); } @Test void should_report_error_for_non_aggregated_item_in_select_if_only_having() { - SemanticCheckException error = assertThrows(SemanticCheckException.class, () -> - buildAggregation("SELECT age FROM tests HAVING AVG(balance) > 30")); + SemanticCheckException error = + assertThrows( + SemanticCheckException.class, + () -> buildAggregation("SELECT age FROM tests HAVING AVG(balance) > 30")); assertEquals( "Explicit GROUP BY clause is required because expression [age] " + "contains non-aggregated column", @@ -262,10 +265,10 @@ private Matcher hasAggregators(UnresolvedExpression... exprs) { return featureValueOf("aggregators", Aggregation::getAggExprList, exprs); } - private Matcher featureValueOf(String name, - Function> getter, - UnresolvedExpression... exprs) { + private Matcher featureValueOf( + String name, + Function> getter, + UnresolvedExpression... 
exprs) { Matcher> subMatcher = (exprs.length == 0) ? equalTo(emptyList()) : equalTo(Arrays.asList(exprs)); return new FeatureMatcher>(subMatcher, name, "") { @@ -295,5 +298,4 @@ private QuerySpecificationContext parse(String query) { parser.addErrorListener(new SyntaxAnalysisErrorListener()); return parser.querySpecification(); } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java index 3e56a89754..8ab314f695 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static java.util.Collections.emptyList; @@ -53,36 +52,20 @@ public void can_build_select_literals() { alias("'hello'", stringLiteral("hello")), alias("\"world\"", stringLiteral("world")), alias("false", booleanLiteral(false)), - alias("-4.567", doubleLiteral(-4.567)) - ), - buildAST("SELECT 123, 'hello', \"world\", false, -4.567") - ); + alias("-4.567", doubleLiteral(-4.567))), + buildAST("SELECT 123, 'hello', \"world\", false, -4.567")); } @Test public void can_build_select_function_call_with_alias() { assertEquals( - project( - relation("test"), - alias( - "ABS(age)", - function("ABS", qualifiedName("age")), - "a" - ) - ), - buildAST("SELECT ABS(age) AS a FROM test") - ); + project(relation("test"), alias("ABS(age)", function("ABS", qualifiedName("age")), "a")), + buildAST("SELECT ABS(age) AS a FROM test")); } @Test public void can_build_select_all_from_index() { - assertEquals( - project( - relation("test"), - AllFields.of() - ), - buildAST("SELECT * FROM test") - ); + assertEquals(project(relation("test"), AllFields.of()), buildAST("SELECT * FROM test")); assertThrows(SyntaxCheckException.class, () -> buildAST("SELECT *")); } @@ -90,14 +73,8 @@ public void can_build_select_all_from_index() { @Test 
public void can_build_nested_select_all() { assertEquals( - project( - relation("test"), - alias("nested(field.*)", - new NestedAllTupleFields("field") - ) - ), - buildAST("SELECT nested(field.*) FROM test") - ); + project(relation("test"), alias("nested(field.*)", new NestedAllTupleFields("field"))), + buildAST("SELECT nested(field.*) FROM test")); } @Test @@ -107,32 +84,22 @@ public void can_build_select_all_and_fields_from_index() { relation("test"), AllFields.of(), alias("age", qualifiedName("age")), - alias("age", qualifiedName("age"), "a") - ), - buildAST("SELECT *, age, age as a FROM test") - ); + alias("age", qualifiedName("age"), "a")), + buildAST("SELECT *, age, age as a FROM test")); } @Test public void can_build_select_fields_from_index() { assertEquals( - project( - relation("test"), - alias("age", qualifiedName("age")) - ), - buildAST("SELECT age FROM test") - ); + project(relation("test"), alias("age", qualifiedName("age"))), + buildAST("SELECT age FROM test")); } @Test public void can_build_select_fields_with_alias() { assertEquals( - project( - relation("test"), - alias("age", qualifiedName("age"), "a") - ), - buildAST("SELECT age AS a FROM test") - ); + project(relation("test"), alias("age", qualifiedName("age"), "a")), + buildAST("SELECT age AS a FROM test")); } @Test @@ -140,17 +107,8 @@ public void can_build_select_fields_with_alias_quoted() { assertEquals( project( relation("test"), - alias( - "(age + 10)", - function("+", qualifiedName("age"), intLiteral(10)), - "Age_Expr" - ) - ), - buildAST("SELECT" - + " (age + 10) AS `Age_Expr` " - + "FROM test" - ) - ); + alias("(age + 10)", function("+", qualifiedName("age"), intLiteral(10)), "Age_Expr")), + buildAST("SELECT (age + 10) AS `Age_Expr` FROM test")); } @Test @@ -158,42 +116,27 @@ public void can_build_from_index_with_alias() { assertEquals( project( filter( - relation("test", "tt"), - function("=", qualifiedName("tt", "age"), intLiteral(30))), - alias("tt.name", qualifiedName("tt", "name")) 
- ), - buildAST("SELECT tt.name FROM test AS tt WHERE tt.age = 30") - ); + relation("test", "tt"), function("=", qualifiedName("tt", "age"), intLiteral(30))), + alias("tt.name", qualifiedName("tt", "name"))), + buildAST("SELECT tt.name FROM test AS tt WHERE tt.age = 30")); } @Test public void can_build_from_index_with_alias_quoted() { assertEquals( project( - filter( - relation("test", "t"), - function("=", qualifiedName("t", "age"), intLiteral(30))), - alias("`t`.name", qualifiedName("t", "name")) - ), - buildAST("SELECT `t`.name FROM test `t` WHERE `t`.age = 30") - ); + filter(relation("test", "t"), function("=", qualifiedName("t", "age"), intLiteral(30))), + alias("`t`.name", qualifiedName("t", "name"))), + buildAST("SELECT `t`.name FROM test `t` WHERE `t`.age = 30")); } @Test public void can_build_where_clause() { assertEquals( project( - filter( - relation("test"), - function( - "=", - qualifiedName("name"), - stringLiteral("John")) - ), - alias("name", qualifiedName("name")) - ), - buildAST("SELECT name FROM test WHERE name = 'John'") - ); + filter(relation("test"), function("=", qualifiedName("name"), stringLiteral("John"))), + alias("name", qualifiedName("name"))), + buildAST("SELECT name FROM test WHERE name = 'John'")); } @Test @@ -202,8 +145,7 @@ public void can_build_count_literal() { project( agg( relation("test"), - ImmutableList.of( - alias("COUNT(1)", aggregate("COUNT", intLiteral(1)))), + ImmutableList.of(alias("COUNT(1)", aggregate("COUNT", intLiteral(1)))), emptyList(), emptyList(), emptyList()), @@ -217,8 +159,7 @@ public void can_build_count_star() { project( agg( relation("test"), - ImmutableList.of( - alias("COUNT(*)", aggregate("COUNT", AllFields.of()))), + ImmutableList.of(alias("COUNT(*)", aggregate("COUNT", AllFields.of()))), emptyList(), emptyList(), emptyList()), @@ -328,9 +269,7 @@ public void can_build_having_clause() { emptyList(), ImmutableList.of(alias("name", qualifiedName("name"))), emptyList()), - function(">", - 
aggregate("MIN", qualifiedName("balance")), - intLiteral(1000))), + function(">", aggregate("MIN", qualifiedName("balance")), intLiteral(1000))), alias("name", qualifiedName("name")), alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), buildAST("SELECT name, AVG(age) FROM test GROUP BY name HAVING MIN(balance) > 1000")); @@ -343,14 +282,11 @@ public void can_build_having_condition_using_alias() { filter( agg( relation("test"), - ImmutableList.of( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), + ImmutableList.of(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), emptyList(), ImmutableList.of(alias("name", qualifiedName("name"))), emptyList()), - function(">", - aggregate("AVG", qualifiedName("age")), - intLiteral(1000))), + function(">", aggregate("AVG", qualifiedName("age")), intLiteral(1000))), alias("name", qualifiedName("name")), alias("AVG(age)", aggregate("AVG", qualifiedName("age")), "a")), buildAST("SELECT name, AVG(age) AS a FROM test GROUP BY name HAVING a > 1000")); @@ -360,9 +296,7 @@ public void can_build_having_condition_using_alias() { public void can_build_order_by_field_name() { assertEquals( project( - sort( - relation("test"), - field("name", argument("asc", booleanLiteral(true)))), + sort(relation("test"), field("name", argument("asc", booleanLiteral(true)))), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY name")); } @@ -374,8 +308,7 @@ public void can_build_order_by_function() { sort( relation("test"), field( - function("ABS", qualifiedName("name")), - argument("asc", booleanLiteral(true)))), + function("ABS", qualifiedName("name")), argument("asc", booleanLiteral(true)))), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY ABS(name)")); } @@ -384,9 +317,7 @@ public void can_build_order_by_function() { public void can_build_order_by_alias() { assertEquals( project( - sort( - relation("test"), - field("name", argument("asc", booleanLiteral(true)))), + 
sort(relation("test"), field("name", argument("asc", booleanLiteral(true)))), alias("name", qualifiedName("name"), "n")), buildAST("SELECT name AS n FROM test ORDER BY n ASC")); } @@ -395,9 +326,7 @@ public void can_build_order_by_alias() { public void can_build_order_by_ordinal() { assertEquals( project( - sort( - relation("test"), - field("name", argument("asc", booleanLiteral(false)))), + sort(relation("test"), field("name", argument("asc", booleanLiteral(false)))), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY 1 DESC")); } @@ -424,8 +353,7 @@ public void can_build_select_distinct_clause() { emptyList(), emptyList(), ImmutableList.of( - alias("name", qualifiedName("name")), - alias("age", qualifiedName("age"))), + alias("name", qualifiedName("name")), alias("age", qualifiedName("age"))), emptyList()), alias("name", qualifiedName("name")), alias("age", qualifiedName("age"))), @@ -441,26 +369,21 @@ public void can_build_select_distinct_clause_with_function() { emptyList(), emptyList(), ImmutableList.of( - alias("SUBSTRING(name, 1, 2)", + alias( + "SUBSTRING(name, 1, 2)", function( - "SUBSTRING", - qualifiedName("name"), - intLiteral(1), intLiteral(2)))), + "SUBSTRING", qualifiedName("name"), intLiteral(1), intLiteral(2)))), emptyList()), - alias("SUBSTRING(name, 1, 2)", - function( - "SUBSTRING", - qualifiedName("name"), - intLiteral(1), intLiteral(2)))), + alias( + "SUBSTRING(name, 1, 2)", + function("SUBSTRING", qualifiedName("name"), intLiteral(1), intLiteral(2)))), buildAST("SELECT DISTINCT SUBSTRING(name, 1, 2) FROM test")); } @Test public void can_build_select_all_clause() { assertEquals( - buildAST("SELECT name, age FROM test"), - buildAST("SELECT ALL name, age FROM test") - ); + buildAST("SELECT name, age FROM test"), buildAST("SELECT ALL name, age FROM test")); } @Test @@ -469,22 +392,28 @@ public void can_build_order_by_null_option() { project( sort( relation("test"), - field("name", + field( + "name", argument("asc", 
booleanLiteral(true)), argument("nullFirst", booleanLiteral(false)))), - alias("name", qualifiedName("name"))), + alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test ORDER BY name NULLS LAST")); } /** + * + * + *
    * Ensure Nested function falls back to legacy engine when used in an HAVING clause.
    * TODO Remove this test when support is added.
+   * 
*/ @Test public void nested_in_having_clause_throws_exception() { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> buildAST("SELECT count(*) FROM test HAVING nested(message.info)") - ); + SyntaxCheckException exception = + assertThrows( + SyntaxCheckException.class, + () -> buildAST("SELECT count(*) FROM test HAVING nested(message.info)")); assertEquals( "Falling back to legacy engine. Nested function is not supported in the HAVING clause.", @@ -495,23 +424,15 @@ public void nested_in_having_clause_throws_exception() { public void can_build_order_by_sort_order_keyword_insensitive() { assertEquals( project( - sort( - relation("test"), - field("age", - argument("asc", booleanLiteral(true)))), + sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))), alias("age", qualifiedName("age"))), - buildAST("SELECT age FROM test ORDER BY age ASC") - ); + buildAST("SELECT age FROM test ORDER BY age ASC")); assertEquals( project( - sort( - relation("test"), - field("age", - argument("asc", booleanLiteral(true)))), + sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))), alias("age", qualifiedName("age"))), - buildAST("SELECT age FROM test ORDER BY age asc") - ); + buildAST("SELECT age FROM test ORDER BY age asc")); } @Test @@ -523,20 +444,15 @@ public void can_build_from_subquery() { project( relation("test"), alias("firstname", qualifiedName("firstname"), "firstName"), - alias("lastname", qualifiedName("lastname"), "lastName") - ), - "a" - ), - function(">", qualifiedName("age"), intLiteral(20)) - ), + alias("lastname", qualifiedName("lastname"), "lastName")), + "a"), + function(">", qualifiedName("age"), intLiteral(20))), alias("a.firstName", qualifiedName("a", "firstName")), alias("lastName", qualifiedName("lastName"))), buildAST( "SELECT a.firstName, lastName FROM (" + "SELECT firstname AS firstName, lastname AS lastName FROM test" - + ") AS a where age > 20" - ) - ); + + ") AS a where age > 20")); 
} @Test @@ -545,19 +461,15 @@ public void can_build_from_subquery_with_backquoted_alias() { project( relationSubquery( project( - relation("test"), - alias("firstname", qualifiedName("firstname"), "firstName")), + relation("test"), alias("firstname", qualifiedName("firstname"), "firstName")), "a"), - alias("a.firstName", qualifiedName("a", "firstName")) - ), + alias("a.firstName", qualifiedName("a", "firstName"))), buildAST( "SELECT a.firstName " + "FROM ( " + " SELECT `firstname` AS `firstName` " + " FROM `test` " - + ") AS `a`" - ) - ); + + ") AS `a`")); } @Test @@ -566,12 +478,9 @@ public void can_build_show_all_tables() { project( filter( relation(TABLE_INFO), - function("like", qualifiedName("TABLE_NAME"), stringLiteral("%")) - ), - AllFields.of() - ), - buildAST("SHOW TABLES LIKE '%'") - ); + function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))), + AllFields.of()), + buildAST("SHOW TABLES LIKE '%'")); } @Test @@ -580,12 +489,9 @@ public void can_build_show_selected_tables() { project( filter( relation(TABLE_INFO), - function("like", qualifiedName("TABLE_NAME"), stringLiteral("a_c%")) - ), - AllFields.of() - ), - buildAST("SHOW TABLES LIKE 'a_c%'") - ); + function("like", qualifiedName("TABLE_NAME"), stringLiteral("a_c%"))), + AllFields.of()), + buildAST("SHOW TABLES LIKE 'a_c%'")); } @Test @@ -594,23 +500,16 @@ public void show_compatible_with_old_engine_syntax() { project( filter( relation(TABLE_INFO), - function("like", qualifiedName("TABLE_NAME"), stringLiteral("%")) - ), - AllFields.of() - ), - buildAST("SHOW TABLES LIKE '%'") - ); + function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))), + AllFields.of()), + buildAST("SHOW TABLES LIKE '%'")); } @Test public void can_build_describe_selected_tables() { assertEquals( - project( - relation(mappingTable("a_c%")), - AllFields.of() - ), - buildAST("DESCRIBE TABLES LIKE 'a_c%'") - ); + project(relation(mappingTable("a_c%")), AllFields.of()), + buildAST("DESCRIBE TABLES LIKE 'a_c%'")); } 
@Test @@ -619,23 +518,16 @@ public void can_build_describe_selected_tables_field_filter() { project( filter( relation(mappingTable("a_c%")), - function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%")) - ), - AllFields.of() - ), - buildAST("DESCRIBE TABLES LIKE 'a_c%' COLUMNS LIKE 'name%'") - ); + function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%"))), + AllFields.of()), + buildAST("DESCRIBE TABLES LIKE 'a_c%' COLUMNS LIKE 'name%'")); } @Test public void can_build_alias_by_keywords() { assertEquals( - project( - relation("test"), - alias("avg_age", qualifiedName("avg_age"), "avg") - ), - buildAST("SELECT avg_age AS avg FROM test") - ); + project(relation("test"), alias("avg_age", qualifiedName("avg_age"), "avg")), + buildAST("SELECT avg_age AS avg FROM test")); } @Test @@ -643,42 +535,20 @@ public void can_build_limit_clause() { assertEquals( project( limit( - sort( - relation("test"), - field("age", argument("asc", booleanLiteral(true))) - ), - 10, - 0 - ), + sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))), 10, 0), alias("name", qualifiedName("name")), - alias("age", qualifiedName("age")) - ), - buildAST("SELECT name, age FROM test ORDER BY age LIMIT 10") - ); + alias("age", qualifiedName("age"))), + buildAST("SELECT name, age FROM test ORDER BY age LIMIT 10")); } @Test public void can_build_limit_clause_with_offset() { assertEquals( - project( - limit( - relation("test"), - 10, - 5 - ), - alias("name", qualifiedName("name")) - ), + project(limit(relation("test"), 10, 5), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test LIMIT 10 OFFSET 5")); assertEquals( - project( - limit( - relation("test"), - 10, - 5 - ), - alias("name", qualifiedName("name")) - ), + project(limit(relation("test"), 10, 5), alias("name", qualifiedName("name"))), buildAST("SELECT name FROM test LIMIT 5, 10")); } @@ -686,11 +556,10 @@ public void can_build_limit_clause_with_offset() { public void 
can_build_qualified_name_highlight() { Map args = new HashMap<>(); assertEquals( - project(relation("test"), - alias("highlight(fieldA)", - highlight(AstDSL.qualifiedName("fieldA"), args))), - buildAST("SELECT highlight(fieldA) FROM test") - ); + project( + relation("test"), + alias("highlight(fieldA)", highlight(AstDSL.qualifiedName("fieldA"), args))), + buildAST("SELECT highlight(fieldA) FROM test")); } @Test @@ -699,22 +568,22 @@ public void can_build_qualified_highlight_with_arguments() { args.put("pre_tags", new Literal("", DataType.STRING)); args.put("post_tags", new Literal("", DataType.STRING)); assertEquals( - project(relation("test"), - alias("highlight(fieldA, pre_tags='', post_tags='')", + project( + relation("test"), + alias( + "highlight(fieldA, pre_tags='', post_tags='')", highlight(AstDSL.qualifiedName("fieldA"), args))), - buildAST("SELECT highlight(fieldA, pre_tags='', post_tags='') " - + "FROM test") - ); + buildAST( + "SELECT highlight(fieldA, pre_tags='', post_tags='') " + "FROM test")); } @Test public void can_build_string_literal_highlight() { Map args = new HashMap<>(); assertEquals( - project(relation("test"), - alias("highlight(\"fieldA\")", - highlight(AstDSL.stringLiteral("fieldA"), args))), - buildAST("SELECT highlight(\"fieldA\") FROM test") - ); + project( + relation("test"), + alias("highlight(\"fieldA\")", highlight(AstDSL.stringLiteral("fieldA"), args))), + buildAST("SELECT highlight(\"fieldA\") FROM test")); } } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java index 2161eb5b1a..602f17ce85 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java @@ -10,9 +10,7 @@ import org.opensearch.sql.sql.antlr.SQLSyntaxParser; public class AstBuilderTestBase { - /** - * SQL syntax parser that helps prepare parse tree as AstBuilder 
input. - */ + /** SQL syntax parser that helps prepare parse tree as AstBuilder input. */ private final SQLSyntaxParser parser = new SQLSyntaxParser(); protected UnresolvedPlan buildAST(String query) { diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java index 20655bc020..f2e7fdb2d8 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -57,185 +56,122 @@ class AstExpressionBuilderTest { @Test public void canBuildStringLiteral() { - assertEquals( - stringLiteral("hello"), - buildExprAst("'hello'") - ); - assertEquals( - stringLiteral("hello"), - buildExprAst("\"hello\"") - ); + assertEquals(stringLiteral("hello"), buildExprAst("'hello'")); + assertEquals(stringLiteral("hello"), buildExprAst("\"hello\"")); } @Test public void canBuildIntegerLiteral() { - assertEquals( - intLiteral(123), - buildExprAst("123") - ); - assertEquals( - intLiteral(Integer.MAX_VALUE), - buildExprAst(String.valueOf(Integer.MAX_VALUE)) - ); - assertEquals( - intLiteral(Integer.MIN_VALUE), - buildExprAst(String.valueOf(Integer.MIN_VALUE)) - ); + assertEquals(intLiteral(123), buildExprAst("123")); + assertEquals(intLiteral(Integer.MAX_VALUE), buildExprAst(String.valueOf(Integer.MAX_VALUE))); + assertEquals(intLiteral(Integer.MIN_VALUE), buildExprAst(String.valueOf(Integer.MIN_VALUE))); } @Test public void canBuildLongLiteral() { + assertEquals(longLiteral(1234567890123L), buildExprAst("1234567890123")); assertEquals( - longLiteral(1234567890123L), - buildExprAst("1234567890123") - ); + longLiteral(Integer.MAX_VALUE + 1L), buildExprAst(String.valueOf(Integer.MAX_VALUE + 1L))); assertEquals( - 
longLiteral(Integer.MAX_VALUE + 1L), - buildExprAst(String.valueOf(Integer.MAX_VALUE + 1L)) - ); - assertEquals( - longLiteral(Integer.MIN_VALUE - 1L), - buildExprAst(String.valueOf(Integer.MIN_VALUE - 1L)) - ); + longLiteral(Integer.MIN_VALUE - 1L), buildExprAst(String.valueOf(Integer.MIN_VALUE - 1L))); } @Test public void canBuildNegativeRealLiteral() { - assertEquals( - doubleLiteral(-4.567), - buildExprAst("-4.567") - ); + assertEquals(doubleLiteral(-4.567), buildExprAst("-4.567")); } @Test public void canBuildBooleanLiteral() { - assertEquals( - booleanLiteral(true), - buildExprAst("true") - ); + assertEquals(booleanLiteral(true), buildExprAst("true")); } @Test public void canBuildDateLiteral() { - assertEquals( - dateLiteral("2020-07-07"), - buildExprAst("DATE '2020-07-07'") - ); + assertEquals(dateLiteral("2020-07-07"), buildExprAst("DATE '2020-07-07'")); } @Test public void canBuildTimeLiteral() { - assertEquals( - timeLiteral("11:30:45"), - buildExprAst("TIME '11:30:45'") - ); + assertEquals(timeLiteral("11:30:45"), buildExprAst("TIME '11:30:45'")); } @Test public void canBuildTimestampLiteral() { assertEquals( - timestampLiteral("2020-07-07 11:30:45"), - buildExprAst("TIMESTAMP '2020-07-07 11:30:45'") - ); + timestampLiteral("2020-07-07 11:30:45"), buildExprAst("TIMESTAMP '2020-07-07 11:30:45'")); } @Test public void canBuildIntervalLiteral() { - assertEquals( - intervalLiteral(1, DataType.INTEGER, "day"), - buildExprAst("interval 1 day") - ); + assertEquals(intervalLiteral(1, DataType.INTEGER, "day"), buildExprAst("interval 1 day")); } @Test public void canBuildArithmeticExpression() { - assertEquals( - function("+", intLiteral(1), intLiteral(2)), - buildExprAst("1 + 2") - ); + assertEquals(function("+", intLiteral(1), intLiteral(2)), buildExprAst("1 + 2")); } @Test public void canBuildArithmeticExpressionPrecedence() { assertEquals( - function("+", - intLiteral(1), - function("*", - intLiteral(2), intLiteral(3))), - buildExprAst("1 + 2 * 3") - ); + 
function("+", intLiteral(1), function("*", intLiteral(2), intLiteral(3))), + buildExprAst("1 + 2 * 3")); } @Test public void canBuildFunctionWithoutArguments() { - assertEquals( - function("PI"), - buildExprAst("PI()") - ); + assertEquals(function("PI"), buildExprAst("PI()")); } @Test public void canBuildExpressionWithParentheses() { assertEquals( - function("*", + function( + "*", function("+", doubleLiteral(-1.0), doubleLiteral(2.3)), - function("-", intLiteral(3), intLiteral(1)) - ), - buildExprAst("(-1.0 + 2.3) * (3 - 1)") - ); + function("-", intLiteral(3), intLiteral(1))), + buildExprAst("(-1.0 + 2.3) * (3 - 1)")); } @Test public void canBuildFunctionCall() { - assertEquals( - function("abs", intLiteral(-1)), - buildExprAst("abs(-1)") - ); + assertEquals(function("abs", intLiteral(-1)), buildExprAst("abs(-1)")); } @Test public void canBuildExtractFunctionCall() { assertEquals( function("extract", stringLiteral("DAY"), dateLiteral("2023-02-09")).toString(), - buildExprAst("extract(DAY FROM \"2023-02-09\")").toString() - ); + buildExprAst("extract(DAY FROM \"2023-02-09\")").toString()); } @Test public void canBuildGetFormatFunctionCall() { assertEquals( function("get_format", stringLiteral("DATE"), stringLiteral("USA")), - buildExprAst("get_format(DATE,\"USA\")") - ); + buildExprAst("get_format(DATE,\"USA\")")); } @Test public void canBuildNestedFunctionCall() { assertEquals( - function("abs", - function("*", - function("abs", intLiteral(-5)), - intLiteral(-1) - ) - ), - buildExprAst("abs(abs(-5) * -1)") - ); + function("abs", function("*", function("abs", intLiteral(-5)), intLiteral(-1))), + buildExprAst("abs(abs(-5) * -1)")); } @Test public void canBuildDateAndTimeFunctionCall() { assertEquals( function("dayofmonth", dateLiteral("2020-07-07")), - buildExprAst("dayofmonth(DATE '2020-07-07')") - ); + buildExprAst("dayofmonth(DATE '2020-07-07')")); } @Test public void canBuildTimestampAddFunctionCall() { assertEquals( function("timestampadd", 
stringLiteral("WEEK"), intLiteral(1), dateLiteral("2023-03-14")), - buildExprAst("timestampadd(WEEK, 1, DATE '2023-03-14')") - ); + buildExprAst("timestampadd(WEEK, 1, DATE '2023-03-14')")); } @Test @@ -246,105 +182,69 @@ public void canBuildTimstampDiffFunctionCall() { stringLiteral("WEEK"), timestampLiteral("2023-03-15 00:00:01"), dateLiteral("2023-03-14")), - buildExprAst("timestampdiff(WEEK, TIMESTAMP '2023-03-15 00:00:01', DATE '2023-03-14')") - ); + buildExprAst("timestampdiff(WEEK, TIMESTAMP '2023-03-15 00:00:01', DATE '2023-03-14')")); } @Test public void canBuildComparisonExpression() { - assertEquals( - function("!=", intLiteral(1), intLiteral(2)), - buildExprAst("1 != 2") - ); + assertEquals(function("!=", intLiteral(1), intLiteral(2)), buildExprAst("1 != 2")); - assertEquals( - function("!=", intLiteral(1), intLiteral(2)), - buildExprAst("1 <> 2") - ); + assertEquals(function("!=", intLiteral(1), intLiteral(2)), buildExprAst("1 <> 2")); } @Test public void canBuildNullTestExpression() { - assertEquals( - function("is null", intLiteral(1)), - buildExprAst("1 is NULL") - ); + assertEquals(function("is null", intLiteral(1)), buildExprAst("1 is NULL")); - assertEquals( - function("is not null", intLiteral(1)), - buildExprAst("1 IS NOT null") - ); + assertEquals(function("is not null", intLiteral(1)), buildExprAst("1 IS NOT null")); } @Test public void canBuildNullTestExpressionWithNULLLiteral() { - assertEquals( - function("is null", nullLiteral()), - buildExprAst("NULL is NULL") - ); + assertEquals(function("is null", nullLiteral()), buildExprAst("NULL is NULL")); - assertEquals( - function("is not null", nullLiteral()), - buildExprAst("NULL IS NOT null") - ); + assertEquals(function("is not null", nullLiteral()), buildExprAst("NULL IS NOT null")); } @Test public void canBuildLikeExpression() { assertEquals( function("like", stringLiteral("str"), stringLiteral("st%")), - buildExprAst("'str' like 'st%'") - ); + buildExprAst("'str' like 'st%'")); 
assertEquals( function("not like", stringLiteral("str"), stringLiteral("st%")), - buildExprAst("'str' not like 'st%'") - ); + buildExprAst("'str' not like 'st%'")); } @Test public void canBuildRegexpExpression() { assertEquals( function("regexp", stringLiteral("str"), stringLiteral(".*")), - buildExprAst("'str' regexp '.*'") - ); + buildExprAst("'str' regexp '.*'")); } @Test public void canBuildBetweenExpression() { assertEquals( - between( - qualifiedName("age"), intLiteral(10), intLiteral(30)), - buildExprAst("age BETWEEN 10 AND 30") - ); + between(qualifiedName("age"), intLiteral(10), intLiteral(30)), + buildExprAst("age BETWEEN 10 AND 30")); } @Test public void canBuildNotBetweenExpression() { assertEquals( - not( - between( - qualifiedName("age"), intLiteral(10), intLiteral(30))), - buildExprAst("age NOT BETWEEN 10 AND 30") - ); + not(between(qualifiedName("age"), intLiteral(10), intLiteral(30))), + buildExprAst("age NOT BETWEEN 10 AND 30")); } @Test public void canBuildLogicalExpression() { - assertEquals( - and(booleanLiteral(true), booleanLiteral(false)), - buildExprAst("true AND false") - ); + assertEquals(and(booleanLiteral(true), booleanLiteral(false)), buildExprAst("true AND false")); - assertEquals( - or(booleanLiteral(true), booleanLiteral(false)), - buildExprAst("true OR false") - ); + assertEquals(or(booleanLiteral(true), booleanLiteral(false)), buildExprAst("true OR false")); - assertEquals( - not(booleanLiteral(false)), - buildExprAst("NOT false") - ); + assertEquals(not(booleanLiteral(false)), buildExprAst("NOT false")); } @Test @@ -373,8 +273,8 @@ public void canBuildWindowFunctionWithNullOrderSpecified() { window( function("DENSE_RANK"), ImmutableList.of(), - ImmutableList.of(ImmutablePair.of( - new SortOption(ASC, NULL_LAST), qualifiedName("age")))), + ImmutableList.of( + ImmutablePair.of(new SortOption(ASC, NULL_LAST), qualifiedName("age")))), buildExprAst("DENSE_RANK() OVER (ORDER BY age ASC NULLS LAST)")); } @@ -382,35 +282,27 @@ public 
void canBuildWindowFunctionWithNullOrderSpecified() { public void canBuildStringLiteralHighlightFunction() { HashMap args = new HashMap<>(); assertEquals( - highlight(AstDSL.stringLiteral("fieldA"), args), - buildExprAst("highlight(\"fieldA\")") - ); + highlight(AstDSL.stringLiteral("fieldA"), args), buildExprAst("highlight(\"fieldA\")")); } @Test public void canBuildQualifiedNameHighlightFunction() { HashMap args = new HashMap<>(); assertEquals( - highlight(AstDSL.qualifiedName("fieldA"), args), - buildExprAst("highlight(fieldA)") - ); + highlight(AstDSL.qualifiedName("fieldA"), args), buildExprAst("highlight(fieldA)")); } @Test public void canBuildStringLiteralPositionFunction() { assertEquals( - function("position", stringLiteral("substr"), stringLiteral("str")), - buildExprAst("position(\"substr\" IN \"str\")") - ); + function("position", stringLiteral("substr"), stringLiteral("str")), + buildExprAst("position(\"substr\" IN \"str\")")); } @Test public void canBuildWindowFunctionWithoutOrderBy() { assertEquals( - window( - function("RANK"), - ImmutableList.of(qualifiedName("state")), - ImmutableList.of()), + window(function("RANK"), ImmutableList.of(qualifiedName("state")), ImmutableList.of()), buildExprAst("RANK() OVER (PARTITION BY state)")); } @@ -420,8 +312,7 @@ public void canBuildAggregateWindowFunction() { window( aggregate("AVG", qualifiedName("age")), ImmutableList.of(qualifiedName("state")), - ImmutableList.of(ImmutablePair.of( - new SortOption(null, null), qualifiedName("age")))), + ImmutableList.of(ImmutablePair.of(new SortOption(null, null), qualifiedName("age")))), buildExprAst("AVG(age) OVER (PARTITION BY state ORDER BY age)")); } @@ -430,11 +321,8 @@ public void canBuildCaseConditionStatement() { assertEquals( caseWhen( null, // no else statement - when( - function(">", qualifiedName("age"), intLiteral(30)), - stringLiteral("age1"))), - buildExprAst("CASE WHEN age > 30 THEN 'age1' END") - ); + when(function(">", qualifiedName("age"), 
intLiteral(30)), stringLiteral("age1"))), + buildExprAst("CASE WHEN age > 30 THEN 'age1' END")); } @Test @@ -444,168 +332,147 @@ public void canBuildCaseValueStatement() { qualifiedName("age"), stringLiteral("age2"), when(intLiteral(30), stringLiteral("age1"))), - buildExprAst("CASE age WHEN 30 THEN 'age1' ELSE 'age2' END") - ); + buildExprAst("CASE age WHEN 30 THEN 'age1' ELSE 'age2' END")); } @Test public void canBuildKeywordsAsIdentifiers() { - assertEquals( - qualifiedName("timestamp"), - buildExprAst("timestamp") - ); + assertEquals(qualifiedName("timestamp"), buildExprAst("timestamp")); } @Test public void canBuildKeywordsAsIdentInQualifiedName() { - assertEquals( - qualifiedName("test", "timestamp"), - buildExprAst("test.timestamp") - ); + assertEquals(qualifiedName("test", "timestamp"), buildExprAst("test.timestamp")); } @Test public void canBuildMetaDataFieldAsQualifiedName() { - Stream.of("_id", "_index", "_sort", "_score", "_maxscore").forEach( - field -> assertEquals( - qualifiedName(field), - buildExprAst(field) - ) - ); + Stream.of("_id", "_index", "_sort", "_score", "_maxscore") + .forEach(field -> assertEquals(qualifiedName(field), buildExprAst(field))); } @Test public void canBuildNonMetaDataFieldAsQualifiedName() { - Stream.of("id", "__id", "_routing", "___field").forEach( - field -> assertEquals( - qualifiedName(field), - buildExprAst(field) - ) - ); + Stream.of("id", "__id", "_routing", "___field") + .forEach(field -> assertEquals(qualifiedName(field), buildExprAst(field))); } @Test public void canCastFieldAsString() { assertEquals( AstDSL.cast(qualifiedName("state"), stringLiteral("string")), - buildExprAst("cast(state as string)") - ); + buildExprAst("cast(state as string)")); } @Test public void canCastValueAsString() { assertEquals( - AstDSL.cast(intLiteral(1), stringLiteral("string")), - buildExprAst("cast(1 as string)") - ); + AstDSL.cast(intLiteral(1), stringLiteral("string")), buildExprAst("cast(1 as string)")); } @Test public void 
filteredAggregation() { assertEquals( - AstDSL.filteredAggregate("avg", qualifiedName("age"), - function(">", qualifiedName("age"), intLiteral(20))), - buildExprAst("avg(age) filter(where age > 20)") - ); + AstDSL.filteredAggregate( + "avg", qualifiedName("age"), function(">", qualifiedName("age"), intLiteral(20))), + buildExprAst("avg(age) filter(where age > 20)")); } @Test public void canBuildVarSamp() { - assertEquals( - aggregate("var_samp", qualifiedName("age")), - buildExprAst("var_samp(age)")); + assertEquals(aggregate("var_samp", qualifiedName("age")), buildExprAst("var_samp(age)")); } @Test public void canBuildVarPop() { - assertEquals( - aggregate("var_pop", qualifiedName("age")), - buildExprAst("var_pop(age)")); + assertEquals(aggregate("var_pop", qualifiedName("age")), buildExprAst("var_pop(age)")); } @Test public void canBuildVariance() { - assertEquals( - aggregate("variance", qualifiedName("age")), - buildExprAst("variance(age)")); + assertEquals(aggregate("variance", qualifiedName("age")), buildExprAst("variance(age)")); } @Test public void distinctCount() { assertEquals( AstDSL.distinctAggregate("count", qualifiedName("name")), - buildExprAst("count(distinct name)") - ); + buildExprAst("count(distinct name)")); } @Test public void filteredDistinctCount() { assertEquals( - AstDSL.filteredDistinctCount("count", qualifiedName("name"), function( - ">", qualifiedName("age"), intLiteral(30))), - buildExprAst("count(distinct name) filter(where age > 30)") - ); + AstDSL.filteredDistinctCount( + "count", qualifiedName("name"), function(">", qualifiedName("age"), intLiteral(30))), + buildExprAst("count(distinct name) filter(where age > 30)")); } @Test public void matchPhraseQueryAllParameters() { assertEquals( - AstDSL.function("matchphrasequery", + AstDSL.function( + "matchphrasequery", unresolvedArg("field", qualifiedName("test")), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("slop", stringLiteral("3")), unresolvedArg("analyzer", 
stringLiteral("standard")), - unresolvedArg("zero_terms_query", stringLiteral("NONE")) - ), - buildExprAst("matchphrasequery(test, 'search query', slop = 3" - + ", analyzer = 'standard', zero_terms_query='NONE'" - + ")") - ); + unresolvedArg("zero_terms_query", stringLiteral("NONE"))), + buildExprAst( + "matchphrasequery(test, 'search query', slop = 3" + + ", analyzer = 'standard', zero_terms_query='NONE'" + + ")")); } @Test public void matchPhrasePrefixAllParameters() { assertEquals( - AstDSL.function("match_phrase_prefix", - unresolvedArg("field", qualifiedName("test")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")), - unresolvedArg("boost", stringLiteral("1.5")), - unresolvedArg("analyzer", stringLiteral("standard")), - unresolvedArg("max_expansions", stringLiteral("4")), - unresolvedArg("zero_terms_query", stringLiteral("NONE")) - ), - buildExprAst("match_phrase_prefix(test, 'search query', slop = 3, boost = 1.5" - + ", analyzer = 'standard', max_expansions = 4, zero_terms_query='NONE'" - + ")") - ); + AstDSL.function( + "match_phrase_prefix", + unresolvedArg("field", qualifiedName("test")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")), + unresolvedArg("boost", stringLiteral("1.5")), + unresolvedArg("analyzer", stringLiteral("standard")), + unresolvedArg("max_expansions", stringLiteral("4")), + unresolvedArg("zero_terms_query", stringLiteral("NONE"))), + buildExprAst( + "match_phrase_prefix(test, 'search query', slop = 3, boost = 1.5" + + ", analyzer = 'standard', max_expansions = 4, zero_terms_query='NONE'" + + ")")); } @Test public void relevanceMatch() { - assertEquals(AstDSL.function("match", - unresolvedArg("field", qualifiedName("message")), - unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("match('message', 'search query')") - ); - - assertEquals(AstDSL.function("match", - unresolvedArg("field", qualifiedName("message")), 
- unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("analyzer", stringLiteral("keyword")), - unresolvedArg("operator", stringLiteral("AND"))), + assertEquals( + AstDSL.function( + "match", + unresolvedArg("field", qualifiedName("message")), + unresolvedArg("query", stringLiteral("search query"))), + buildExprAst("match('message', 'search query')")); + + assertEquals( + AstDSL.function( + "match", + unresolvedArg("field", qualifiedName("message")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("analyzer", stringLiteral("keyword")), + unresolvedArg("operator", stringLiteral("AND"))), buildExprAst("match('message', 'search query', analyzer='keyword', operator='AND')")); } @Test public void relevanceMatchQuery() { - assertEquals(AstDSL.function("matchquery", + assertEquals( + AstDSL.function( + "matchquery", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("matchquery('message', 'search query')") - ); + buildExprAst("matchquery('message', 'search query')")); - assertEquals(AstDSL.function("matchquery", + assertEquals( + AstDSL.function( + "matchquery", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), @@ -615,13 +482,16 @@ public void relevanceMatchQuery() { @Test public void relevanceMatch_Query() { - assertEquals(AstDSL.function("match_query", + assertEquals( + AstDSL.function( + "match_query", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("match_query('message', 'search query')") - ); + buildExprAst("match_query('message', 'search query')")); - assertEquals(AstDSL.function("match_query", + assertEquals( + AstDSL.function( + "match_query", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", 
stringLiteral("keyword")), @@ -631,238 +501,279 @@ public void relevanceMatch_Query() { @Test public void relevanceMatchQueryAltSyntax() { - assertEquals(AstDSL.function("match_query", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_query('search query')").toString() - ); + assertEquals( + AstDSL.function( + "match_query", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_query('search query')").toString()); - assertEquals(AstDSL.function("match_query", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_query(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "match_query", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_query(\"search query\")").toString()); - assertEquals(AstDSL.function("matchquery", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchquery('search query')").toString() - ); + assertEquals( + AstDSL.function( + "matchquery", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = matchquery('search query')").toString()); - assertEquals(AstDSL.function("matchquery", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchquery(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "matchquery", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + 
.toString(), + buildExprAst("message = matchquery(\"search query\")").toString()); } @Test public void relevanceMatchPhraseAltSyntax() { - assertEquals(AstDSL.function("match_phrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_phrase('search query')").toString() - ); + assertEquals( + AstDSL.function( + "match_phrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_phrase('search query')").toString()); - assertEquals(AstDSL.function("match_phrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = match_phrase(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "match_phrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = match_phrase(\"search query\")").toString()); - assertEquals(AstDSL.function("matchphrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchphrase('search query')").toString() - ); + assertEquals( + AstDSL.function( + "matchphrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search query"))) + .toString(), + buildExprAst("message = matchphrase('search query')").toString()); - assertEquals(AstDSL.function("matchphrase", - unresolvedArg("field", stringLiteral("message")), - unresolvedArg("query", stringLiteral("search query"))).toString(), - buildExprAst("message = matchphrase(\"search query\")").toString() - ); + assertEquals( + AstDSL.function( + "matchphrase", + unresolvedArg("field", stringLiteral("message")), + unresolvedArg("query", stringLiteral("search 
query"))) + .toString(), + buildExprAst("message = matchphrase(\"search query\")").toString()); } @Test public void relevanceMultiMatchAltSyntax() { - assertEquals(AstDSL.function("multi_match", + assertEquals( + AstDSL.function( + "multi_match", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multi_match('search query')") - ); + buildExprAst("field1 = multi_match('search query')")); - assertEquals(AstDSL.function("multi_match", + assertEquals( + AstDSL.function( + "multi_match", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multi_match(\"search query\")") - ); + buildExprAst("field1 = multi_match(\"search query\")")); - assertEquals(AstDSL.function("multimatch", + assertEquals( + AstDSL.function( + "multimatch", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multimatch('search query')") - ); + buildExprAst("field1 = multimatch('search query')")); - assertEquals(AstDSL.function("multimatch", + assertEquals( + AstDSL.function( + "multimatch", unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("field1 = multimatch(\"search query\")") - ); + buildExprAst("field1 = multimatch(\"search query\")")); } @Test public void relevanceMulti_match() { - assertEquals(AstDSL.function("multi_match", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "multi_match", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("multi_match(['field1', 
'field2' ^ 3.2], 'search query')") - ); + buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query')")); - assertEquals(AstDSL.function("multi_match", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "multi_match", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query'," - + "analyzer='keyword', 'operator'='AND')")); + buildExprAst( + "multi_match(['field1', 'field2' ^ 3.2], 'search query'," + + "analyzer='keyword', 'operator'='AND')")); } @Test public void relevanceMultimatch_alternate_parameter_syntax() { - assertEquals(AstDSL.function("multimatch", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1F, "field2", 2F))), + assertEquals( + AstDSL.function( + "multimatch", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field1", 1F, "field2", 2F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0'])") - ); + buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0'])")); - assertEquals(AstDSL.function("multimatch", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1F, "field2", 2F))), + assertEquals( + AstDSL.function( + "multimatch", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field1", 1F, "field2", 2F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0']," - + "analyzer='keyword', operator='AND')")); + 
buildExprAst( + "multimatch(query='search query', fields=['field1^1.0,field2^2.0']," + + "analyzer='keyword', operator='AND')")); } @Test public void relevanceMultimatchquery_alternate_parameter_syntax() { - assertEquals(AstDSL.function("multimatchquery", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field", 1F))), + assertEquals( + AstDSL.function( + "multimatchquery", + unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field", 1F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("multimatchquery(query='search query', fields='field')") - ); + buildExprAst("multimatchquery(query='search query', fields='field')")); - assertEquals(AstDSL.function("multimatchquery", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field", 1F))), + assertEquals( + AstDSL.function( + "multimatchquery", + unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field", 1F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("multimatchquery(query='search query', fields='field'," - + "analyzer='keyword', 'operator'='AND')")); + buildExprAst( + "multimatchquery(query='search query', fields='field'," + + "analyzer='keyword', 'operator'='AND')")); } @Test public void relevanceSimple_query_string() { - assertEquals(AstDSL.function("simple_query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "simple_query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query')") - ); + buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query')")); - 
assertEquals(AstDSL.function("simple_query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "simple_query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("operator", stringLiteral("AND"))), - buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query'," - + "analyzer='keyword', operator='AND')")); + buildExprAst( + "simple_query_string(['field1', 'field2' ^ 3.2], 'search query'," + + "analyzer='keyword', operator='AND')")); } @Test public void relevanceQuery_string() { - assertEquals(AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query"))), - buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query')") - ); + buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query')")); - assertEquals(AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field2", 3.2F, "field1", 1.F))), + assertEquals( + AstDSL.function( + "query_string", + unresolvedArg( + "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))), unresolvedArg("query", stringLiteral("search query")), unresolvedArg("analyzer", stringLiteral("keyword")), unresolvedArg("time_zone", stringLiteral("Canada/Pacific")), unresolvedArg("tie_breaker", stringLiteral("1.3"))), - buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query'," - + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); + buildExprAst( + 
"query_string(['field1', 'field2' ^ 3.2], 'search query'," + + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); } @Test public void relevanceWildcard_query() { - assertEquals(AstDSL.function("wildcard_query", + assertEquals( + AstDSL.function( + "wildcard_query", unresolvedArg("field", qualifiedName("field")), unresolvedArg("query", stringLiteral("search query*")), unresolvedArg("boost", stringLiteral("1.5")), unresolvedArg("case_insensitive", stringLiteral("true")), unresolvedArg("rewrite", stringLiteral("scoring_boolean"))), - buildExprAst("wildcard_query(field, 'search query*', boost=1.5," - + "case_insensitive=true, rewrite='scoring_boolean'))") - ); + buildExprAst( + "wildcard_query(field, 'search query*', boost=1.5," + + "case_insensitive=true, rewrite='scoring_boolean'))")); } @Test public void relevanceScore_query() { assertEquals( AstDSL.score( - AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), - unresolvedArg("query", stringLiteral("search query")) - ), - AstDSL.doubleLiteral(1.0) - ), - buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'))") - ); + AstDSL.function( + "query_string", + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), + unresolvedArg("query", stringLiteral("search query"))), + AstDSL.doubleLiteral(1.0)), + buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'))")); } @Test public void relevanceScore_withBoost_query() { assertEquals( AstDSL.score( - AstDSL.function("query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), - unresolvedArg("query", stringLiteral("search query")) - ), - doubleLiteral(1.0) - ), - buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'), 1.0)") - ); + AstDSL.function( + "query_string", + unresolvedArg( + "fields", + new 
RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), + unresolvedArg("query", stringLiteral("search query"))), + doubleLiteral(1.0)), + buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'), 1.0)")); } @Test public void relevanceQuery() { - assertEquals(AstDSL.function("query", - unresolvedArg("query", stringLiteral("field1:query OR field2:query"))), - buildExprAst("query('field1:query OR field2:query')") - ); + assertEquals( + AstDSL.function( + "query", unresolvedArg("query", stringLiteral("field1:query OR field2:query"))), + buildExprAst("query('field1:query OR field2:query')")); - assertEquals(AstDSL.function("query", - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("analyzer", stringLiteral("keyword")), - unresolvedArg("time_zone", stringLiteral("Canada/Pacific")), - unresolvedArg("tie_breaker", stringLiteral("1.3"))), - buildExprAst("query('search query'," - + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); + assertEquals( + AstDSL.function( + "query", + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("analyzer", stringLiteral("keyword")), + unresolvedArg("time_zone", stringLiteral("Canada/Pacific")), + unresolvedArg("tie_breaker", stringLiteral("1.3"))), + buildExprAst( + "query('search query'," + + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')")); } @Test @@ -876,7 +787,8 @@ public void canBuildInClause() { buildExprAst("age not in (20, 30)")); assertEquals( - AstDSL.in(qualifiedName("age"), + AstDSL.in( + qualifiedName("age"), AstDSL.function("abs", AstDSL.intLiteral(20)), AstDSL.function("abs", AstDSL.intLiteral(30))), buildExprAst("age in (abs(20), abs(30))")); diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java index 1cb1ab5f8b..b2e4c54160 100644 --- 
a/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -30,8 +29,7 @@ @ExtendWith(MockitoExtension.class) class AstHavingFilterBuilderTest { - @Mock - private QuerySpecification querySpec; + @Mock private QuerySpecification querySpec; private AstHavingFilterBuilder builder; diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java index 4ce2a2d3f7..639d73e419 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java @@ -25,32 +25,29 @@ class AstNowLikeFunctionTest extends AstBuilderTestBase { private static Stream allFunctions() { - return Stream.of("curdate", - "current_date", - "current_time", - "current_timestamp", - "curtime", - "localtimestamp", - "localtime", - "now", - "sysdate", - "utc_date", - "utc_time", - "utc_timestamp") + return Stream.of( + "curdate", + "current_date", + "current_time", + "current_timestamp", + "curtime", + "localtimestamp", + "localtime", + "now", + "sysdate", + "utc_date", + "utc_time", + "utc_timestamp") .map(Arguments::of); } private static Stream supportFsp() { - return Stream.of("sysdate") - .map(Arguments::of); + return Stream.of("sysdate").map(Arguments::of); } private static Stream supportShortcut() { - return Stream.of("current_date", - "current_time", - "current_timestamp", - "localtimestamp", - "localtime") + return Stream.of( + "current_date", "current_time", "current_timestamp", "localtimestamp", "localtime") .map(Arguments::of); } @@ -59,12 +56,7 @@ private static Stream supportShortcut() { void project_call(String name) { String 
call = name + "()"; assertEquals( - project( - values(emptyList()), - alias(call, function(name)) - ), - buildAST("SELECT " + call) - ); + project(values(emptyList()), alias(call, function(name))), buildAST("SELECT " + call)); } @ParameterizedTest @@ -73,29 +65,16 @@ void filter_call(String name) { String call = name + "()"; assertEquals( project( - filter( - relation("test"), - function( - "=", - qualifiedName("data"), - function(name)) - ), - AllFields.of() - ), - buildAST("SELECT * FROM test WHERE data = " + call) - ); + filter(relation("test"), function("=", qualifiedName("data"), function(name))), + AllFields.of()), + buildAST("SELECT * FROM test WHERE data = " + call)); } - @ParameterizedTest @MethodSource("supportFsp") void fsp(String name) { assertEquals( - project( - values(emptyList()), - alias(name + "(0)", function(name, intLiteral(0))) - ), - buildAST("SELECT " + name + "(0)") - ); + project(values(emptyList()), alias(name + "(0)", function(name, intLiteral(0)))), + buildAST("SELECT " + name + "(0)")); } } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java index 28665dd7ef..b0a7592990 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -66,9 +65,10 @@ public void functionNameCanBeUsedAsIdentifier() { void assertFunctionNameCouldBeId(String antlrFunctionName) { List functionList = - Arrays.stream(antlrFunctionName.split("\\|")).map(String::stripLeading) - .map(String::stripTrailing).collect( - Collectors.toList()); + Arrays.stream(antlrFunctionName.split("\\|")) + .map(String::stripLeading) + .map(String::stripTrailing) + 
.collect(Collectors.toList()); assertFalse(functionList.isEmpty()); for (String functionName : functionList) { @@ -109,5 +109,4 @@ private OpenSearchSQLParser createParser(String expr) { return parser; } } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java index 3c8d155e65..f72f1ba0ff 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -40,14 +39,11 @@ @ExtendWith(MockitoExtension.class) class AstSortBuilderTest { - @Mock - private QuerySpecification querySpec; + @Mock private QuerySpecification querySpec; - @Mock - private OrderByClauseContext orderByClause; + @Mock private OrderByClauseContext orderByClause; - @Mock - private UnresolvedPlan child; + @Mock private UnresolvedPlan child; @Test void can_build_sort_node() { @@ -56,32 +52,35 @@ void can_build_sort_node() { ImmutableMap> expects = ImmutableMap.>builder() - .put(new SortOption(null, null), - ImmutableList.of(argument("asc", booleanLiteral(true)))) - .put(new SortOption(ASC, null), - ImmutableList.of(argument("asc", booleanLiteral(true)))) - .put(new SortOption(DESC, null), + .put( + new SortOption(null, null), ImmutableList.of(argument("asc", booleanLiteral(true)))) + .put(new SortOption(ASC, null), ImmutableList.of(argument("asc", booleanLiteral(true)))) + .put( + new SortOption(DESC, null), ImmutableList.of(argument("asc", booleanLiteral(false)))) - .put(new SortOption(null, NULL_LAST), + .put( + new SortOption(null, NULL_LAST), ImmutableList.of( argument("asc", booleanLiteral(true)), argument("nullFirst", booleanLiteral(false)))) - .put(new SortOption(DESC, NULL_FIRST), + .put( + new SortOption(DESC, NULL_FIRST), 
ImmutableList.of( argument("asc", booleanLiteral(false)), argument("nullFirst", booleanLiteral(true)))) .build(); - expects.forEach((option, expect) -> { - when(querySpec.getOrderByOptions()).thenReturn(ImmutableList.of(option)); + expects.forEach( + (option, expect) -> { + when(querySpec.getOrderByOptions()).thenReturn(ImmutableList.of(option)); - AstSortBuilder sortBuilder = new AstSortBuilder(querySpec); - assertEquals( - new Sort( - child, // has to mock and attach child otherwise Guava ImmutableList NPE in getChild() - ImmutableList.of(field("name", expect))), - sortBuilder.visitOrderByClause(orderByClause).attach(child)); - }); + AstSortBuilder sortBuilder = new AstSortBuilder(querySpec); + assertEquals( + new Sort( + child, // has to mock and attach child otherwise Guava ImmutableList NPE in + // getChild() + ImmutableList.of(field("name", expect))), + sortBuilder.visitOrderByClause(orderByClause).attach(child)); + }); } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java index 2f75e89002..6dd027a74c 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser.context; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -35,32 +34,27 @@ class QuerySpecificationTest { @Test void can_collect_group_by_items_in_group_by_clause() { - QuerySpecification querySpec = collect( - "SELECT name, ABS(age) FROM test GROUP BY name, ABS(age)"); + QuerySpecification querySpec = + collect("SELECT name, ABS(age) FROM test GROUP BY name, ABS(age)"); assertEquals( - ImmutableList.of( - qualifiedName("name"), - function("ABS", qualifiedName("age"))), + ImmutableList.of(qualifiedName("name"), function("ABS", 
qualifiedName("age"))), querySpec.getGroupByItems()); } @Test void can_collect_select_items_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name, ABS(age) FROM test"); + QuerySpecification querySpec = collect("SELECT name, ABS(age) FROM test"); assertEquals( - ImmutableList.of( - qualifiedName("name"), - function("ABS", qualifiedName("age"))), + ImmutableList.of(qualifiedName("name"), function("ABS", qualifiedName("age"))), querySpec.getSelectItems()); } @Test void can_collect_aggregators_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name, AVG(age), SUM(balance) FROM test GROUP BY name"); + QuerySpecification querySpec = + collect("SELECT name, AVG(age), SUM(balance) FROM test GROUP BY name"); assertEquals( ImmutableSet.of( @@ -71,29 +65,25 @@ void can_collect_aggregators_in_select_clause() { @Test void can_collect_nested_aggregators_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name, ABS(1 + AVG(age)) FROM test GROUP BY name"); + QuerySpecification querySpec = + collect("SELECT name, ABS(1 + AVG(age)) FROM test GROUP BY name"); assertEquals( - ImmutableSet.of( - alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), + ImmutableSet.of(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))), querySpec.getAggregators()); } @Test void can_collect_alias_in_select_clause() { - QuerySpecification querySpec = collect( - "SELECT name AS n FROM test GROUP BY n"); + QuerySpecification querySpec = collect("SELECT name AS n FROM test GROUP BY n"); - assertEquals( - ImmutableMap.of("n", qualifiedName("name")), - querySpec.getSelectItemsByAlias()); + assertEquals(ImmutableMap.of("n", qualifiedName("name")), querySpec.getSelectItemsByAlias()); } @Test void should_deduplicate_same_aggregators() { - QuerySpecification querySpec = collect( - "SELECT AVG(age), AVG(balance), AVG(age) FROM test GROUP BY name"); + QuerySpecification querySpec = + collect("SELECT AVG(age), AVG(balance), AVG(age) FROM 
test GROUP BY name"); assertEquals( ImmutableSet.of( @@ -119,20 +109,24 @@ void can_collect_sort_options_in_order_by_clause() { @Test void should_skip_sort_items_in_window_function() { - assertEquals(1, - collect("SELECT name, RANK() OVER(ORDER BY age) " - + "FROM test ORDER BY name" - ).getOrderByOptions().size()); + assertEquals( + 1, + collect("SELECT name, RANK() OVER(ORDER BY age) FROM test ORDER BY name") + .getOrderByOptions() + .size()); } @Test void can_collect_filtered_aggregation() { assertEquals( - ImmutableSet.of(alias("AVG(age) FILTER(WHERE age > 20)", - filteredAggregate("AVG", qualifiedName("age"), - function(">", qualifiedName("age"), intLiteral(20))))), - collect("SELECT AVG(age) FILTER(WHERE age > 20) FROM test").getAggregators() - ); + ImmutableSet.of( + alias( + "AVG(age) FILTER(WHERE age > 20)", + filteredAggregate( + "AVG", + qualifiedName("age"), + function(">", qualifiedName("age"), intLiteral(20))))), + collect("SELECT AVG(age) FILTER(WHERE age > 20) FROM test").getAggregators()); } private QuerySpecification collect(String query) { @@ -147,5 +141,4 @@ private QuerySpecificationContext parse(String query) { parser.addErrorListener(new SyntaxAnalysisErrorListener()); return parser.querySpecification(); } - } From 983d77dbfd7ba83c7641057f49164a9d4cf56800 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 16 Aug 2023 15:27:30 -0700 Subject: [PATCH 24/42] [Spotless] Applying Google Code Format for integ-tests #9 (#1966) * Add spotless apply 60 files. (#328) rebase changes. Addressed PR comments. 
Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Yury-Fridlyand Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Update integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Update integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Update integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Update integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Update integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Update integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand Co-authored-by: Guian Gumpac --- integ-test/build.gradle | 5 + .../sql/bwc/SQLBackwardsCompatibilityIT.java | 2 - .../connection/OpenSearchConnection.java | 16 +- .../sql/correctness/runner/resultset/Row.java | 6 +- .../tests/OpenSearchConnectionTest.java | 33 +- .../sql/correctness/tests/RowTest.java | 6 +- .../sql/legacy/NestedFieldQueryIT.java | 486 +++--- .../sql/legacy/ObjectFieldSelectIT.java | 63 +- .../sql/legacy/OpenSearchSQLRestTestCase.java | 134 +- .../org/opensearch/sql/legacy/OrderIT.java | 39 +- .../sql/legacy/OrdinalAliasRewriterIT.java | 219 ++- 
.../org/opensearch/sql/legacy/PluginIT.java | 586 ++++--- .../sql/legacy/PreparedStatementIT.java | 86 +- .../sql/legacy/PrettyFormatResponseIT.java | 328 ++-- .../sql/legacy/PrettyFormatterIT.java | 11 +- .../sql/legacy/QueryAnalysisIT.java | 105 +- .../sql/legacy/QueryFunctionsIT.java | 157 +- .../org/opensearch/sql/legacy/QueryIT.java | 1551 ++++++++++------- .../sql/legacy/RestIntegTestCase.java | 3 + .../opensearch/sql/legacy/SQLFunctionsIT.java | 666 ++++--- .../sql/legacy/SQLIntegTestCase.java | 226 +-- .../org/opensearch/sql/legacy/ShowIT.java | 4 +- .../opensearch/sql/legacy/SourceFieldIT.java | 58 +- .../opensearch/sql/ppl/NowLikeFunctionIT.java | 207 ++- .../sql/ppl/ObjectFieldOperateIT.java | 67 +- .../org/opensearch/sql/ppl/OperatorIT.java | 116 +- .../opensearch/sql/ppl/PPLIntegTestCase.java | 22 +- .../org/opensearch/sql/ppl/PPLPluginIT.java | 12 +- .../opensearch/sql/ppl/ParseCommandIT.java | 22 +- .../sql/ppl/PositionFunctionIT.java | 162 +- .../ppl/PrometheusDataSourceCommandsIT.java | 143 +- .../opensearch/sql/ppl/QueryAnalysisIT.java | 16 +- .../org/opensearch/sql/ppl/QueryStringIT.java | 27 +- .../org/opensearch/sql/ppl/RareCommandIT.java | 11 +- .../sql/ppl/RelevanceFunctionIT.java | 68 +- .../opensearch/sql/ppl/RenameCommandIT.java | 5 +- .../opensearch/sql/ppl/ResourceMonitorIT.java | 9 +- .../opensearch/sql/ppl/SearchCommandIT.java | 1 - .../org/opensearch/sql/ppl/SettingsIT.java | 9 +- .../sql/ppl/ShowDataSourcesCommandIT.java | 35 +- .../sql/ppl/SimpleQueryStringIT.java | 34 +- .../org/opensearch/sql/ppl/SortCommandIT.java | 1 - .../java/org/opensearch/sql/sql/NestedIT.java | 388 +++-- .../opensearch/sql/sql/NowLikeFunctionIT.java | 195 ++- .../org/opensearch/sql/sql/NullLiteralIT.java | 13 +- .../sql/sql/PaginationBlackboxIT.java | 37 +- .../sql/sql/PaginationFallbackIT.java | 36 +- .../sql/sql/PaginationFilterIT.java | 106 +- .../org/opensearch/sql/sql/PaginationIT.java | 55 +- .../sql/sql/PaginationWindowIT.java | 32 +- 
.../sql/sql/PositionFunctionIT.java | 90 +- .../sql/sql/PreparedStatementIT.java | 27 +- .../java/org/opensearch/sql/sql/QueryIT.java | 130 +- .../org/opensearch/sql/sql/QueryStringIT.java | 57 +- .../opensearch/sql/sql/QueryValidationIT.java | 36 +- .../org/opensearch/sql/sql/RawFormatIT.java | 27 +- .../sql/sql/RelevanceFunctionIT.java | 64 +- .../opensearch/sql/sql/SQLCorrectnessIT.java | 31 +- .../org/opensearch/sql/sql/ScoreQueryIT.java | 76 +- .../sql/sql/SimpleQueryStringIT.java | 47 +- 60 files changed, 3847 insertions(+), 3357 deletions(-) diff --git a/integ-test/build.gradle b/integ-test/build.gradle index b52cfd5f22..6ee9cb425e 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -53,6 +53,11 @@ repositories { } } +// Being ignored as a temporary measure before being removed in favour of +// spotless https://github.com/opensearch-project/sql/issues/1101 +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + ext { projectSubstitutions = [:] licenseFile = rootProject.file('LICENSE.TXT') diff --git a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java index 799dadcd2d..f5c903d434 100644 --- a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.bwc; - import static org.opensearch.sql.legacy.TestUtils.createIndexByRestClient; import static org.opensearch.sql.legacy.TestUtils.isIndexExist; import static org.opensearch.sql.legacy.TestUtils.loadDataByRestClient; diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java index 258c031b76..8a2240855f 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import java.io.IOException; @@ -16,18 +15,15 @@ import org.opensearch.sql.correctness.runner.resultset.DBResult; /** - * OpenSearch database connection for insertion. This class wraps JDBCConnection to delegate query method. + * OpenSearch database connection for insertion. This class wraps JDBCConnection to delegate query + * method. */ public class OpenSearchConnection implements DBConnection { - /** - * Connection via our OpenSearch JDBC driver - */ + /** Connection via our OpenSearch JDBC driver */ private final DBConnection connection; - /** - * Native OpenSearch REST client for operation unsupported by driver such as CREATE/INSERT - */ + /** Native OpenSearch REST client for operation unsupported by driver such as CREATE/INSERT */ private final RestClient client; public OpenSearchConnection(String connectionUrl, RestClient client) { @@ -112,10 +108,8 @@ private String buildBulkBody(String[] columnNames, List batch) { } } - body.append("{\"index\":{}}\n"). 
- append(json).append("\n"); + body.append("{\"index\":{}}\n").append(json).append("\n"); } return body.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java index da08487a10..973ea76e71 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import java.math.BigDecimal; @@ -15,9 +14,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Row in result set. - */ +/** Row in result set. */ @EqualsAndHashCode @ToString @Getter @@ -77,5 +74,4 @@ public int compareTo(Row other) { } return 0; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java index 5b33884814..e5130d8fc1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -30,14 +29,11 @@ import org.opensearch.client.RestClient; import org.opensearch.sql.correctness.runner.connection.OpenSearchConnection; -/** - * Tests for {@link OpenSearchConnection} - */ +/** Tests for {@link OpenSearchConnection} */ @RunWith(MockitoJUnitRunner.class) public class OpenSearchConnectionTest { - @Mock - private RestClient client; + @Mock private RestClient client; private OpenSearchConnection conn; @@ -63,36 +59,30 @@ public void testCreateTable() throws IOException { @Test public void testInsertData() 
throws IOException { - conn.insert("test", new String[] {"name"}, - Arrays.asList(new String[] {"John"}, new String[] {"Hank"})); + conn.insert( + "test", new String[] {"name"}, Arrays.asList(new String[] {"John"}, new String[] {"Hank"})); Request actual = captureActualArg(); assertEquals("POST", actual.getMethod()); assertEquals("/test/_bulk?refresh=true", actual.getEndpoint()); assertEquals( - "{\"index\":{}}\n" - + "{\"name\":\"John\"}\n" - + "{\"index\":{}}\n" - + "{\"name\":\"Hank\"}\n", - getBody(actual) - ); + "{\"index\":{}}\n{\"name\":\"John\"}\n{\"index\":{}}\n{\"name\":\"Hank\"}\n", + getBody(actual)); } @Test public void testInsertNullData() throws IOException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList(new Object[] {null, 30}, new Object[] {"Hank", null})); Request actual = captureActualArg(); assertEquals("POST", actual.getMethod()); assertEquals("/test/_bulk?refresh=true", actual.getEndpoint()); assertEquals( - "{\"index\":{}}\n" - + "{\"age\":30}\n" - + "{\"index\":{}}\n" - + "{\"name\":\"Hank\"}\n", - getBody(actual) - ); + "{\"index\":{}}\n{\"age\":30}\n{\"index\":{}}\n{\"name\":\"Hank\"}\n", + getBody(actual)); } @Test @@ -114,5 +104,4 @@ private String getBody(Request request) throws IOException { InputStream inputStream = request.getEntity().getContent(); return CharStreams.toString(new InputStreamReader(inputStream)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java index 66cc1a0500..79e134fe7b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -12,9 +11,7 @@ import 
org.junit.Test; import org.opensearch.sql.correctness.runner.resultset.Row; -/** - * Unit test {@link Row} - */ +/** Unit test {@link Row} */ public class RowTest { @Test @@ -47,5 +44,4 @@ public void shouldConsiderNullGreater() { row2.add("world"); assertEquals(1, row1.compareTo(row2)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java index 378fbda937..48d053c6e1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -40,6 +39,7 @@ import org.opensearch.search.SearchHit; /** + *
  * Integration test cases for both rewriting and projection logic.
  * 

* Test result: @@ -56,13 +56,13 @@ * 4) Subquery * 5) HAVING * 6) Verification for conditions mixed with regular and nested fields + *

*/ public class NestedFieldQueryIT extends SQLIntegTestCase { private static final String FROM = "FROM " + TestsConstants.TEST_INDEX_NESTED_TYPE + " n, n.message m"; - @Override protected void init() throws Exception { loadIndex(Index.NESTED); @@ -83,188 +83,71 @@ private void queryAll(String sql) throws IOException { assertThat( query(sql), hits( - hit( - myNum(1), - someField("b"), - innerHits("message", - hit( - author("e"), - info("a") - ) - ) - ), - hit( - myNum(2), - someField("a"), - innerHits("message", - hit( - author("f"), - info("b") - ) - ) - ), - hit( - myNum(3), - someField("a"), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), + hit(myNum(1), someField("b"), innerHits("message", hit(author("e"), info("a")))), + hit(myNum(2), someField("a"), innerHits("message", hit(author("f"), info("b")))), + hit(myNum(3), someField("a"), innerHits("message", hit(author("g"), info("c")))), hit( myNum(4), someField("b"), - innerHits("message", - hit( - author("h"), - info("c") - ), - hit( - author("i"), - info("a") - ) - ) - ), + innerHits("message", hit(author("h"), info("c")), hit(author("i"), info("a")))), hit( myNum(new int[] {3, 4}), someField("a"), - innerHits("message", - hit( - author("zz"), - info("zz") - ) - ) - ) - ) - ); + innerHits("message", hit(author("zz"), info("zz")))))); } @Test public void singleCondition() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE m.info = 'c'" - ), + query("SELECT myNum, m.author, m.info", "WHERE m.info = 'c'"), hits( - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), - hit( - myNum(4), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + hit(myNum(3), innerHits("message", hit(author("g"), info("c")))), + hit(myNum(4), innerHits("message", hit(author("h"), info("c")))))); } @Test public void multipleConditionsOfNestedField() throws IOException { assertThat( - query( - "SELECT someField, m.author, 
m.info", - "WHERE m.info = 'c' AND m.author = 'h'" - ), - hits( - hit( - someField("b"), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + query("SELECT someField, m.author, m.info", "WHERE m.info = 'c' AND m.author = 'h'"), + hits(hit(someField("b"), innerHits("message", hit(author("h"), info("c")))))); } @Test public void multipleConditionsOfNestedFieldNoMatch() throws IOException { assertThat( - query( - "SELECT someField, m.author, m.info", - "WHERE m.info = 'c' AND m.author = 'i'" - ), - hits() - ); + query("SELECT someField, m.author, m.info", "WHERE m.info = 'c' AND m.author = 'i'"), + hits()); } @Test public void multipleConditionsOfRegularAndNestedField() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE myNum = 3 AND m.info = 'c'" - ), - hits( - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ) - ) - ); + query("SELECT myNum, m.author, m.info", "WHERE myNum = 3 AND m.info = 'c'"), + hits(hit(myNum(3), innerHits("message", hit(author("g"), info("c")))))); } @Test public void multipleConditionsOfRegularOrNestedField() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE myNum = 2 OR m.info = 'c'" - ), + query("SELECT myNum, m.author, m.info", "WHERE myNum = 2 OR m.info = 'c'"), hits( - hit( - myNum(2) - ), // Note: no inner hit here because of no match in nested field - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), - hit( - myNum(4), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + hit(myNum(2)), // Note: no inner hit here because of no match in nested field + hit(myNum(3), innerHits("message", hit(author("g"), info("c")))), + hit(myNum(4), innerHits("message", hit(author("h"), info("c")))))); } @Test public void leftJoinSelectAll() throws IOException { - String sql = "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN 
e.projects p"; + String sql = + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p"; String explain = explainQuery(sql); - assertThat(explain, containsString("{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + - "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); + assertThat( + explain, + containsString( + "{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + + "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); assertThat(explain, containsString("\"_source\":{\"includes\":[\"projects.*\"")); @@ -274,42 +157,50 @@ public void leftJoinSelectAll() throws IOException { @Test public void leftJoinSpecificFields() throws IOException { - String sql = "SELECT e.name, p.name, p.started_year " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p"; + String sql = + "SELECT e.name, p.name, p.started_year " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p"; String explain = explainQuery(sql); - assertThat(explain, containsString("{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + - "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); + assertThat( + explain, + containsString( + "{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + + "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); assertThat(explain, containsString("\"_source\":{\"includes\":[\"name\"],")); - assertThat(explain, + assertThat( + explain, containsString("\"_source\":{\"includes\":[\"projects.name\",\"projects.started_year\"]")); JSONObject results = executeQuery(sql); Assert.assertThat(getTotalHits(results), equalTo(4)); } - @Ignore("Comma join in left join won't pass syntax check in new ANTLR parser. " - + "Ignore for now and require to change grammar too when we want to support this case.") + @Ignore( + "Comma join in left join won't pass syntax check in new ANTLR parser. 
" + + "Ignore for now and require to change grammar too when we want to support this case.") @Test public void leftJoinExceptionOnExtraNestedFields() throws IOException { - String sql = "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p, e.comments c"; + String sql = + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p, e.comments c"; try { String explain = explainQuery(sql); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); - assertThat(entity, + assertThat( + entity, containsString("only single nested field is allowed as right table for LEFT JOIN")); assertThat(entity, containsString("\"type\":\"verification_exception\"")); } } - @Test public void aggregationWithoutGroupBy() throws IOException { String sql = "SELECT AVG(m.dayOfWeek) AS avgDay " + FROM; @@ -317,7 +208,9 @@ public void aggregationWithoutGroupBy() throws IOException { JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertThat(((BigDecimal) aggregation.query("/avgDay/value")).doubleValue(), closeTo(3.166666666, 0.01)); + Assert.assertThat( + ((BigDecimal) aggregation.query("/avgDay/value")).doubleValue(), + closeTo(3.166666666, 0.01)); } @Test @@ -351,39 +244,36 @@ public void groupByRegularFieldAndSum() throws IOException { Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(2)); Assert.assertThat(msgInfoBuckets.query("/0/key"), equalTo("a")); - Assert.assertThat(((BigDecimal) msgInfoBuckets.query("/0/message.dayOfWeek@NESTED/sumDay/value")).doubleValue(), + Assert.assertThat( + ((BigDecimal) 
msgInfoBuckets.query("/0/message.dayOfWeek@NESTED/sumDay/value")) + .doubleValue(), closeTo(9.0, 0.01)); Assert.assertThat(msgInfoBuckets.query("/1/key"), equalTo("b")); - Assert.assertThat(((BigDecimal) msgInfoBuckets.query("/1/message.dayOfWeek@NESTED/sumDay/value")).doubleValue(), + Assert.assertThat( + ((BigDecimal) msgInfoBuckets.query("/1/message.dayOfWeek@NESTED/sumDay/value")) + .doubleValue(), closeTo(10.0, 0.01)); } @Test public void nestedFiledIsNotNull() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + - "WHERE p IS NOT NULL"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + + "WHERE p IS NOT NULL"; assertThat( executeQuery(sql), hitAll( kvString("/_source/name", Is.is("Bob Smith")), - kvString("/_source/name", Is.is("Jane Smith")) - ) - ); + kvString("/_source/name", Is.is("Jane Smith")))); } // Doesn't support: aggregate function other than COUNT() @SuppressWarnings("unused") public void groupByNestedFieldAndAvg() throws IOException { - query( - "SELECT m.info, AVG(m.dayOfWeek)", - "GROUP BY m.info" - ); - query( - "SELECT m.info, AVG(myNum)", - "GROUP BY m.info" - ); + query("SELECT m.info, AVG(m.dayOfWeek)", "GROUP BY m.info"); + query("SELECT m.info, AVG(myNum)", "GROUP BY m.info"); } @Test @@ -418,10 +308,11 @@ public void groupByNestedAndRegularField() throws IOException { @Test public void countAggWithoutWhere() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -437,11 +328,12 @@ public void 
countAggWithoutWhere() throws IOException { @Test public void countAggWithWhereOnParent() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -457,11 +349,12 @@ public void countAggWithWhereOnParent() throws IOException { @Test public void countAggWithWhereOnNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -477,11 +370,12 @@ public void countAggWithWhereOnNested() throws IOException { @Test public void countAggWithWhereOnParentOrNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, 
"name.keyword"); @@ -497,11 +391,12 @@ public void countAggWithWhereOnParentOrNested() throws IOException { @Test public void countAggWithWhereOnParentAndNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -517,11 +412,12 @@ public void countAggWithWhereOnParentAndNested() throws IOException { @Test public void countAggWithWhereOnNestedAndNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 0"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -537,11 +433,12 @@ public void countAggWithWhereOnNestedAndNested() throws IOException { @Test public void countAggWithWhereOnNestedOrNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects 
AS p " + + "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -557,11 +454,12 @@ public void countAggWithWhereOnNestedOrNested() throws IOException { @Test public void countAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name, COUNT(p.started_year) as count " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING count > 0"; + String sql = + "SELECT e.name, COUNT(p.started_year) as count " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING count > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -581,10 +479,11 @@ public void countAggOnNestedInnerFieldWithoutWhere() throws IOException { @Test public void maxAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name, MAX(p.started_year) as max " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name"; + String sql = + "SELECT e.name, MAX(p.started_year) as max " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -594,20 +493,27 @@ public void maxAggOnNestedInnerFieldWithoutWhere() throws IOException { Assert.assertThat(bucket.length(), equalTo(2)); Assert.assertThat(bucket.query("/0/key"), equalTo("Bob Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/0/projects.started_year@NESTED/projects.started_year@FILTER/max/value")).doubleValue(), + ((BigDecimal) + bucket.query( + 
"/0/projects.started_year@NESTED/projects.started_year@FILTER/max/value")) + .doubleValue(), closeTo(2015.0, 0.01)); Assert.assertThat(bucket.query("/1/key"), equalTo("Jane Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/1/projects.started_year@NESTED/projects.started_year@FILTER/max/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/1/projects.started_year@NESTED/projects.started_year@FILTER/max/value")) + .doubleValue(), closeTo(2015.0, 0.01)); } @Test public void havingCountAggWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -623,11 +529,12 @@ public void havingCountAggWithoutWhere() throws IOException { @Test public void havingCountAggWithWhereOnParent() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -643,11 +550,12 @@ public void havingCountAggWithWhereOnParent() throws IOException { @Test public void havingCountAggWithWhereOnNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT 
e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -663,11 +571,12 @@ public void havingCountAggWithWhereOnNested() throws IOException { @Test public void havingCountAggWithWhereOnParentOrNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -683,11 +592,12 @@ public void havingCountAggWithWhereOnParentOrNested() throws IOException { @Test public void havingCountAggWithWhereOnParentAndNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -703,11 +613,12 @@ public void havingCountAggWithWhereOnParentAndNested() throws IOException { @Test public void havingCountAggWithWhereOnNestedAndNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, 
e.projects AS p " + - "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 0"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -723,11 +634,12 @@ public void havingCountAggWithWhereOnNestedAndNested() throws IOException { @Test public void havingCountAggWithWhereOnNestedOrNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -743,11 +655,12 @@ public void havingCountAggWithWhereOnNestedOrNested() throws IOException { @Test public void havingCountAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p.started_year) > 0"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p.started_year) > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -767,11 +680,12 @@ public void havingCountAggOnNestedInnerFieldWithoutWhere() throws IOException { 
@Test public void havingMaxAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING MAX(p.started_year) > 1990"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING MAX(p.started_year) > 1990"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -781,22 +695,28 @@ public void havingMaxAggOnNestedInnerFieldWithoutWhere() throws IOException { Assert.assertThat(bucket.length(), equalTo(2)); Assert.assertThat(bucket.query("/0/key"), equalTo("Bob Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/0/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/0/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")) + .doubleValue(), closeTo(2015.0, 0.01)); Assert.assertThat(bucket.query("/1/key"), equalTo("Jane Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/1/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/1/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")) + .doubleValue(), closeTo(2015.0, 0.01)); } /*********************************************************** - Matchers for Non-Aggregation Testing + * Matchers for Non-Aggregation Testing ***********************************************************/ @SafeVarargs private final Matcher hits(Matcher... 
subMatchers) { - return featureValueOf("hits", arrayContainingInAnyOrder(subMatchers), - resp -> resp.getHits().getHits()); + return featureValueOf( + "hits", arrayContainingInAnyOrder(subMatchers), resp -> resp.getHits().getHits()); } @SafeVarargs @@ -834,8 +754,7 @@ public boolean matches(Object item) { } @Override - public void describeTo(Description description) { - } + public void describeTo(Description description) {} }; } @@ -860,16 +779,15 @@ private final Matcher innerHits(String path, Matcher... in return featureValueOf( "innerHits", arrayContainingInAnyOrder(innerHitMatchers), - hit -> hit.getInnerHits().get(path).getHits() - ); + hit -> hit.getInnerHits().get(path).getHits()); } /*********************************************************** - Matchers for Aggregation Testing + * Matchers for Aggregation Testing ***********************************************************/ - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, - Function getter) { + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -879,7 +797,7 @@ protected U featureValueOf(T actual) { } /*********************************************************** - Query Utility to Fetch Response for SQL + * Query Utility to Fetch Response for SQL ***********************************************************/ private SearchResponse query(String select, String... statements) throws IOException { @@ -889,10 +807,11 @@ private SearchResponse query(String select, String... 
statements) throws IOExcep private SearchResponse execute(String sql) throws IOException { final JSONObject jsonObject = executeQuery(sql); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser); } @@ -904,5 +823,4 @@ private JSONObject getAggregation(final JSONObject queryResult, final String agg Assert.assertTrue(aggregations.has(aggregationName)); return aggregations.getJSONObject(aggregationName); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java index ce781123d6..3a2f48d497 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DEEP_NESTED; @@ -18,9 +17,8 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Integration test for OpenSearch object field (and nested field). - * This class is focused on simple SELECT-FROM query to ensure right column - * number and value is returned. + * Integration test for OpenSearch object field (and nested field). This class is focused on simple + * SELECT-FROM query to ensure right column number and value is returned. 
*/ public class ObjectFieldSelectIT extends SQLIntegTestCase { @@ -36,33 +34,28 @@ public void testSelectObjectFieldItself() { verifySchema(response, schema("city", null, "object")); // Expect object field itself is returned in a single cell - verifyDataRows(response, - rows(new JSONObject( - "{\n" - + " \"name\": \"Seattle\",\n" - + " \"location\": {\"latitude\": 10.5}\n" - + "}") - ) - ); + verifyDataRows( + response, + rows( + new JSONObject( + "{\n" + + " \"name\": \"Seattle\",\n" + + " \"location\": {\"latitude\": 10.5}\n" + + "}"))); } @Test public void testSelectObjectInnerFields() { - JSONObject response = new JSONObject(query( - "SELECT city.location, city.location.latitude FROM %s")); + JSONObject response = + new JSONObject(query("SELECT city.location, city.location.latitude FROM %s")); - verifySchema(response, + verifySchema( + response, schema("city.location", null, "object"), - schema("city.location.latitude", null, "double") - ); + schema("city.location.latitude", null, "double")); // Expect inner regular or object field returned in its single cell - verifyDataRows(response, - rows( - new JSONObject("{\"latitude\": 10.5}"), - 10.5 - ) - ); + verifyDataRows(response, rows(new JSONObject("{\"latitude\": 10.5}"), 10.5)); } @Test @@ -72,15 +65,15 @@ public void testSelectNestedFieldItself() { verifySchema(response, schema("projects", null, "nested")); // Expect nested field itself is returned in a single cell - verifyDataRows(response, - rows(new JSONArray( - "[\n" - + " {\"name\": \"AWS Redshift Spectrum querying\"},\n" - + " {\"name\": \"AWS Redshift security\"},\n" - + " {\"name\": \"AWS Aurora security\"}\n" - + "]") - ) - ); + verifyDataRows( + response, + rows( + new JSONArray( + "[\n" + + " {\"name\": \"AWS Redshift Spectrum querying\"},\n" + + " {\"name\": \"AWS Redshift security\"},\n" + + " {\"name\": \"AWS Aurora security\"}\n" + + "]"))); } @Test @@ -100,10 +93,6 @@ public void testSelectObjectFieldOfArrayValuesInnerFields() { } private 
String query(String sql) { - return executeQuery( - StringUtils.format(sql, TEST_INDEX_DEEP_NESTED), - "jdbc" - ); + return executeQuery(StringUtils.format(sql, TEST_INDEX_DEEP_NESTED), "jdbc"); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java index 7b5961de35..385c9bc6ba 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static java.util.Collections.unmodifiableList; @@ -43,8 +42,9 @@ import org.opensearch.test.rest.OpenSearchRestTestCase; /** - * OpenSearch SQL integration test base class to support both security disabled and enabled OpenSearch cluster. - * Allows interaction with multiple external test clusters using OpenSearch's {@link RestClient}. + * OpenSearch SQL integration test base class to support both security disabled and enabled + * OpenSearch cluster. Allows interaction with multiple external test clusters using OpenSearch's + * {@link RestClient}. 
*/ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { @@ -53,17 +53,20 @@ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { public static final String MATCH_ALL_REMOTE_CLUSTER = "*"; private static RestClient remoteClient; + /** - * A client for the running remote OpenSearch cluster configured to take test administrative actions - * like remove all indexes after the test completes + * A client for the running remote OpenSearch cluster configured to take test administrative + * actions like remove all indexes after the test completes */ private static RestClient remoteAdminClient; protected boolean isHttps() { - boolean isHttps = Optional.ofNullable(System.getProperty("https")) - .map("true"::equalsIgnoreCase).orElse(false); + boolean isHttps = + Optional.ofNullable(System.getProperty("https")) + .map("true"::equalsIgnoreCase) + .orElse(false); if (isHttps) { - //currently only external cluster is supported for security enabled testing + // currently only external cluster is supported for security enabled testing if (!Optional.ofNullable(System.getProperty("tests.rest.cluster")).isPresent()) { throw new RuntimeException( "external cluster url should be provided for security enabled testing"); @@ -77,16 +80,14 @@ protected String getProtocol() { return isHttps() ? "https" : "http"; } - /** - * Get the client to remote cluster used for ordinary api calls while writing a test. - */ + /** Get the client to remote cluster used for ordinary api calls while writing a test. */ protected static RestClient remoteClient() { return remoteClient; } /** - * Get the client to remote cluster used for test administrative actions. - * Do not use this while writing a test. Only use it for cleaning up after tests. + * Get the client to remote cluster used for test administrative actions. Do not use this while + * writing a test. Only use it for cleaning up after tests. 
*/ protected static RestClient remoteAdminClient() { return remoteAdminClient; @@ -128,9 +129,7 @@ public void initRemoteClient() throws IOException { assert remoteAdminClient != null; } - /** - * Get a comma delimited list of [host:port] to which to send REST requests. - */ + /** Get a comma delimited list of [host:port] to which to send REST requests. */ protected String getTestRestCluster(String clusterName) { String cluster = System.getProperty("tests.rest." + clusterName + ".http_hosts"); if (cluster == null) { @@ -138,15 +137,12 @@ protected String getTestRestCluster(String clusterName) { "Must specify [tests.rest." + clusterName + ".http_hosts] system property with a comma delimited list of [host:port] " - + "to which to send REST requests" - ); + + "to which to send REST requests"); } return cluster; } - /** - * Get a comma delimited list of [host:port] for connections between clusters. - */ + /** Get a comma delimited list of [host:port] for connections between clusters. */ protected String getTestTransportCluster(String clusterName) { String cluster = System.getProperty("tests.rest." + clusterName + ".transport_hosts"); if (cluster == null) { @@ -154,8 +150,7 @@ protected String getTestTransportCluster(String clusterName) { "Must specify [tests.rest." + clusterName + ".transport_hosts] system property with a comma delimited list of [host:port] " - + "for connections between clusters" - ); + + "for connections between clusters"); } return cluster; } @@ -181,18 +176,22 @@ protected static void wipeAllOpenSearchIndices(RestClient client) throws IOExcep // include all the indices, included hidden indices. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-indices.html#cat-indices-api-query-params try { - Response response = client.performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); + Response response = + client.performRequest( + new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); JSONArray jsonArray = new JSONArray(EntityUtils.toString(response.getEntity(), "UTF-8")); for (Object object : jsonArray) { JSONObject jsonObject = (JSONObject) object; String indexName = jsonObject.getString("index"); try { - // System index, mostly named .opensearch-xxx or .opendistro-xxx, are not allowed to delete + // System index, mostly named .opensearch-xxx or .opendistro-xxx, are not allowed to + // delete if (!indexName.startsWith(".opensearch") && !indexName.startsWith(".opendistro")) { client.performRequest(new Request("DELETE", "/" + indexName)); } } catch (Exception e) { - // TODO: Ignore index delete error for now. Remove this if strict check on system index added above. + // TODO: Ignore index delete error for now. Remove this if strict check on system index + // added above. 
LOG.warn("Failed to delete index: " + indexName, e); } } @@ -201,9 +200,8 @@ protected static void wipeAllOpenSearchIndices(RestClient client) throws IOExcep } } - protected static void configureHttpsClient(RestClientBuilder builder, Settings settings, - HttpHost httpHost) - throws IOException { + protected static void configureHttpsClient( + RestClientBuilder builder, Settings settings, HttpHost httpHost) throws IOException { Map headers = ThreadContext.buildDefaultHeaders(settings); Header[] defaultHeaders = new Header[headers.size()]; int i = 0; @@ -211,56 +209,66 @@ protected static void configureHttpsClient(RestClientBuilder builder, Settings s defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue()); } builder.setDefaultHeaders(defaultHeaders); - builder.setHttpClientConfigCallback(httpClientBuilder -> { - String userName = Optional.ofNullable(System.getProperty("user")) - .orElseThrow(() -> new RuntimeException("user name is missing")); - String password = Optional.ofNullable(System.getProperty("password")) - .orElseThrow(() -> new RuntimeException("password is missing")); - BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider - .setCredentials(new AuthScope(httpHost), new UsernamePasswordCredentials(userName, - password.toCharArray())); - try { - final TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create() - .setSslContext(SSLContextBuilder.create() - .loadTrustMaterial(null, (chains, authType) -> true) - .build()) - .setHostnameVerifier(NoopHostnameVerifier.INSTANCE) - .build(); + builder.setHttpClientConfigCallback( + httpClientBuilder -> { + String userName = + Optional.ofNullable(System.getProperty("user")) + .orElseThrow(() -> new RuntimeException("user name is missing")); + String password = + Optional.ofNullable(System.getProperty("password")) + .orElseThrow(() -> new RuntimeException("password is missing")); + BasicCredentialsProvider credentialsProvider = new 
BasicCredentialsProvider(); + credentialsProvider.setCredentials( + new AuthScope(httpHost), + new UsernamePasswordCredentials(userName, password.toCharArray())); + try { + final TlsStrategy tlsStrategy = + ClientTlsStrategyBuilder.create() + .setSslContext( + SSLContextBuilder.create() + .loadTrustMaterial(null, (chains, authType) -> true) + .build()) + .setHostnameVerifier(NoopHostnameVerifier.INSTANCE) + .build(); - return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider) - .setConnectionManager(PoolingAsyncClientConnectionManagerBuilder.create() - .setTlsStrategy(tlsStrategy) - .build()); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return httpClientBuilder + .setDefaultCredentialsProvider(credentialsProvider) + .setConnectionManager( + PoolingAsyncClientConnectionManagerBuilder.create() + .setTlsStrategy(tlsStrategy) + .build()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT); final TimeValue socketTimeout = - TimeValue.parseTimeValue(socketTimeoutString == null ? "60s" : socketTimeoutString, - CLIENT_SOCKET_TIMEOUT); + TimeValue.parseTimeValue( + socketTimeoutString == null ? "60s" : socketTimeoutString, CLIENT_SOCKET_TIMEOUT); builder.setRequestConfigCallback( - conf -> conf.setResponseTimeout(Timeout.ofMilliseconds(Math.toIntExact(socketTimeout.getMillis())))); + conf -> + conf.setResponseTimeout( + Timeout.ofMilliseconds(Math.toIntExact(socketTimeout.getMillis())))); if (settings.hasValue(CLIENT_PATH_PREFIX)) { builder.setPathPrefix(settings.get(CLIENT_PATH_PREFIX)); } } /** - * Initialize rest client to remote cluster, - * and create a connection to it from the coordinating cluster. + * Initialize rest client to remote cluster, and create a connection to it from the coordinating + * cluster. 
*/ public void configureMultiClusters() throws IOException { initRemoteClient(); Request connectionRequest = new Request("PUT", "_cluster/settings"); - String connectionSetting = "{\"persistent\": {\"cluster\": {\"remote\": {\"" - + REMOTE_CLUSTER - + "\": {\"seeds\": [\"" - + getTestTransportCluster(REMOTE_CLUSTER).split(",")[0] - + "\"]}}}}}"; + String connectionSetting = + "{\"persistent\": {\"cluster\": {\"remote\": {\"" + + REMOTE_CLUSTER + + "\": {\"seeds\": [\"" + + getTestTransportCluster(REMOTE_CLUSTER).split(",")[0] + + "\"]}}}}}"; connectionRequest.setJsonEntity(connectionSetting); adminClient().performRequest(connectionRequest); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java index c8b4b87f69..01e989e9f0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -76,17 +75,17 @@ public void orderByIsNull() throws IOException { assertThat(query(hits, "/0/_source/id"), equalTo("5")); // Another equivalent syntax - assertThat(explainQuery("SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL, id DESC"), - equalTo(explainQuery("SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL DESC")) - ); + assertThat( + explainQuery( + "SELECT * FROM opensearch-sql_test_index_order ORDER BY id IS NULL, id DESC"), + equalTo( + explainQuery( + "SELECT * FROM opensearch-sql_test_index_order ORDER BY id IS NULL DESC"))); } @Test public void orderByIsNotNull() throws IOException { - String query = - "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"; + String query = "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"; JSONArray result = 
getSortExplain(query); assertThat(1, equalTo(result.length())); assertThat(query(result, "/0/name.keyword/order"), equalTo("asc")); @@ -95,21 +94,24 @@ public void orderByIsNotNull() throws IOException { JSONObject response = executeQuery(query); JSONArray hits = getHits(response); assertFalse(hits.getJSONObject(0).getJSONObject("_source").has("name")); - assertThat(hits.getJSONObject(hits.length() - 1).query("/_source/name").toString(), - equalTo("f")); + assertThat( + hits.getJSONObject(hits.length() - 1).query("/_source/name").toString(), equalTo("f")); // Another equivalent syntax - assertThat(explainQuery("SELECT id, name FROM opensearch-sql_test_index_order " + - "ORDER BY name IS NOT NULL"), - equalTo(explainQuery("SELECT id, name FROM opensearch-sql_test_index_order " + - "ORDER BY name IS NOT NULL ASC")) - ); + assertThat( + explainQuery( + "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"), + equalTo( + explainQuery( + "SELECT id, name FROM opensearch-sql_test_index_order " + + "ORDER BY name IS NOT NULL ASC"))); } @Test public void multipleOrderByWithNulls() throws IOException { String query = - "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY id IS NULL, name IS NOT NULL"; + "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY id IS NULL, name IS NOT" + + " NULL"; JSONArray result = getSortExplain(query); assertThat(result.length(), equalTo(2)); assertThat(query(result, "/0/id/missing"), equalTo("_last")); @@ -118,8 +120,9 @@ public void multipleOrderByWithNulls() throws IOException { @Test public void testOrderByMergeForSameField() throws IOException { - String query = "SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL, name DESC, id DESC, id IS NOT NULL, name IS NULL"; + String query = + "SELECT * FROM opensearch-sql_test_index_order " + + "ORDER BY id IS NULL, name DESC, id DESC, id IS NOT NULL, name IS NULL"; JSONArray result = getSortExplain(query); assertThat(2, 
equalTo(result.length())); assertThat(query(result, "/0/id/order"), equalTo("asc")); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java index ecec5844be..caea2aa7c6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -23,125 +22,191 @@ protected void init() throws Exception { // tests query results with jdbc output @Test public void simpleGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b GROUP BY lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b GROUP BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b GROUP BY lastname LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void multipleGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b GROUP BY firstname, age, lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b GROUP BY 2, 3, 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b GROUP BY firstname, age, lastname" + + " LIMIT 3", + 
TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b GROUP BY 2, 3, 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b GROUP BY `lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b GROUP BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b GROUP BY `lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksAndTableAliasGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY `age`, `b`.`lastname` , firstname LIMIT 10", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY 2, 1, 3 LIMIT 10", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY `age`," + + " `b`.`lastname` , firstname LIMIT 10", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY 2, 1, 3 LIMIT 10", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void simpleOrderByOrdinal() { - String expected 
= executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b ORDER BY lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b ORDER BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b ORDER BY lastname LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void multipleOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b ORDER BY firstname, age, lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b ORDER BY 2, 3, 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b ORDER BY firstname, age, lastname" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b ORDER BY 2, 3, 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b ORDER BY `lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b ORDER BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b ORDER BY `lastname` LIMIT 3", + 
TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksAndTableAliasOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } // ORDER BY IS NULL/NOT NULL @Test public void selectFieldiWithBacticksAndTableAliasOrderByOrdinalAndNull() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC, age is NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC," + + " age is NULL LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, 
equalTo(expected)); } - // explain @Test public void explainSelectFieldiWithBacticksAndTableAliasGroupByOrdinal() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b GROUP BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b GROUP BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b GROUP BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } @Test public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinal() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } @@ -149,12 +214,18 @@ public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinal() throws @Ignore("only work for legacy engine") public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinalAndNull() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC, age is NULL LIMIT 3", - 
TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC," + + " age is NULL LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java index 5f7de5d496..9cbb73cd5b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -33,8 +32,9 @@ protected void init() throws Exception { public void sqlEnableSettingsTest() throws IOException { loadIndex(Index.ACCOUNT); updateClusterSettings(new ClusterSetting(PERSISTENT, "plugins.sql.enabled", "true")); - String query = String - .format(Locale.ROOT, "SELECT firstname FROM %s WHERE account_number=1", TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, "SELECT firstname FROM %s WHERE account_number=1", TEST_INDEX_ACCOUNT); JSONObject queryResult = executeQuery(query); assertThat(getHits(queryResult).length(), equalTo(1)); @@ -50,16 +50,19 @@ public void sqlEnableSettingsTest() throws IOException { assertThat(queryResult.getInt("status"), equalTo(400)); JSONObject error = queryResult.getJSONObject("error"); assertThat(error.getString("reason"), equalTo("Invalid SQL query")); - assertThat(error.getString("details"), 
equalTo( - "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is false")); + assertThat( + error.getString("details"), + equalTo( + "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is" + + " false")); assertThat(error.getString("type"), equalTo("SQLFeatureDisabledException")); wipeAllClusterSettings(); } @Test public void sqlDeleteSettingsTest() throws IOException { - updateClusterSettings(new ClusterSetting(PERSISTENT, - Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "false")); + updateClusterSettings( + new ClusterSetting(PERSISTENT, Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "false")); String deleteQuery = StringUtils.format("DELETE FROM %s", TestsConstants.TEST_INDEX_ACCOUNT); final ResponseException exception = @@ -70,8 +73,8 @@ public void sqlDeleteSettingsTest() throws IOException { "{\n" + " \"error\": {\n" + " \"reason\": \"Invalid SQL query\",\n" - + " \"details\": \"DELETE clause is disabled by default and will be deprecated. Using " - + "the plugins.sql.delete.enabled setting to enable it\",\n" + + " \"details\": \"DELETE clause is disabled by default and will be deprecated." 
+ + " Using the plugins.sql.delete.enabled setting to enable it\",\n" + " \"type\": \"SQLFeatureDisabledException\"\n" + " },\n" + " \"status\": 400\n" @@ -84,329 +87,355 @@ public void sqlDeleteSettingsTest() throws IOException { @Test public void sqlTransientOnlySettingTest() throws IOException { // (1) compact form - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_interval\": \"80\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"80\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"80\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); // (2) partial expanded form - settings = "{" + - " \"transient\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics.rolling_interval\": \"75\"" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"transient\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics.rolling_interval\": \"75\"" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"75\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + 
+ " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"75\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); - // (3) full expanded form - settings = "{" + - " \"transient\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\": {" + - " \"rolling_interval\": \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"transient\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\": {" + + " \"rolling_interval\": \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void sqlPersistentOnlySettingTest() throws IOException { // (1) compact form - String settings = "{" + - " \"persistent\": {" + - " \"plugins.query.metrics.rolling_interval\": \"80\"" + - " }" + - "}"; + String settings = + "{" + + " \"persistent\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"80\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + 
+ " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"80\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); // (2) partial expanded form - settings = "{" + - " \"persistent\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics.rolling_interval\": \"75\"" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"persistent\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics.rolling_interval\": \"75\"" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"75\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"75\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); - // (3) full expanded form - settings = "{" + - " \"persistent\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\": {" + - " \"rolling_interval\": \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"persistent\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\": {" + + " \"rolling_interval\": \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " 
\"acknowledged\" : true," + + " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } /** - * Both transient and persistent settings are applied for same settings. - * This is similar to _cluster/settings behavior + * Both transient and persistent settings are applied for same settings. This is similar to + * _cluster/settings behavior */ @Test public void sqlCombinedSettingTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\" : \"2\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\" : \"2\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } - /** - * Ignore all non plugins.sql settings. - * Only settings starting with plugins.sql. are affected - */ + /** Ignore all non plugins.sql settings. 
Only settings starting with plugins.sql. are affected */ @Test public void ignoreNonSQLSettingsTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"," + - " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + - " \"search.max_buckets\": \"10000\"," + - " \"search.max_keep_alive\": \"24h\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\": \"2\"," + - " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + - " \"thread_pool.analyze.queue_size\": \"16\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"," + + " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + + " \"search.max_buckets\": \"10000\"," + + " \"search.max_keep_alive\": \"24h\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\": \"2\"," + + " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + + " \"thread_pool.analyze.queue_size\": \"16\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void ignoreNonTransientNonPersistentSettingsTest() throws IOException { - String settings = "{" + - " 
\"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\": \"2\"" + - " }," + - " \"hello\": {" + - " \"world\" : {" + - " \"name\" : \"John Doe\"" + - " }" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\": \"2\"" + + " }," + + " \"hello\": {" + + " \"world\" : {" + + " \"name\" : \"John Doe\"" + + " }" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void sqlCombinedMixedSettingTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins\": {" + - " \"sql\": {" + - " \"slowlog\": \"1\"" + - " }" + - " }" + - " }," + - " \"hello\": {" + - " \"world\": {" + - " \"city\": \"Seattle\"" + - " }" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins\": {" + + " \"sql\": {" + + " \"slowlog\": 
\"1\"" + + " }" + + " }" + + " }," + + " \"hello\": {" + + " \"world\": {" + + " \"city\": \"Seattle\"" + + " }" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"1\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"1\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void nonRegisteredSQLSettingsThrowException() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.sql.query.state.city\": \"Seattle\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.sql.query.state.city\": \"Seattle\"" + + " }" + + "}"; JSONObject actual; Response response = null; @@ -421,8 +450,7 @@ public void nonRegisteredSQLSettingsThrowException() throws IOException { assertThat(actual.query("/error/type"), equalTo("settings_exception")); assertThat( actual.query("/error/reason"), - equalTo("transient setting [plugins.sql.query.state.city], not recognized") - ); + equalTo("transient setting [plugins.sql.query.state.city], not recognized")); } protected static JSONObject updateViaSQLSettingsAPI(String body) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java index 88f72d1907..dd177ec1f1 
100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import java.io.IOException; @@ -25,27 +24,34 @@ protected void init() throws Exception { public void testPreparedStatement() throws IOException { int ageToCompare = 35; - JSONObject response = executeRequest(String.format("{\n" + - " \"query\": \"SELECT * FROM %s WHERE age > ? AND state in (?, ?) LIMIT ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": \"" + ageToCompare + "\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"TN\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"UT\"\n" + - " },\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": \"20\"\n" + - " }\n" + - " ]\n" + - "}", TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeRequest( + String.format( + "{\n" + + " \"query\": \"SELECT * FROM %s WHERE age > ? AND state in (?, ?) 
LIMIT" + + " ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": \"" + + ageToCompare + + "\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"TN\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"UT\"\n" + + " },\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": \"20\"\n" + + " }\n" + + " ]\n" + + "}", + TestsConstants.TEST_INDEX_ACCOUNT)); Assert.assertTrue(response.has("hits")); Assert.assertTrue(response.getJSONObject("hits").has("hits")); @@ -58,23 +64,23 @@ public void testPreparedStatement() throws IOException { } } - /* currently the integ test case will fail if run using Intellj, have to run using gradle command - * because the integ test cluster created by IntellJ has http diabled, need to spend some time later to - * figure out how to configure the integ test cluster properly. Related online resources: - * https://discuss.elastic.co/t/http-enabled-with-OpenSearchIntegTestCase/102032 - * https://discuss.elastic.co/t/help-with-OpenSearchIntegTestCase/105245 - @Override - protected Collection> nodePlugins() { - return Arrays.asList(MockTcpTransportPlugin.class); - } + /* currently the integ test case will fail if run using Intellj, have to run using gradle command + * because the integ test cluster created by IntellJ has http diabled, need to spend some time later to + * figure out how to configure the integ test cluster properly. 
Related online resources: + * https://discuss.elastic.co/t/http-enabled-with-OpenSearchIntegTestCase/102032 + * https://discuss.elastic.co/t/help-with-OpenSearchIntegTestCase/105245 + @Override + protected Collection> nodePlugins() { + return Arrays.asList(MockTcpTransportPlugin.class); + } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - // .put("node.mode", "network") - .put("http.enabled", true) - //.put("http.type", "netty4") - .build(); - } - */ + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + // .put("node.mode", "network") + .put("http.enabled", true) + //.put("http.type", "netty4") + .build(); + } + */ } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java index ef80098df6..70f8a3c433 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static java.util.stream.Collectors.toSet; @@ -30,6 +29,7 @@ import org.opensearch.client.Request; /** + *
  * PrettyFormatResponseIT will likely be excluding some of the tests written in PrettyFormatResponseTest since
  * those tests were asserting on class objects directly. These updated tests will only be making assertions based
  * on the REST response.
@@ -40,21 +40,32 @@
  * Tests from original integ tests excluded:
  * - noIndexType()
  * - withIndexType()
+ * 
*/ public class PrettyFormatResponseIT extends SQLIntegTestCase { - private static final Set allAccountFields = Sets.newHashSet( - "account_number", "balance", "firstname", "lastname", "age", "gender", "address", "employer", - "email", "city", "state" - ); + private static final Set allAccountFields = + Sets.newHashSet( + "account_number", + "balance", + "firstname", + "lastname", + "age", + "gender", + "address", + "employer", + "email", + "city", + "state"); private static final Set regularFields = Sets.newHashSet("someField", "myNum"); - private static final Set messageFields = Sets.newHashSet( - "message.dayOfWeek", "message.info", "message.author"); + private static final Set messageFields = + Sets.newHashSet("message.dayOfWeek", "message.info", "message.author"); - private static final Set messageFieldsWithNestedFunction = Sets.newHashSet( - "nested(message.dayOfWeek)", "nested(message.info)", "nested(message.author)"); + private static final Set messageFieldsWithNestedFunction = + Sets.newHashSet( + "nested(message.dayOfWeek)", "nested(message.info)", "nested(message.author)"); private static final Set commentFields = Sets.newHashSet("comment.data", "comment.likes"); @@ -83,19 +94,20 @@ protected Request getSqlRequest(String request, boolean explain) { public void wrongIndexType() throws IOException { String type = "wrongType"; try { - executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s/%s", - TestsConstants.TEST_INDEX_ACCOUNT, type)); + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s/%s", TestsConstants.TEST_INDEX_ACCOUNT, type)); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), - is(String.format(Locale.ROOT, "Index type %s does not exist", type))); + assertThat( + e.getMessage(), is(String.format(Locale.ROOT, "Index type %s does not exist", type))); } } @Test public void selectAll() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s", - 
TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format(Locale.ROOT, "SELECT * FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); // This also tests that .keyword fields are ignored when SELECT * is called assertContainsColumnsInAnyOrder(getSchema(response), allAccountFields); @@ -104,9 +116,12 @@ public void selectAll() throws IOException { @Test public void selectNames() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT firstname, lastname FROM %s", + TestsConstants.TEST_INDEX_ACCOUNT)); assertContainsColumns(getSchema(response), nameFields); assertContainsData(getDataRows(response), nameFields); @@ -115,13 +130,15 @@ public void selectNames() throws IOException { @Ignore("Semantic analysis takes care of this") @Test public void selectWrongField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT wrongField FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT wrongField FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(getSchema(response).length(), equalTo(0)); - // DataRows object will still get populated with SearchHits but since wrongField is not available in the Map + // DataRows object will still get populated with SearchHits but since wrongField is not + // available in the Map // each row in the response will be empty // TODO Perhaps a code change should be made to format logic to ensure a // 'datarows' length of 0 in response for this case @@ -131,9 +148,12 @@ public void selectWrongField() throws IOException { @Test @Ignore("_score tested in V2 engine - @see org.opensearch.sql.sql.ScoreQueryIT") public void selectScore() throws IOException { - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT _score FROM %s WHERE SCORE(match_phrase(phrase, 'brown fox'))", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT _score FROM %s WHERE SCORE(match_phrase(phrase, 'brown fox'))", + TestsConstants.TEST_INDEX_PHRASE)); List fields = Collections.singletonList("_score"); assertContainsColumns(getSchema(response), fields); @@ -142,14 +162,14 @@ public void selectScore() throws IOException { @Test public void selectAllFromNestedWithoutFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT * FROM %s", - regularFields, fields("message", "comment")); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT * FROM %s", regularFields, fields("message", "comment")); } @Test public void selectAllFromNestedWithFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT * FROM %s e, e.message m", - regularFields, messageFields); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT * FROM %s e, e.message m", regularFields, messageFields); } @Test @@ -161,29 +181,27 @@ public void selectAllFromNestedWithMultipleFieldsInFrom() throws IOException { @Test public void selectAllNestedFromNestedWithFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT m.* FROM %s e, e.message m", - messageFields); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT m.* FROM %s e, e.message m", messageFields); } @Test public void selectSpecificRegularFieldAndAllFromNestedWithFieldInFrom() throws IOException { assertNestedFieldQueryResultContainsColumnsAndData( - "SELECT e.someField, m.* FROM %s e, e.message m", - fields("someField"), messageFields); + "SELECT e.someField, m.* FROM %s e, e.message m", fields("someField"), messageFields); } /** - * Execute the query against index with nested fields and assert result contains columns and data as expected. 
+ * Execute the query against index with nested fields and assert result contains columns and data + * as expected. */ @SafeVarargs - private final void assertNestedFieldQueryResultContainsColumnsAndData(String query, - Set... expectedFieldNames) - throws IOException { + private final void assertNestedFieldQueryResultContainsColumnsAndData( + String query, Set... expectedFieldNames) throws IOException { JSONObject response = executeQuery(String.format(Locale.ROOT, query, TestsConstants.TEST_INDEX_NESTED_TYPE)); - Set allExpectedFieldNames = Stream.of(expectedFieldNames). - flatMap(Set::stream). - collect(toSet()); + Set allExpectedFieldNames = + Stream.of(expectedFieldNames).flatMap(Set::stream).collect(toSet()); assertContainsColumnsInAnyOrder(getSchema(response), allExpectedFieldNames); assertContainsData(getDataRows(response), allExpectedFieldNames); @@ -195,24 +213,31 @@ private Set fields(String... fieldNames) { @Test public void selectNestedFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT nested(message.info), someField FROM %s", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT nested(message.info), someField FROM %s", + TestsConstants.TEST_INDEX_NESTED_TYPE)); List fields = Arrays.asList("nested(message.info)", "someField"); assertContainsColumns(getSchema(response), fields); assertContainsData(getDataRows(response), fields); - // The nested test index being used contains 5 entries but one of them has an array of 2 message objects, so + // The nested test index being used contains 5 entries but one of them has an array of 2 message + // objects, so // we check to see if the amount of data rows is 6 since that is the result after flattening assertThat(getDataRows(response).length(), equalTo(6)); } @Test public void selectNestedFieldWithWildcard() throws IOException { - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT nested(message.*) FROM %s", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT nested(message.*) FROM %s", + TestsConstants.TEST_INDEX_NESTED_TYPE)); assertContainsColumnsInAnyOrder(getSchema(response), messageFieldsWithNestedFunction); assertContainsData(getDataRows(response), messageFields); @@ -221,11 +246,13 @@ public void selectNestedFieldWithWildcard() throws IOException { @Test public void selectWithWhere() throws IOException { int balanceToCompare = 30000; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT balance " + - "FROM %s " + - "WHERE balance > %d", - TestsConstants.TEST_INDEX_ACCOUNT, balanceToCompare)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT balance " + "FROM %s " + "WHERE balance > %d", + TestsConstants.TEST_INDEX_ACCOUNT, + balanceToCompare)); /* * Previously the DataRows map was used to check specific fields but the JDBC response for "datarows" is a @@ -243,9 +270,10 @@ public void selectWithWhere() throws IOException { @Test public void groupBySingleField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s GROUP BY age", TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Collections.singletonList("age"); assertContainsColumns(getSchema(response), fields); @@ -254,9 +282,12 @@ public void groupBySingleField() throws IOException { @Test public void groupByMultipleFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age, balance", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s GROUP BY age, balance", + 
TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("age", "balance"); assertContainsColumns(getSchema(response), fields); @@ -265,35 +296,42 @@ public void groupByMultipleFields() throws IOException { @Ignore("only work for legacy engine") public void testSizeAndTotal() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE balance > 30000 " + - "LIMIT 5", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE balance > 30000 LIMIT 5", + TestsConstants.TEST_INDEX_ACCOUNT)); JSONArray dataRows = getDataRows(response); assertThat(dataRows.length(), equalTo(5)); - // The value to compare to here was obtained by running the query in the plugin and looking at the SearchHits + // The value to compare to here was obtained by running the query in the plugin and looking at + // the SearchHits int totalHits = response.getInt("total"); assertThat(totalHits, equalTo(402)); } @Test public void testSizeWithGroupBy() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age LIMIT 5", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s GROUP BY age LIMIT 5", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(getDataRows(response).length(), equalTo(5)); } @Test public void aggregationFunctionInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)"); assertContainsColumns(getSchema(response), fields); @@ -310,9 +348,12 @@ public void aggregationFunctionInSelect() throws 
IOException { @Ignore("In MySQL and our new engine, the original text in SELECT is used as final column name") @Test public void aggregationFunctionInSelectCaseCheck() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT count(*) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT count(*) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)"); assertContainsColumns(getSchema(response), fields); @@ -328,9 +369,12 @@ public void aggregationFunctionInSelectCaseCheck() throws IOException { @Ignore("only work for legacy engine") public void aggregationFunctionInSelectWithAlias() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*) AS total FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*) AS total FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("total"); assertContainsColumns(getSchema(response), fields); @@ -346,8 +390,10 @@ public void aggregationFunctionInSelectWithAlias() throws IOException { @Test public void aggregationFunctionInSelectNoGroupBy() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT SUM(age) FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT SUM(age) FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); String ageSum = "SUM(age)"; assertContainsColumns(getSchema(response), Collections.singletonList(ageSum)); @@ -363,9 +409,12 @@ public void aggregationFunctionInSelectNoGroupBy() throws IOException { @Test public void multipleAggregationFunctionsInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*), 
AVG(age) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*), AVG(age) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)", "AVG(age)"); assertContainsColumns(getSchema(response), fields); @@ -374,12 +423,12 @@ public void multipleAggregationFunctionsInSelect() throws IOException { @Test public void aggregationFunctionInHaving() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT gender " + - "FROM %s " + - "GROUP BY gender " + - "HAVING count(*) > 500", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT gender FROM %s GROUP BY gender HAVING count(*) > 500", + TestsConstants.TEST_INDEX_ACCOUNT)); String ageSum = "gender"; assertContainsColumns(getSchema(response), Collections.singletonList(ageSum)); @@ -390,20 +439,24 @@ public void aggregationFunctionInHaving() throws IOException { } /** - * This case doesn't seem to be supported by the plugin at the moment. - * Looks like the painless script of the inner function is put inside the aggregation function but - * this syntax may not be correct since it returns 0 which is the default value (since 0 is returned in - * cases like COUNT(wrongField) as well). + * This case doesn't seem to be supported by the plugin at the moment. Looks like the painless + * script of the inner function is put inside the aggregation function but this syntax may not be + * correct since it returns 0 which is the default value (since 0 is returned in cases like + * COUNT(wrongField) as well). 
*/ -// @Test -// public void nestedAggregationFunctionInSelect() { -// String query = String.format(Locale.ROOT, "SELECT SUM(SQRT(age)) FROM age GROUP BY age", TEST_INDEX_ACCOUNT); -// } + // @Test + // public void nestedAggregationFunctionInSelect() { + // String query = String.format(Locale.ROOT, "SELECT SUM(SQRT(age)) FROM age GROUP BY age", + // TEST_INDEX_ACCOUNT); + // } @Test public void fieldsWithAlias() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT firstname AS first, age AS a FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT firstname AS first, age AS a FROM %s", + TestsConstants.TEST_INDEX_ACCOUNT)); Map aliases = new HashMap<>(); aliases.put("firstname", "first"); @@ -414,25 +467,32 @@ public void fieldsWithAlias() throws IOException { @Test public void indexWithMissingFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT phrase, insert_time2 " + - "FROM %s " + - "WHERE match_phrase(phrase, 'brown fox')", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT phrase, insert_time2 " + + "FROM %s " + + "WHERE match_phrase(phrase, 'brown fox')", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray dataRowEntry = getDataRows(response).getJSONArray(0); assertThat(dataRowEntry.length(), equalTo(2)); assertThat(dataRowEntry.get(0), equalTo("brown fox")); - assertThat(dataRowEntry.get(1), - equalTo(JSONObject.NULL)); // TODO See if this null check is failing + assertThat( + dataRowEntry.get(1), equalTo(JSONObject.NULL)); // TODO See if this null check is failing } @Test public void joinQuery() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT b1.balance, b1.age, b2.firstname " + - "FROM %s b1 JOIN %s b2 ON b1.age = b2.age", - TestsConstants.TEST_INDEX_ACCOUNT, 
TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.balance, b1.age, b2.firstname " + + "FROM %s b1 JOIN %s b2 ON b1.age = b2.age", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("b1.balance", "b1.age", "b2.firstname"); assertContainsColumns(getSchema(response), fields); @@ -441,9 +501,14 @@ public void joinQuery() throws IOException { @Test public void joinQueryWithAlias() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT b1.balance AS bal, " + - " b1.age AS age, b2.firstname AS name FROM %s b1 JOIN %s b2 ON b1.age = b2.age", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.balance AS bal, b1.age AS age, b2.firstname AS name FROM %s b1 JOIN %s" + + " b2 ON b1.age = b2.age", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); Map aliases = new HashMap<>(); aliases.put("b1.balance", "bal"); @@ -456,16 +521,20 @@ public void joinQueryWithAlias() throws IOException { @Test public void joinQueryWithObjectFieldInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT c.name.firstname, d.name.lastname " + - "FROM %s c JOIN %s d ON d.hname = c.house", - TestsConstants.TEST_INDEX_GAME_OF_THRONES, - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT c.name.firstname, d.name.lastname " + + "FROM %s c JOIN %s d ON d.hname = c.house", + TestsConstants.TEST_INDEX_GAME_OF_THRONES, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); List fields = Arrays.asList("c.name.firstname", "d.name.lastname"); assertContainsColumns(getSchema(response), fields); - // d.name.lastname is null here since entries with hname don't have a name.lastname entry, so only length is + // 
d.name.lastname is null here since entries with hname don't have a name.lastname entry, so + // only length is // checked JSONArray dataRows = getDataRows(response); assertThat(dataRows.length(), greaterThan(0)); @@ -476,10 +545,13 @@ public void joinQueryWithObjectFieldInSelect() throws IOException { @Test public void joinQuerySelectOnlyOnOneTable() throws Exception { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT b1.age " + - "FROM %s b1 JOIN %s b2 ON b1.firstname = b2.firstname", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.age FROM %s b1 JOIN %s b2 ON b1.firstname = b2.firstname", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Collections.singletonList("b1.age"); assertContainsColumns(getSchema(response), fields); @@ -508,8 +580,12 @@ private void testFieldOrder(final String[] expectedFields, final Object[] expect throws IOException { final String fields = String.join(", ", expectedFields); - final String query = String.format(Locale.ROOT, "SELECT %s FROM %s " + - "WHERE email='amberduke@pyrami.com'", fields, TestsConstants.TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT %s FROM %s WHERE email='amberduke@pyrami.com'", + fields, + TestsConstants.TEST_INDEX_ACCOUNT); final JSONObject result = executeQuery(query); for (int i = 0; i < expectedFields.length; ++i) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java index 463a0bc6db..c81839a6e5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static 
org.hamcrest.Matchers.equalTo; @@ -29,16 +28,18 @@ protected void init() throws Exception { public void assertExplainPrettyFormatted() throws IOException { String query = StringUtils.format("SELECT firstname FROM %s", TEST_INDEX_ACCOUNT); - String notPrettyExplainOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/explainIT_format_not_pretty.json"); + String notPrettyExplainOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/explainIT_format_not_pretty.json"); String notPrettyExplainOutput = Files.toString(new File(notPrettyExplainOutputFilePath), StandardCharsets.UTF_8); assertThat(executeExplainRequest(query, ""), equalTo(notPrettyExplainOutput)); assertThat(executeExplainRequest(query, "pretty=false"), equalTo(notPrettyExplainOutput)); - String prettyExplainOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/explainIT_format_pretty.json"); + String prettyExplainOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/explainIT_format_pretty.json"); String prettyExplainOutput = Files.toString(new File(prettyExplainOutputFilePath), StandardCharsets.UTF_8); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java index 3a58b7ffc0..62a87d3bff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -24,9 +23,7 @@ import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Integration test for syntax and semantic analysis against query by new ANTLR parser. 
- */ +/** Integration test for syntax and semantic analysis against query by new ANTLR parser. */ public class QueryAnalysisIT extends SQLIntegTestCase { @Override @@ -41,9 +38,7 @@ public void missingFromClauseShouldThrowSyntaxException() { @Test public void unsupportedOperatorShouldThrowSyntaxException() { - queryShouldThrowSyntaxException( - "SELECT * FROM opensearch-sql_test_index_bank WHERE age <=> 1" - ); + queryShouldThrowSyntaxException("SELECT * FROM opensearch-sql_test_index_bank WHERE age <=> 1"); } @Test @@ -51,8 +46,8 @@ public void nonExistingFieldNameShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE balance1 = 1000", "Field [balance1] cannot be found or used here." - //"Did you mean [balance]?" - ); + // "Did you mean [balance]?" + ); } @Test @@ -60,16 +55,15 @@ public void nonExistingIndexAliasShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE a.balance = 1000", "Field [a.balance] cannot be found or used here." - //"Did you mean [b.balance]?" - ); + // "Did you mean [b.balance]?" + ); } @Test public void indexJoinNonNestedFieldShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b1, b1.firstname f1", - "Operator [JOIN] cannot work with [INDEX, KEYWORD]." - ); + "Operator [JOIN] cannot work with [INDEX, KEYWORD]."); } @Test @@ -77,8 +71,7 @@ public void scalarFunctionCallWithTypoInNameShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE ABSa(age) = 1", "Function [ABSA] cannot be found or used here.", - "Did you mean [ABS]?" 
- ); + "Did you mean [ABS]?"); } @Test @@ -86,17 +79,16 @@ public void scalarFunctionCallWithWrongTypeArgumentShouldThrowSemanticException( queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE LOG(lastname) = 1", "Function [LOG] cannot work with [KEYWORD].", - "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE" - ); + "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE"); } @Test public void aggregateFunctionCallWithWrongNumberOfArgumentShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT city FROM opensearch-sql_test_index_bank GROUP BY city HAVING MAX(age, birthdate) > 1", + "SELECT city FROM opensearch-sql_test_index_bank GROUP BY city HAVING MAX(age, birthdate) >" + + " 1", "Function [MAX] cannot work with [INTEGER, DATE].", - "Usage: MAX(NUMBER T) -> T" - ); + "Usage: MAX(NUMBER T) -> T"); } @Test @@ -104,8 +96,7 @@ public void compareIntegerFieldWithBooleanShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE b.age IS FALSE", "Operator [IS] cannot work with [INTEGER, BOOLEAN].", - "Usage: Please use compatible types from each side." - ); + "Usage: Please use compatible types from each side."); } @Test @@ -113,8 +104,7 @@ public void compareNumberFieldWithStringShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE b.age >= 'test'", "Operator [>=] cannot work with [INTEGER, STRING].", - "Usage: Please use compatible types from each side." - ); + "Usage: Please use compatible types from each side."); } @Test @@ -122,43 +112,38 @@ public void compareLogFunctionCallWithNumberFieldWithStringShouldThrowSemanticEx queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE LOG(b.balance) != 'test'", "Operator [!=] cannot work with [DOUBLE, STRING].", - "Usage: Please use compatible types from each side." 
- ); + "Usage: Please use compatible types from each side."); } @Test public void unionNumberFieldWithStringShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT age FROM opensearch-sql_test_index_bank" + - " UNION SELECT address FROM opensearch-sql_test_index_bank", - "Operator [UNION] cannot work with [INTEGER, TEXT]." - ); + "SELECT age FROM opensearch-sql_test_index_bank" + + " UNION SELECT address FROM opensearch-sql_test_index_bank", + "Operator [UNION] cannot work with [INTEGER, TEXT]."); } @Test public void minusBooleanFieldWithDateShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT male FROM opensearch-sql_test_index_bank" + - " MINUS SELECT birthdate FROM opensearch-sql_test_index_bank", - "Operator [MINUS] cannot work with [BOOLEAN, DATE]." - ); + "SELECT male FROM opensearch-sql_test_index_bank" + + " MINUS SELECT birthdate FROM opensearch-sql_test_index_bank", + "Operator [MINUS] cannot work with [BOOLEAN, DATE]."); } @Test public void useInClauseWithIncompatibleFieldTypesShouldFail() { queryShouldThrowSemanticException( - "SELECT * FROM opensearch-sql_test_index_bank WHERE male " + - " IN (SELECT 1 FROM opensearch-sql_test_index_bank)", - "Operator [IN] cannot work with [BOOLEAN, INTEGER]." 
- ); + "SELECT * FROM opensearch-sql_test_index_bank WHERE male " + + " IN (SELECT 1 FROM opensearch-sql_test_index_bank)", + "Operator [IN] cannot work with [BOOLEAN, INTEGER]."); } @Test public void queryWithNestedFunctionShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT abs(log(balance)) FROM opensearch-sql_test_index_bank", - "Nested function calls like [abs(log(balance))] are not supported yet" - ); + "Nested function calls like [abs(log(balance))] are not supported yet"); } @Test @@ -170,29 +155,24 @@ public void nestedFunctionWithMathConstantAsInnerFunctionShouldPass() { public void aggregateWithFunctionAggregatorShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT max(log(age)) FROM opensearch-sql_test_index_bank", - "Aggregation calls with function aggregator like [max(log(age))] are not supported yet" - ); + "Aggregation calls with function aggregator like [max(log(age))] are not supported yet"); } @Test public void queryWithUnsupportedFunctionShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT balance DIV age FROM opensearch-sql_test_index_bank", - "Operator [DIV] is not supported yet" - ); + "Operator [DIV] is not supported yet"); } @Test public void useNegativeNumberConstantShouldPass() { queryShouldPassAnalysis( - "SELECT * FROM opensearch-sql_test_index_bank " + - "WHERE age > -1 AND balance < -123.456789" - ); + "SELECT * FROM opensearch-sql_test_index_bank " + + "WHERE age > -1 AND balance < -123.456789"); } - /** - * Run the query with cluster setting changed and cleaned after complete - */ + /** Run the query with cluster setting changed and cleaned after complete */ private void runWithClusterSetting(ClusterSetting setting, Runnable query) { try { updateClusterSettings(setting); @@ -201,7 +181,8 @@ private void runWithClusterSetting(ClusterSetting setting, Runnable query) { throw new IllegalStateException( StringUtils.format("Exception raised when running with cluster setting [%s]", 
setting)); } finally { - // Clean up or OpenSearch will throw java.lang.AssertionError: test leaves persistent cluster metadata behind + // Clean up or OpenSearch will throw java.lang.AssertionError: test leaves persistent cluster + // metadata behind try { updateClusterSettings(setting.nullify()); } catch (IOException e) { @@ -218,20 +199,19 @@ private void queryShouldThrowSemanticException(String query, String... expectedM queryShouldThrowException(query, SemanticAnalysisException.class, expectedMsgs); } - private void queryShouldThrowFeatureNotImplementedException(String query, - String... expectedMsgs) { - queryShouldThrowExceptionWithRestStatus(query, SqlFeatureNotImplementedException.class, - SERVICE_UNAVAILABLE, expectedMsgs); + private void queryShouldThrowFeatureNotImplementedException( + String query, String... expectedMsgs) { + queryShouldThrowExceptionWithRestStatus( + query, SqlFeatureNotImplementedException.class, SERVICE_UNAVAILABLE, expectedMsgs); } - private void queryShouldThrowException(String query, Class exceptionType, - String... expectedMsgs) { + private void queryShouldThrowException( + String query, Class exceptionType, String... expectedMsgs) { queryShouldThrowExceptionWithRestStatus(query, exceptionType, BAD_REQUEST, expectedMsgs); } - private void queryShouldThrowExceptionWithRestStatus(String query, Class exceptionType, - RestStatus status, - String... expectedMsgs) { + private void queryShouldThrowExceptionWithRestStatus( + String query, Class exceptionType, RestStatus status, String... 
expectedMsgs) { try { executeQuery(query); Assert.fail("Expected ResponseException, but none was thrown for query: " + query); @@ -244,8 +224,8 @@ private void queryShouldThrowExceptionWithRestStatus(String query, Class } } catch (IOException e) { throw new IllegalStateException( - "Unexpected IOException raised rather than expected AnalysisException for query: " + - query); + "Unexpected IOException raised rather than expected AnalysisException for query: " + + query); } } @@ -285,5 +265,4 @@ void assertBodyContains(String content) { assertThat(body, containsString(content)); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java index c538db830f..b36144ce5f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -44,10 +43,11 @@ public class QueryFunctionsIT extends SQLIntegTestCase { private static final String FROM_PHRASE = "FROM " + TEST_INDEX_PHRASE; /** + *
    * TODO Looks like Math/Date Functions test all use the same query() and execute() functions
    * TODO execute/featureValueOf/hits functions are the same as used in NestedFieldQueryIT, should refactor into util
+   * 
*/ - @Override protected void init() throws Exception { loadIndex(Index.ACCOUNT); @@ -58,63 +58,39 @@ protected void init() throws Exception { @Test public void query() throws IOException { assertThat( - query( - "SELECT state", - FROM_ACCOUNTS, - "WHERE QUERY('CA')" - ), - hits( - hasValueForFields("CA", "state") - ) - ); + query("SELECT state", FROM_ACCOUNTS, "WHERE QUERY('CA')"), + hits(hasValueForFields("CA", "state"))); } @Test public void matchQueryRegularField() throws IOException { assertThat( - query( - "SELECT firstname", - FROM_ACCOUNTS, - "WHERE MATCH_QUERY(firstname, 'Ayers')" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + query("SELECT firstname", FROM_ACCOUNTS, "WHERE MATCH_QUERY(firstname, 'Ayers')"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test public void matchQueryNestedField() throws IOException { SearchHit[] hits = query("SELECT comment.data", FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')") - .getHits().getHits(); + .getHits() + .getHits(); Map source = hits[0].getSourceAsMap(); // SearchHits innerHits = hits[0].getInnerHits().get("comment"); assertThat( - query( - "SELECT comment.data", - FROM_NESTED, - "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')" - ), + query("SELECT comment.data", FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')"), hits( - anyOf(hasNestedField("comment", "data", "aa"), - hasNestedArrayField("comment", "data", "aa")) - ) - ); + anyOf( + hasNestedField("comment", "data", "aa"), + hasNestedArrayField("comment", "data", "aa")))); } @Test public void scoreQuery() throws IOException { assertThat( query( - "SELECT firstname", - FROM_ACCOUNTS, - "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + "SELECT firstname", FROM_ACCOUNTS, "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test @@ -123,42 +99,24 @@ public void scoreQueryWithNestedField() throws 
IOException { query( "SELECT comment.data", FROM_NESTED, - "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)" - ), + "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)"), hits( - //hasValueForFields("ab", "comment.data") - hasNestedField("comment", - "data", "ab") - ) - ); + // hasValueForFields("ab", "comment.data") + hasNestedField("comment", "data", "ab"))); } @Test public void wildcardQuery() throws IOException { assertThat( - query( - "SELECT city", - FROM_ACCOUNTS, - "WHERE WILDCARD_QUERY(city.keyword, 'B*')" - ), - hits( - hasFieldWithPrefix("city", "B") - ) - ); + query("SELECT city", FROM_ACCOUNTS, "WHERE WILDCARD_QUERY(city.keyword, 'B*')"), + hits(hasFieldWithPrefix("city", "B"))); } @Test public void matchPhraseQuery() throws IOException { assertThat( - query( - "SELECT phrase", - FROM_PHRASE, - "WHERE MATCH_PHRASE(phrase, 'brown fox')" - ), - hits( - hasValueForFields("brown fox", "phrase") - ) - ); + query("SELECT phrase", FROM_PHRASE, "WHERE MATCH_PHRASE(phrase, 'brown fox')"), + hits(hasValueForFields("brown fox", "phrase"))); } @Test @@ -167,12 +125,8 @@ public void multiMatchQuerySingleField() throws IOException { query( "SELECT firstname", FROM_ACCOUNTS, - "WHERE MULTI_MATCH('query'='Ayers', 'fields'='firstname')" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + "WHERE MULTI_MATCH('query'='Ayers', 'fields'='firstname')"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test @@ -181,36 +135,30 @@ public void multiMatchQueryWildcardField() throws IOException { query( "SELECT firstname, lastname", FROM_ACCOUNTS, - "WHERE MULTI_MATCH('query'='Bradshaw', 'fields'='*name')" - ), - hits( - hasValueForFields("Bradshaw", "firstname", "lastname") - ) - ); + "WHERE MULTI_MATCH('query'='Bradshaw', 'fields'='*name')"), + hits(hasValueForFields("Bradshaw", "firstname", "lastname"))); } @Test public void numberLiteralInSelectField() { assertTrue( - executeQuery(StringUtils.format("SELECT 234234 AS number from %s", 
TEST_INDEX_ACCOUNT), - "jdbc") - .contains("234234") - ); + executeQuery( + StringUtils.format("SELECT 234234 AS number from %s", TEST_INDEX_ACCOUNT), "jdbc") + .contains("234234")); assertTrue( - executeQuery(StringUtils.format("SELECT 2.34234 AS number FROM %s", TEST_INDEX_ACCOUNT), - "jdbc") - .contains("2.34234") - ); + executeQuery( + StringUtils.format("SELECT 2.34234 AS number FROM %s", TEST_INDEX_ACCOUNT), "jdbc") + .contains("2.34234")); } private final Matcher hits(Matcher subMatcher) { - return featureValueOf("hits", everyItem(subMatcher), - resp -> Arrays.asList(resp.getHits().getHits())); + return featureValueOf( + "hits", everyItem(subMatcher), resp -> Arrays.asList(resp.getHits().getHits())); } - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, - Function getter) { + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -220,6 +168,7 @@ protected U featureValueOf(T actual) { } /** + *
    * Create Matchers for each field and its value
    * Only one of the Matchers need to match (per hit)
    * 

@@ -228,36 +177,33 @@ protected U featureValueOf(T actual) { *

* Then the value "Ayers" can be found in either the firstname or lastname field. Only one of these fields * need to satisfy the query value to be evaluated as correct expected output. - * - * @param value The value to match for a field in the sourceMap + *

+ * @param value The value to match for a field in the sourceMap * @param fields A list of fields to match */ @SafeVarargs private final Matcher hasValueForFields(String value, String... fields) { return anyOf( - Arrays.asList(fields). - stream(). - map(field -> kv(field, is(value))). - collect(Collectors.toList())); + Arrays.asList(fields).stream() + .map(field -> kv(field, is(value))) + .collect(Collectors.toList())); } private final Matcher hasFieldWithPrefix(String field, String prefix) { - return featureValueOf(field, startsWith(prefix), - hit -> (String) hit.getSourceAsMap().get(field)); + return featureValueOf( + field, startsWith(prefix), hit -> (String) hit.getSourceAsMap().get(field)); } private final Matcher hasNestedField(String path, String field, String value) { - return featureValueOf(field, is(value), - hit -> ((HashMap) hit.getSourceAsMap().get(path)).get(field)); + return featureValueOf( + field, is(value), hit -> ((HashMap) hit.getSourceAsMap().get(path)).get(field)); } private final Matcher hasNestedArrayField(String path, String field, String value) { return new BaseMatcher() { @Override - public void describeTo(Description description) { - - } + public void describeTo(Description description) {} @Override public boolean matches(Object item) { @@ -275,7 +221,7 @@ private Matcher kv(String key, Matcher valMatcher) { } /*********************************************************** - Query Utility to Fetch Response for SQL + * Query Utility to Fetch Response for SQL ***********************************************************/ private SearchResponse query(String select, String from, String... statements) @@ -286,10 +232,11 @@ private SearchResponse query(String select, String from, String... 
statements) private SearchResponse execute(String sql) throws IOException { final JSONObject jsonObject = executeQuery(sql); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java index f99285a90b..3f684deaa9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -44,6 +43,7 @@ public class QueryIT extends SQLIntegTestCase { /** + *
    * Currently commenting out tests related to JoinType index since there is an issue with mapping.
    * 

* Also ignoring the following tests as they are failing, will require investigation: @@ -57,10 +57,11 @@ public class QueryIT extends SQLIntegTestCase { * The following tests are being ignored because subquery is still running in OpenSearch transport thread: * - twoSubQueriesTest() * - inTermsSubQueryTest() + *

*/ + static final int BANK_INDEX_MALE_TRUE = 4; - final static int BANK_INDEX_MALE_TRUE = 4; - final static int BANK_INDEX_MALE_FALSE = 3; + static final int BANK_INDEX_MALE_FALSE = 3; @Override protected void init() throws Exception { @@ -87,92 +88,87 @@ public void queryEndWithSemiColonTest() { @Test public void searchTypeTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s LIMIT 1000", TestsConstants.TEST_INDEX_PHRASE)); Assert.assertTrue(response.has("hits")); Assert.assertEquals(6, getTotalHits(response)); } @Test public void multipleFromTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s, %s LIMIT 2000", - TestsConstants.TEST_INDEX_BANK, TestsConstants.TEST_INDEX_BANK_TWO)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s, %s LIMIT 2000", + TestsConstants.TEST_INDEX_BANK, + TestsConstants.TEST_INDEX_BANK_TWO)); Assert.assertTrue(response.has("hits")); Assert.assertEquals(14, getTotalHits(response)); } @Test public void selectAllWithFieldReturnsAll() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldReverseOrder() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); 
checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithMultipleFields() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age, address " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age, address FROM %s LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldAndOrderBy() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s ORDER BY age LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldAndGroupBy() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "GROUP BY age " + - "LIMIT 10", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s GROUP BY age LIMIT 10", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @Test public void selectAllWithFieldAndGroupByReverseOrder() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "GROUP BY age " + - "LIMIT 10", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age FROM %s GROUP BY age LIMIT 10", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @@ -180,14 +176,16 @@ public void selectAllWithFieldAndGroupByReverseOrder() throws IOException { @Test public void selectFieldWithAliasAndGroupBy() { String response = - executeQuery("SELECT 
lastname AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name", - "jdbc"); + executeQuery( + "SELECT lastname AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name", "jdbc"); assertThat(response, containsString("\"alias\": \"name\"")); } public void indexWithWildcardTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s* LIMIT 1000", - TestsConstants.TEST_INDEX_BANK)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s* LIMIT 1000", TestsConstants.TEST_INDEX_BANK)); Assert.assertTrue(response.has("hits")); assertThat(getTotalHits(response), greaterThan(0)); } @@ -198,8 +196,8 @@ public void selectSpecificFields() throws IOException { Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONObject response = - executeQuery(String.format(Locale.ROOT, "SELECT age, account_number FROM %s", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format(Locale.ROOT, "SELECT age, account_number FROM %s", TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } @@ -209,8 +207,9 @@ public void selectSpecificFieldsUsingTableAlias() throws IOException { Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONObject response = - executeQuery(String.format(Locale.ROOT, "SELECT a.age, a.account_number FROM %s a", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format( + Locale.ROOT, "SELECT a.age, a.account_number FROM %s a", TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } @@ -219,15 +218,18 @@ public void selectSpecificFieldsUsingTableNamePrefix() throws IOException { String[] arr = new String[] {"age", "account_number"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT opensearch-sql_test_index_account.age, opensearch-sql_test_index_account.account_number" + - " FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject response = + 
executeQuery( + String.format( + Locale.ROOT, + "SELECT opensearch-sql_test_index_account.age," + + " opensearch-sql_test_index_account.account_number FROM %s", + TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } - private void assertResponseForSelectSpecificFields(JSONObject response, - Set expectedSource) { + private void assertResponseForSelectSpecificFields( + JSONObject response, Set expectedSource) { JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -240,9 +242,12 @@ public void selectFieldWithSpace() throws IOException { String[] arr = new String[] {"test field"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT ['test field'] FROM %s " + - "WHERE ['test field'] IS NOT null", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT ['test field'] FROM %s WHERE ['test field'] IS NOT null", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -259,19 +264,28 @@ public void selectAliases() throws IOException { String[] arr = new String[] {"myage", "myaccount_number"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT age AS myage, account_number AS myaccount_number FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age AS myage, account_number AS myaccount_number FROM %s", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(result); - hits.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - Assert.assertEquals(expectedSource, hit.getJSONObject("_source").keySet()); - }); + hits.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + Assert.assertEquals(expectedSource, 
hit.getJSONObject("_source").keySet()); + }); } @Test public void useTableAliasInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s a WHERE a.city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s a WHERE a.city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -280,8 +294,12 @@ public void useTableAliasInWhereClauseTest() throws IOException { @Test public void notUseTableAliasInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s a WHERE city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s a WHERE city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -290,10 +308,13 @@ public void notUseTableAliasInWhereClauseTest() throws IOException { @Test public void useTableNamePrefixInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE opensearch-sql_test_index_account.city = 'Nogal' LIMIT 1000", - TEST_INDEX_ACCOUNT - )); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE opensearch-sql_test_index_account.city = 'Nogal' LIMIT" + + " 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -302,8 +323,12 @@ public void useTableNamePrefixInWhereClauseTest() throws IOException { @Test public void equalityTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + 
String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -312,9 +337,12 @@ public void equalityTest() throws IOException { @Test public void equalityTestPhrase() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s WHERE " + - "match_phrase(phrase, 'quick fox here') LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here') LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -324,10 +352,13 @@ public void equalityTestPhrase() throws IOException { @Test public void greaterThanTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age > %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age > %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + someAge)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -340,10 +371,13 @@ public void greaterThanTest() throws IOException { @Test public void greaterThanOrEqualTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age >= %s LIMIT 1000", - TEST_INDEX_ACCOUNT, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age >= %s LIMIT 1000", + TEST_INDEX_ACCOUNT, + someAge)); boolean isEqualFound = false; JSONArray hits = getHits(response); @@ -352,24 +386,27 @@ public void greaterThanOrEqualTest() throws IOException { int age = getSource(hit).getInt("age"); assertThat(age, 
greaterThanOrEqualTo(someAge)); - if (age == someAge) { - isEqualFound = true; - } + if (age == someAge) { + isEqualFound = true; + } } Assert.assertTrue( - String.format(Locale.ROOT, "At least one of the documents need to contains age equal to %s", - someAge), + String.format( + Locale.ROOT, "At least one of the documents need to contains age equal to %s", someAge), isEqualFound); } @Test public void lessThanTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age < %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age < %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + someAge)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -382,10 +419,13 @@ public void lessThanTest() throws IOException { @Test public void lessThanOrEqualTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age <= %s LIMIT 1000", - TEST_INDEX_ACCOUNT, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age <= %s LIMIT 1000", + TEST_INDEX_ACCOUNT, + someAge)); boolean isEqualFound = false; JSONArray hits = getHits(response); @@ -394,32 +434,39 @@ public void lessThanOrEqualTest() throws IOException { int age = getSource(hit).getInt("age"); assertThat(age, lessThanOrEqualTo(someAge)); - if (age == someAge) { - isEqualFound = true; - } + if (age == someAge) { + isEqualFound = true; + } } Assert.assertTrue( - String.format(Locale.ROOT, "At least one of the documents need to contains age equal to %s", - someAge), + String.format( + Locale.ROOT, "At least one of the documents need to contains age equal to %s", someAge), isEqualFound); } @Test public void orTest() throws IOException { - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE match_phrase(gender, 'F') OR match_phrase(gender, 'M') " + - "LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE match_phrase(gender, 'F') OR match_phrase(gender, 'M') " + + "LIMIT 1000", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(1000, getTotalHits(response)); } @Test public void andTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age=32 AND gender='M' LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age=32 AND gender='M' LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -431,9 +478,12 @@ public void andTest() throws IOException { @Test public void likeTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE firstname LIKE 'amb%%' LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE firstname LIKE 'amb%%' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -442,9 +492,12 @@ public void likeTest() throws IOException { @Test public void notLikeTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE firstname NOT LIKE 'amb%%'", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE firstname NOT LIKE 'amb%%'", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertNotEquals(0, getTotalHits(response)); @@ -456,11 +509,13 @@ public void notLikeTest() throws IOException { @Test public void regexQueryTest() 
throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE dog_name = REGEXP_QUERY('sn.*', 'INTERSECTION|COMPLEMENT|EMPTY', 10000)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE dog_name = REGEXP_QUERY('sn.*'," + + " 'INTERSECTION|COMPLEMENT|EMPTY', 10000)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -473,11 +528,13 @@ public void regexQueryTest() throws IOException { @Test public void negativeRegexQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE NOT(dog_name = REGEXP_QUERY('sn.*', 'INTERSECTION|COMPLEMENT|EMPTY', 10000))", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT(dog_name = REGEXP_QUERY('sn.*'," + + " 'INTERSECTION|COMPLEMENT|EMPTY', 10000))", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -489,28 +546,36 @@ public void negativeRegexQueryTest() throws IOException { @Test public void doubleNotTest() throws IOException { - JSONObject response1 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND NOT gender LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response1 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND NOT gender LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response1)); - JSONObject response2 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND gender NOT LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response2 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND gender NOT LIKE 'f'", 
+ TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response2)); - JSONObject response3 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE gender NOT LIKE 'm' AND gender NOT LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response3 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE gender NOT LIKE 'm' AND gender NOT LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response3)); - JSONObject response4 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE gender LIKE 'm' AND NOT gender LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response4 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE gender LIKE 'm' AND NOT gender LIKE 'f'", + TEST_INDEX_ACCOUNT)); // Assert there are results and they all have gender 'm' Assert.assertNotEquals(0, getTotalHits(response4)); JSONArray hits = getHits(response4); @@ -519,16 +584,19 @@ public void doubleNotTest() throws IOException { Assert.assertEquals("m", getSource(hit).getString("gender").toLowerCase()); } - JSONObject response5 = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE NOT (gender = 'm' OR gender = 'f')", - TEST_INDEX_ACCOUNT)); + JSONObject response5 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT (gender = 'm' OR gender = 'f')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response5)); } @Test public void limitTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 30", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 30", TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(30, hits.length()); @@ -538,9 +606,14 @@ public void limitTest() throws IOException { public void betweenTest() throws IOException { int min = 27; int max = 30; - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT * FROM %s WHERE age BETWEEN %s AND %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, min, max)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age BETWEEN %s AND %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + min, + max)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -556,9 +629,14 @@ public void betweenTest() throws IOException { public void notBetweenTest() throws IOException { int min = 20; int max = 37; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age NOT BETWEEN %s AND %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, min, max)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age NOT BETWEEN %s AND %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + min, + max)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -575,9 +653,12 @@ public void notBetweenTest() throws IOException { @Test public void inTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s WHERE age IN (20, 22) LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s WHERE age IN (20, 22) LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -589,10 +670,12 @@ public void inTest() throws IOException { @Test public void inTestWithStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT phrase FROM %s WHERE phrase IN ('quick', 'fox') LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT phrase FROM %s WHERE phrase IN ('quick', 'fox') LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); 
JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -604,12 +687,15 @@ public void inTestWithStrings() throws IOException { @Test public void inTermsTestWithIdentifiersTreatedLikeStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -622,12 +708,15 @@ public void inTermsTestWithIdentifiersTreatedLikeStrings() throws IOException { @Test public void inTermsTestWithStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -640,12 +729,15 @@ public void inTermsTestWithStrings() throws IOException { @Test public void inTermsWithNumbers() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.ofHisName = IN_TERMS(4,2) " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.ofHisName = IN_TERMS(4,2) " + + "LIMIT 1000", + 
TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -657,10 +749,12 @@ public void inTermsWithNumbers() throws IOException { @Test public void termQueryWithNumber() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT name FROM %s WHERE name.ofHisName = term(4) LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name FROM %s WHERE name.ofHisName = term(4) LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -672,12 +766,15 @@ public void termQueryWithNumber() throws IOException { @Test public void termQueryWithStringIdentifier() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = term('brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = term('brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -689,12 +786,15 @@ public void termQueryWithStringIdentifier() throws IOException { @Test public void termQueryWithStringLiteral() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = term('brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = term('brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); 
Assert.assertEquals(1, getTotalHits(response)); @@ -708,9 +808,12 @@ public void termQueryWithStringLiteral() throws IOException { // are returned as well. This may be incorrect behavior. @Test public void notInTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s WHERE age NOT IN (20, 22) LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s WHERE age NOT IN (20, 22) LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -730,9 +833,12 @@ public void dateSearch() throws IOException { DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.DATE_FORMAT); DateTime dateToCompare = new DateTime(2014, 8, 18, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT insert_time FROM %s WHERE insert_time < '2014-08-18'", - TestsConstants.TEST_INDEX_ONLINE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s WHERE insert_time < '2014-08-18'", + TestsConstants.TEST_INDEX_ONLINE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -740,8 +846,8 @@ public void dateSearch() throws IOException { DateTime insertTime = formatter.parseDateTime(source.getString("insert_time")); String errorMessage = - String.format(Locale.ROOT, "insert_time must be before 2014-08-18. Found: %s", - insertTime); + String.format( + Locale.ROOT, "insert_time must be before 2014-08-18. 
Found: %s", insertTime); Assert.assertTrue(errorMessage, insertTime.isBefore(dateToCompare)); } } @@ -751,10 +857,12 @@ public void dateSearchBraces() throws IOException { DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.TS_DATE_FORMAT); DateTime dateToCompare = new DateTime(2015, 3, 15, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT odbc_time FROM %s WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}", - TestsConstants.TEST_INDEX_ODBC)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT odbc_time FROM %s WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}", + TestsConstants.TEST_INDEX_ODBC)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -764,8 +872,8 @@ public void dateSearchBraces() throws IOException { DateTime insertTime = formatter.parseDateTime(insertTimeStr); String errorMessage = - String.format(Locale.ROOT, "insert_time must be before 2015-03-15. Found: %s", - insertTime); + String.format( + Locale.ROOT, "insert_time must be before 2015-03-15. 
Found: %s", insertTime); Assert.assertTrue(errorMessage, insertTime.isBefore(dateToCompare)); } } @@ -777,20 +885,24 @@ public void dateBetweenSearch() throws IOException { DateTime dateLimit1 = new DateTime(2014, 8, 18, 0, 0, 0); DateTime dateLimit2 = new DateTime(2014, 8, 21, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT insert_time " + - "FROM %s " + - "WHERE insert_time BETWEEN '2014-08-18' AND '2014-08-21' " + - "LIMIT 3", - TestsConstants.TEST_INDEX_ONLINE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time " + + "FROM %s " + + "WHERE insert_time BETWEEN '2014-08-18' AND '2014-08-21' " + + "LIMIT 3", + TestsConstants.TEST_INDEX_ONLINE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); JSONObject source = getSource(hit); DateTime insertTime = formatter.parseDateTime(source.getString("insert_time")); - boolean isBetween = (insertTime.isAfter(dateLimit1) || insertTime.isEqual(dateLimit1)) && - (insertTime.isBefore(dateLimit2) || insertTime.isEqual(dateLimit2)); + boolean isBetween = + (insertTime.isAfter(dateLimit1) || insertTime.isEqual(dateLimit1)) + && (insertTime.isBefore(dateLimit2) || insertTime.isEqual(dateLimit2)); Assert.assertTrue("insert_time must be between 2014-08-18 and 2014-08-21", isBetween); } @@ -798,9 +910,12 @@ public void dateBetweenSearch() throws IOException { @Test public void missFilterSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE insert_time2 IS missing", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE insert_time2 IS missing", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(4, getTotalHits(response)); @@ -814,9 +929,12 @@ public void missFilterSearch() throws IOException { @Test 
public void notMissFilterSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE insert_time2 IS NOT missing", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE insert_time2 IS NOT missing", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -830,15 +948,19 @@ public void notMissFilterSearch() throws IOException { @Test public void complexConditionQuery() throws IOException { - String errorMessage = "Result does not exist to the condition " + - "(gender='m' AND (age> 25 OR account_number>5)) OR (gender='f' AND (age>30 OR account_number < 8)"; + String errorMessage = + "Result does not exist to the condition (gender='m' AND (age> 25 OR account_number>5)) OR" + + " (gender='f' AND (age>30 OR account_number < 8)"; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE (gender='m' AND (age> 25 OR account_number>5)) " + - "OR (gender='f' AND (age>30 OR account_number < 8))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE (gender='m' AND (age> 25 OR account_number>5)) " + + "OR (gender='f' AND (age>30 OR account_number < 8))", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -849,7 +971,8 @@ public void complexConditionQuery() throws IOException { int age = source.getInt("age"); int accountNumber = source.getInt("account_number"); - Assert.assertTrue(errorMessage, + Assert.assertTrue( + errorMessage, (gender.equals("m") && (age > 25 || accountNumber > 5)) || (gender.equals("f") && (age > 30 || accountNumber < 8))); } @@ -857,16 +980,20 @@ public void complexConditionQuery() throws IOException { @Test public void complexNotConditionQuery() throws IOException { 
- String errorMessage = "Result does not exist to the condition " + - "NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + - "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))"; + String errorMessage = + "Result does not exist to the condition " + + "NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + + "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))"; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + - "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + + "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertNotEquals(0, hits.length()); @@ -878,7 +1005,8 @@ public void complexNotConditionQuery() throws IOException { int age = source.getInt("age"); int accountNumber = source.getInt("account_number"); - Assert.assertTrue(errorMessage, + Assert.assertTrue( + errorMessage, !(gender.equals("m") && !(age > 25 || accountNumber > 5)) || (!gender.equals("f") && !(age > 30 || accountNumber < 8))); } @@ -887,9 +1015,10 @@ public void complexNotConditionQuery() throws IOException { @Test @SuppressWarnings("unchecked") public void orderByAscTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s ORDER BY age ASC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT age FROM %s ORDER BY age ASC LIMIT 1000", TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); ArrayList ages = new ArrayList<>(); @@ -907,17 +1036,23 @@ public void orderByAscTest() throws IOException { @Test public void orderByDescTest() 
throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s ORDER BY age DESC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s ORDER BY age DESC LIMIT 1000", + TEST_INDEX_ACCOUNT)); assertResponseForOrderByTest(response); } @Test public void orderByDescUsingTableAliasTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT a.age FROM %s a ORDER BY a.age DESC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT a.age FROM %s a ORDER BY a.age DESC LIMIT 1000", + TEST_INDEX_ACCOUNT)); assertResponseForOrderByTest(response); } @@ -940,13 +1075,16 @@ private void assertResponseForOrderByTest(JSONObject response) { @Test @SuppressWarnings("unchecked") public void orderByAscFieldWithSpaceTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE `test field` IS NOT null " + - "ORDER BY `test field` ASC " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE `test field` IS NOT null " + + "ORDER BY `test field` ASC " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); ArrayList testFields = new ArrayList<>(); @@ -964,195 +1102,177 @@ public void orderByAscFieldWithSpaceTest() throws IOException { @Test public void testWhereWithBoolEqualsTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = true LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, 
BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolEqualsTrueAndGroupBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = true GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolEqualsTrueAndOrderBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = true ORDER BY age LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolIsTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male IS true GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolIsNotTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS NOT true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + + "FROM %s " + + "WHERE male IS NOT true " + + "GROUP BY balance " + + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, 
BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = false LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalseAndGroupBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = false GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalseAndOrderBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male = false ORDER BY age LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolIsFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * FROM %s WHERE male IS false GROUP BY balance LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolIsNotFalse() throws 
IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS NOT false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + + "FROM %s " + + "WHERE male IS NOT false " + + "GROUP BY balance " + + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testMultiPartWhere() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE (firstname LIKE 'opal' OR firstname LIKE 'rodriquez') " + - "AND (state like 'oh' OR state like 'hi')", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE (firstname LIKE 'opal' OR firstname LIKE 'rodriquez') " + + "AND (state like 'oh' OR state like 'hi')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(2, getTotalHits(response)); } @Test public void testMultiPartWhere2() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE ((account_number > 200 AND account_number < 300) OR gender LIKE 'm') " + - "AND (state LIKE 'hi' OR address LIKE 'avenue')", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE ((account_number > 200 AND account_number < 300) OR gender LIKE 'm') " + + "AND (state LIKE 'hi' OR address LIKE 'avenue')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(127, getTotalHits(response)); } @Test public void testMultiPartWhere3() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE ((account_number > 25 AND account_number < 75) AND age >35 ) " + - "AND (state LIKE 'md' OR (address LIKE 'avenue' OR address LIKE 'street'))", - 
TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE ((account_number > 25 AND account_number < 75) AND age >35 ) " + + "AND (state LIKE 'md' OR (address LIKE 'avenue' OR address LIKE 'street'))", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(7, getTotalHits(response)); } @Test public void filterPolygonTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE GEO_INTERSECTS(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE GEO_INTERSECTS(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1163,10 +1283,12 @@ public void filterPolygonTest() throws IOException { @Test public void boundingBox() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_BOUNDING_BOX(center, 100.0, 1.0, 101, 0.0)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_BOUNDING_BOX(center, 100.0, 1.0, 101, 0.0)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1177,10 +1299,12 @@ public void boundingBox() throws IOException { @Test public void geoDistance() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_DISTANCE(center, '1km', 100.5, 0.500001)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_DISTANCE(center, '1km', 100.5, 0.500001)", + 
TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1191,10 +1315,12 @@ public void geoDistance() throws IOException { @Test public void geoPolygon() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_POLYGON(center, 100,0, 100.5, 2, 101.0,0)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_POLYGON(center, 100,0, 100.5, 2, 101.0,0)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1206,36 +1332,45 @@ public void geoPolygon() throws IOException { @Ignore @Test public void escapedCharactersCheck() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE MATCH_PHRASE(nickname, 'Daenerys \"Stormborn\"') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE MATCH_PHRASE(nickname, 'Daenerys \"Stormborn\"') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexObjectSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE MATCH_PHRASE(name.firstname, 'Jaime') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE MATCH_PHRASE(name.firstname, 'Jaime') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexObjectReturnField() throws IOException { - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT parents.father " + - "FROM %s " + - "WHERE MATCH_PHRASE(name.firstname, 'Brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT parents.father " + + "FROM %s " + + "WHERE MATCH_PHRASE(name.firstname, 'Brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1246,14 +1381,18 @@ public void complexObjectReturnField() throws IOException { /** * TODO: Fields prefixed with @ gets converted to SQLVariantRefExpr instead of SQLIdentifierExpr - * Either change SQLVariantRefExpr to SQLIdentifierExpr - * Or handle the special case for SQLVariantRefExpr + * Either change SQLVariantRefExpr to SQLIdentifierExpr Or handle the special case for + * SQLVariantRefExpr */ @Ignore @Test public void queryWithAtFieldOnWhere() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s where @wolf = 'Summer' LIMIT 1000", TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s where @wolf = 'Summer' LIMIT 1000", + TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); JSONObject hit = getHits(response).getJSONObject(0); Assert.assertEquals("Summer", hit.get("@wolf")); @@ -1265,19 +1404,22 @@ public void queryWithDotAtStartOfIndexName() throws Exception { TestUtils.createHiddenIndexByRestClient(client(), ".bank", null); TestUtils.loadDataByRestClient(client(), ".bank", "/src/test/resources/.bank.json"); - String response = executeQuery("SELECT education FROM .bank WHERE account_number = 12345", - "jdbc"); + String response = + executeQuery("SELECT education FROM .bank WHERE account_number = 12345", "jdbc"); Assert.assertTrue(response.contains("PhD")); } @Test public void notLikeTests() throws IOException { - 
JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname NOT LIKE 'd%%' AND name IS NOT NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname NOT LIKE 'd%%' AND name IS NOT NULL " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(3, getTotalHits(response)); @@ -1286,45 +1428,49 @@ public void notLikeTests() throws IOException { JSONObject source = getSource(hit); String name = source.getJSONObject("name").getString("firstname"); - Assert - .assertFalse(String.format(Locale.ROOT, "Name [%s] should not match pattern [d%%]", name), - name.startsWith("d")); + Assert.assertFalse( + String.format(Locale.ROOT, "Name [%s] should not match pattern [d%%]", name), + name.startsWith("d")); } } @Test public void isNullTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE nickname IS NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name FROM %s WHERE nickname IS NULL LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(6, getTotalHits(response)); } @Test public void isNotNullTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE nickname IS NOT NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name FROM %s WHERE nickname IS NOT NULL LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void innerQueryTest() throws IOException { - JSONObject 
response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s D " + - "WHERE holdersName IN (SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Hattie')", - TestsConstants.TEST_INDEX_DOG, TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s D " + + "WHERE holdersName IN (SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Hattie')", + TestsConstants.TEST_INDEX_DOG, + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1339,19 +1485,22 @@ public void innerQueryTest() throws IOException { @Ignore @Test public void twoSubQueriesTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE holdersName IN (SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Hattie') " + - "AND age IN (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL) ", - TestsConstants.TEST_INDEX_DOG, - TEST_INDEX_ACCOUNT, - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE holdersName IN (SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Hattie') " + + "AND age IN (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL) ", + TestsConstants.TEST_INDEX_DOG, + TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1366,14 +1515,18 @@ public void twoSubQueriesTest() throws IOException { @Ignore @Test public void inTermsSubQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE age = IN_TERMS (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - 
"AND name.ofHisName IS NOT NULL)", - TestsConstants.TEST_INDEX_DOG, TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE age = IN_TERMS (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL)", + TestsConstants.TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1388,9 +1541,12 @@ public void inTermsSubQueryTest() throws IOException { @Ignore @Test public void idsQueryOneId() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1405,9 +1561,12 @@ public void idsQueryOneId() throws IOException { @Ignore @Test public void idsQueryMultipleId() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1, 2, 3)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1, 2, 3)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1422,14 +1581,18 @@ public void idsQueryMultipleId() throws IOException { @Ignore @Test public void idsQuerySubQueryIds() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE _id = IDS_QUERY(dog, (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL))", - 
TestsConstants.TEST_INDEX_DOG, TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE _id = IDS_QUERY(dog, (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL))", + TestsConstants.TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1443,18 +1606,24 @@ public void idsQuerySubQueryIds() throws IOException { @Test public void nestedEqualsTestFieldNormalField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE nested(message.info)='b'", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info)='b'", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void nestedEqualsTestFieldInsideArrays() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE nested(message.info) = 'a'", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info) = 'a'", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(2, getTotalHits(response)); } @@ -1462,106 +1631,124 @@ public void nestedEqualsTestFieldInsideArrays() throws IOException { @Ignore // Seems like we don't support nested with IN, throwing IllegalArgumentException @Test public void nestedOnInQuery() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s where nested(message.info) IN ('a','b')", TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s where nested(message.info) IN 
('a','b')", + TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(3, getTotalHits(response)); } @Test public void complexNestedQueryBothOnSameObject() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested('message', message.info = 'a' AND message.author ='i')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE nested('message', message.info = 'a' AND message.author ='i')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexNestedQueryNotBothOnSameObject() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested('message', message.info = 'a' AND message.author ='h')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE nested('message', message.info = 'a' AND message.author ='h')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(0, getTotalHits(response)); } @Test public void nestedOnInTermsQuery() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested(message.info) = IN_TERMS('a', 'b')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info) = IN_TERMS('a', 'b')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(3, getTotalHits(response)); } // TODO Uncomment these after problem with loading join index is resolved -// @Test -// public void childrenEqualsTestFieldNormalField() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, 
info) = 'b'", TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(1, getTotalHits(response)); -// } -// -// @Test -// public void childrenOnInQuery() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) IN ('a', 'b')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(2, getTotalHits(response)); -// } -// -// @Test -// public void complexChildrenQueryBothOnSameObject() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info = 'a' AND author ='e')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(1, getTotalHits(response)); -// } -// -// @Test -// public void complexChildrenQueryNotOnSameObject() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info = 'a' AND author ='j')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(0, getTotalHits(response)); -// } -// -// @Test -// public void childrenOnInTermsQuery() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) = IN_TERMS(a, b)", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(2, getTotalHits(response)); -// } + // @Test + // public void childrenEqualsTestFieldNormalField() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) = 'b'", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(1, getTotalHits(response)); + // } + // + // @Test + // public void childrenOnInQuery() throws IOException { + // JSONObject 
response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) IN ('a', 'b')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(2, getTotalHits(response)); + // } + // + // @Test + // public void complexChildrenQueryBothOnSameObject() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info = 'a' AND author + // ='e')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(1, getTotalHits(response)); + // } + // + // @Test + // public void complexChildrenQueryNotOnSameObject() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info = 'a' AND author + // ='j')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(0, getTotalHits(response)); + // } + // + // @Test + // public void childrenOnInTermsQuery() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) = IN_TERMS(a, b)", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(2, getTotalHits(response)); + // } @Ignore // the hint does not really work, NoSuchIndexException is thrown @Test public void multipleIndicesOneNotExistWithHint() throws IOException { - JSONObject response = executeQuery(String - .format(Locale.ROOT, "SELECT /*! IGNORE_UNAVAILABLE */ * FROM %s,%s ", TEST_INDEX_ACCOUNT, - "badindex")); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! 
IGNORE_UNAVAILABLE */ * FROM %s,%s ", + TEST_INDEX_ACCOUNT, + "badindex")); Assert.assertTrue(getTotalHits(response) > 0); } @@ -1573,8 +1760,8 @@ public void multipleIndicesOneNotExistWithoutHint() throws IOException { String.format(Locale.ROOT, "SELECT * FROM %s, %s", TEST_INDEX_ACCOUNT, "badindex")); Assert.fail("Expected exception, but call succeeded"); } catch (ResponseException e) { - Assert.assertEquals(RestStatus.BAD_REQUEST.getStatus(), - e.getResponse().getStatusLine().getStatusCode()); + Assert.assertEquals( + RestStatus.BAD_REQUEST.getStatus(), e.getResponse().getStatusLine().getStatusCode()); final String entity = TestUtils.getResponseBody(e.getResponse()); Assert.assertThat(entity, containsString("\"type\": \"IndexNotFoundException\"")); } @@ -1582,29 +1769,36 @@ public void multipleIndicesOneNotExistWithoutHint() throws IOException { // TODO Find way to check routing() without SearchRequestBuilder // to properly update these tests to OpenSearchIntegTestCase format -// @Test -// public void routingRequestOneRounting() throws IOException { -// SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, -// "SELECT /*! ROUTINGS(hey) */ * FROM %s ", TEST_INDEX_ACCOUNT)); -// SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); -// Assert.assertEquals("hey",searchRequestBuilder.request().routing()); -// } -// -// @Test -// public void routingRequestMultipleRountings() throws IOException { -// SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, -// "SELECT /*! 
ROUTINGS(hey,bye) */ * FROM %s ", TEST_INDEX_ACCOUNT)); -// SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); -// Assert.assertEquals("hey,bye",searchRequestBuilder.request().routing()); -// } + // @Test + // public void routingRequestOneRounting() throws IOException { + // SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, + // "SELECT /*! ROUTINGS(hey) */ * FROM %s ", + // TEST_INDEX_ACCOUNT)); + // SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); + // Assert.assertEquals("hey",searchRequestBuilder.request().routing()); + // } + // + // @Test + // public void routingRequestMultipleRountings() throws IOException { + // SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, + // "SELECT /*! ROUTINGS(hey,bye) */ * FROM %s ", + // TEST_INDEX_ACCOUNT)); + // SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); + // Assert.assertEquals("hey,bye",searchRequestBuilder.request().routing()); + // } @Ignore // Getting parser error: syntax error, expect RPAREN, actual IDENTIFIER insert_time @Test public void scriptFilterNoParams() throws IOException { - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT insert_time FROM %s where script('doc[\\'insert_time\''].date.hourOfDay==16') " + - "and insert_time <'2014-08-21T00:00:00.000Z'", TEST_INDEX_ONLINE)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s where" + + " script('doc[\\'insert_time\''].date.hourOfDay==16') and insert_time" + + " <'2014-08-21T00:00:00.000Z'", + TEST_INDEX_ONLINE)); Assert.assertEquals(237, getTotalHits(result)); } @@ -1612,20 +1806,28 @@ public void scriptFilterNoParams() throws IOException { @Test public void scriptFilterWithParams() throws IOException { - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT insert_time FROM %s where 
script('doc[\\'insert_time\''].date.hourOfDay==x','x'=16) " + - "and insert_time <'2014-08-21T00:00:00.000Z'", TEST_INDEX_ONLINE)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s where" + + " script('doc[\\'insert_time\''].date.hourOfDay==x','x'=16) and insert_time" + + " <'2014-08-21T00:00:00.000Z'", + TEST_INDEX_ONLINE)); Assert.assertEquals(237, getTotalHits(result)); } @Test public void highlightPreTagsAndPostTags() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT /*! HIGHLIGHT(phrase, pre_tags : [''], post_tags : ['']) */ " + - "* FROM %s " + - "WHERE phrase LIKE 'fox' " + - "ORDER BY _score", TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! HIGHLIGHT(phrase, pre_tags : [''], post_tags : ['']) */ " + + "* FROM %s " + + "WHERE phrase LIKE 'fox' " + + "ORDER BY _score", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -1640,13 +1842,17 @@ public void highlightPreTagsAndPostTags() throws IOException { @Ignore @Test public void fieldCollapsingTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT /*! COLLAPSE({\"field\":\"age\"," + - "\"inner_hits\":{\"name\": \"account\"," + - "\"size\":1," + - "\"sort\":[{\"age\":\"asc\"}]}," + - "\"max_concurrent_group_searches\": 4}) */ " + - "* FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! 
COLLAPSE({\"field\":\"age\"," + + "\"inner_hits\":{\"name\": \"account\"," + + "\"size\":1," + + "\"sort\":[{\"age\":\"asc\"}]}," + + "\"max_concurrent_group_searches\": 4}) */ " + + "* FROM %s", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(21, hits.length()); @@ -1656,8 +1862,8 @@ public void fieldCollapsingTest() throws IOException { @Test public void backticksQuotedIndexNameTest() throws Exception { TestUtils.createIndexByRestClient(client(), "bank_unquote", null); - TestUtils - .loadDataByRestClient(client(), "bank", "/src/test/resources/bank_for_unquote_test.json"); + TestUtils.loadDataByRestClient( + client(), "bank", "/src/test/resources/bank_for_unquote_test.json"); JSONArray hits = getHits(executeQuery("SELECT lastname FROM `bank`")); Object responseIndex = ((JSONObject) hits.get(0)).query("/_index"); @@ -1665,39 +1871,57 @@ public void backticksQuotedIndexNameTest() throws Exception { assertEquals( executeQuery("SELECT lastname FROM bank", "jdbc"), - executeQuery("SELECT `bank`.`lastname` FROM `bank`", "jdbc") - ); + executeQuery("SELECT `bank`.`lastname` FROM `bank`", "jdbc")); assertEquals( executeQuery( - "SELECT `b`.`age` AS `AGE`, AVG(`b`.`balance`) FROM `bank` AS `b` " + - "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`age` ORDER BY `b`.`age`", + "SELECT `b`.`age` AS `AGE`, AVG(`b`.`balance`) FROM `bank` AS `b` " + + "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`age` ORDER BY `b`.`age`", "jdbc"), - executeQuery("SELECT b.age AS AGE, AVG(balance) FROM bank AS b " + - "WHERE ABS(age) > 20 GROUP BY b.age ORDER BY b.age", - "jdbc") - ); + executeQuery( + "SELECT b.age AS AGE, AVG(balance) FROM bank AS b " + + "WHERE ABS(age) > 20 GROUP BY b.age ORDER BY b.age", + "jdbc")); } @Test public void backticksQuotedFieldNamesTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); - String quotedFieldResult = 
executeQuery(StringUtils.format("SELECT b.`lastname` FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT b.lastname FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); + String quotedFieldResult = + executeQuery( + StringUtils.format( + "SELECT b.`lastname` FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, quotedFieldResult); } @Test public void backticksQuotedAliasTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); - String quotedAliasResult = executeQuery(StringUtils.format("SELECT `b`.lastname FROM %s" + - " AS `b` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT b.lastname FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); + String quotedAliasResult = + executeQuery( + StringUtils.format( + "SELECT `b`.lastname FROM %s AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); String quotedAliasAndFieldResult = - executeQuery(StringUtils.format("SELECT `b`.`lastname` FROM %s " + - "AS `b` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, quotedAliasResult); assertEquals(expected, quotedAliasAndFieldResult); @@ -1705,19 +1929,28 @@ public void backticksQuotedAliasTest() { @Test public void backticksQuotedAliasWithSpecialCharactersTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT 
b.lastname FROM %s AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); String specialCharAliasResult = - executeQuery(StringUtils.format("SELECT `b k`.lastname FROM %s " + - "AS `b k` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + executeQuery( + StringUtils.format( + "SELECT `b k`.lastname FROM %s AS `b k` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, specialCharAliasResult); } @Test public void backticksQuotedAliasInJDBCResponseTest() { - String query = StringUtils.format("SELECT `b`.`lastname` AS `name` FROM %s AS `b` " + - "ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK); + String query = + StringUtils.format( + "SELECT `b`.`lastname` AS `name` FROM %s AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK); String response = executeQuery(query, "jdbc"); assertTrue(response.contains("\"alias\": \"name\"")); @@ -1725,10 +1958,14 @@ public void backticksQuotedAliasInJDBCResponseTest() { @Test public void caseWhenSwitchTest() throws IOException { - JSONObject response = executeQuery("SELECT CASE age " + - "WHEN 30 THEN '1' " + - "WHEN 40 THEN '2' " + - "ELSE '0' END AS cases FROM " + TEST_INDEX_ACCOUNT + " WHERE age IS NOT NULL"); + JSONObject response = + executeQuery( + "SELECT CASE age " + + "WHEN 30 THEN '1' " + + "WHEN 40 THEN '2' " + + "ELSE '0' END AS cases FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL"); JSONObject hit = getHits(response).getJSONObject(0); String age = hit.query("/_source/age").toString(); String cases = age.equals("30") ? "1" : age.equals("40") ? 
"2" : "0"; @@ -1738,49 +1975,61 @@ public void caseWhenSwitchTest() throws IOException { @Test public void caseWhenJdbcResponseTest() { - String response = executeQuery("SELECT CASE age " + - "WHEN 30 THEN 'age is 30' " + - "WHEN 40 THEN 'age is 40' " + - "ELSE 'NA' END AS cases FROM " + TEST_INDEX_ACCOUNT + " WHERE age is not null", "jdbc"); + String response = + executeQuery( + "SELECT CASE age " + + "WHEN 30 THEN 'age is 30' " + + "WHEN 40 THEN 'age is 40' " + + "ELSE 'NA' END AS cases FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age is not null", + "jdbc"); assertTrue( - response.contains("age is 30") || - response.contains("age is 40") || - response.contains("NA") - ); + response.contains("age is 30") + || response.contains("age is 40") + || response.contains("NA")); } @Ignore("This is already supported in new SQL engine") @Test public void functionInCaseFieldShouldThrowESExceptionDueToIllegalScriptInJdbc() { - String response = executeQuery( - "select case lower(firstname) when 'amber' then '1' else '2' end as cases from " + - TEST_INDEX_ACCOUNT, - "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = + executeQuery( + "select case lower(firstname) when 'amber' then '1' else '2' end as cases from " + + TEST_INDEX_ACCOUNT, + "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "For more details, please send request for Json format"); } @Ignore("This is already supported in our new query engine") @Test public void functionCallWithIllegalScriptShouldThrowESExceptionInJdbc() { - String response = executeQuery("select log(balance + 2) from " + TEST_INDEX_BANK, - "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = executeQuery("select log(balance + 2) from " + TEST_INDEX_BANK, "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "please 
send request for Json format to see the raw response from OpenSearch engine."); } - @Ignore("Goes in different route, does not call PrettyFormatRestExecutor.execute methods." + - "The performRequest method in RestClient doesn't throw any exceptions for null value fields in script") + @Ignore( + "Goes in different route, does not call PrettyFormatRestExecutor.execute methods.The" + + " performRequest method in RestClient doesn't throw any exceptions for null value" + + " fields in script") @Test public void functionArgWithNullValueFieldShouldThrowESExceptionInJdbc() { - String response = executeQuery( - "select log(balance) from " + TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = + executeQuery("select log(balance) from " + TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "For more details, please send request for Json format"); } - private void queryInJdbcResponseShouldIndicateESException(String response, String exceptionType, - String... errMsgs) { + private void queryInJdbcResponseShouldIndicateESException( + String response, String exceptionType, String... 
errMsgs) { Assert.assertThat(response, containsString(exceptionType)); for (String errMsg : errMsgs) { Assert.assertThat(response, containsString(errMsg)); @@ -1803,9 +2052,21 @@ private void checkAggregationResponseSize(JSONObject response, int sizeCheck) { private void checkSelectAllAndFieldResponseSize(JSONObject response) { String[] arr = - new String[] {"account_number", "firstname", "address", "birthdate", "gender", "city", - "lastname", - "balance", "employer", "state", "age", "email", "male"}; + new String[] { + "account_number", + "firstname", + "address", + "birthdate", + "gender", + "city", + "lastname", + "balance", + "employer", + "state", + "age", + "email", + "male" + }; Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONArray hits = getHits(response); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java index dd48d82114..e4f1cc552d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java @@ -44,11 +44,13 @@ import org.junit.Before; import org.opensearch.client.Request; import org.opensearch.client.Response; +import org.opensearch.core.common.Strings; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.XContentBuilder; /** + *
  * SQL plugin integration test base class (migrated from SQLIntegTestCase)
  * 

* The execution of order is as follows: @@ -60,6 +62,7 @@ * XXXTIT: 3) init() 5) init() *

* TODO: this base class should extends ODFERestTestCase + *

*/ public abstract class RestIntegTestCase extends OpenSearchSQLRestTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java index c1c1a26f4a..356b910d5f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -45,10 +44,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; - -/** - * Created by allwefantasy on 8/25/16. - */ +/** Created by allwefantasy on 8/25/16. */ public class SQLFunctionsIT extends SQLIntegTestCase { @Override @@ -61,70 +57,75 @@ protected void init() throws Exception { @Test public void functionFieldAliasAndGroupByAlias() throws Exception { - String query = "SELECT " + - "floor(substring(address,0,3)*20) as key," + - "sum(age) cvalue FROM " + TEST_INDEX_ACCOUNT + " where address is not null " + - "group by key order by cvalue desc limit 10 "; + String query = + "SELECT " + + "floor(substring(address,0,3)*20) as key," + + "sum(age) cvalue FROM " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "group by key order by cvalue desc limit 10 "; final JSONObject result = executeQuery(query); - - IntStream.rangeClosed(0, 9).forEach(i -> { - Assert.assertNotNull(result.query(String.format("/aggregations/key/buckets/%d/key", i))); - Assert.assertNotNull( - result.query(String.format("/aggregations/key/buckets/%d/cvalue/value", i))); - } - ); + IntStream.rangeClosed(0, 9) + .forEach( + i -> { + Assert.assertNotNull( + result.query(String.format("/aggregations/key/buckets/%d/key", i))); + Assert.assertNotNull( + result.query(String.format("/aggregations/key/buckets/%d/cvalue/value", i))); + }); } /** * todo fix the issue. 
* - * @see https://github.com/opendistro-for-elasticsearch/sql/issues/59 + * @see https://github.com/opendistro-for-elasticsearch/sql/issues/59 */ @Ignore public void normalFieldAlias() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - "address as key,age from " + - TEST_INDEX_ACCOUNT + " where address is not null " + - "limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + "address as key,age from " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/_source/key", not(isEmptyOrNullString()))) - ); + assertThat(executeQuery(query), hitAny(kvString("/_source/key", not(isEmptyOrNullString())))); } @Test public void functionAlias() throws Exception { - //here is a bug,if only script fields are included,then all fields will return; fix later - String query = "SELECT " + - "substring(address,0,3) as key,address from " + - TEST_INDEX_ACCOUNT + " where address is not null " + - "order by address desc limit 10 "; + // here is a bug,if only script fields are included,then all fields will return; fix later + String query = + "SELECT " + + "substring(address,0,3) as key,address from " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "order by address desc limit 10 "; assertThat( executeQuery(query), - hitAny(both(kvString("/_source/address", equalTo("863 Wythe Place"))) - .and(kvString("/fields/key/0", - equalTo("863")))) - ); + hitAny( + both(kvString("/_source/address", equalTo("863 Wythe Place"))) + .and(kvString("/fields/key/0", equalTo("863"))))); } @Test public void caseChangeTest() throws IOException { - String query = "SELECT LOWER(firstname) " + - "FROM opensearch-sql_test_index_account " + - "WHERE UPPER(lastname)='DUKE' " + - "ORDER BY upper(lastname) "; + String query = + "SELECT LOWER(firstname) " + + "FROM opensearch-sql_test_index_account " + + "WHERE UPPER(lastname)='DUKE' " + + "ORDER BY upper(lastname) 
"; assertThat( executeQuery(query), hitAny( kvString("/_source/address", equalTo("880 Holmes Lane")), - kvString("/fields/LOWER(firstname)/0", equalTo("amber"))) - ); + kvString("/fields/LOWER(firstname)/0", equalTo("amber")))); } @Test @@ -133,23 +134,23 @@ public void caseChangeTestWithLocale() throws IOException { // "IL".toLowerCase() in a Turkish locale returns "ıl" // https://stackoverflow.com/questions/11063102/using-locales-with-javas-tolowercase-and-touppercase - String query = "SELECT LOWER(state.keyword, 'tr') " + - "FROM opensearch-sql_test_index_account " + - "WHERE account_number=1"; + String query = + "SELECT LOWER(state.keyword, 'tr') " + + "FROM opensearch-sql_test_index_account " + + "WHERE account_number=1"; assertThat( executeQuery(query), - hitAny( - kvString("/fields/LOWER(state.keyword, 'tr')/0", equalTo("ıl"))) - ); + hitAny(kvString("/fields/LOWER(state.keyword, 'tr')/0", equalTo("ıl")))); } @Test public void caseChangeWithAggregationTest() throws IOException { - String query = "SELECT UPPER(e.firstname) AS upper, COUNT(*)" + - "FROM opensearch-sql_test_index_account e " + - "WHERE LOWER(e.lastname)='duke' " + - "GROUP BY upper"; + String query = + "SELECT UPPER(e.firstname) AS upper, COUNT(*)" + + "FROM opensearch-sql_test_index_account e " + + "WHERE LOWER(e.lastname)='duke' " + + "GROUP BY upper"; assertThat( executeQuery(query), @@ -158,8 +159,10 @@ public void caseChangeWithAggregationTest() throws IOException { @Test public void castIntFieldToDoubleWithoutAliasTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age DESC LIMIT 5"; + String query = + "SELECT CAST(age AS DOUBLE) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age DESC LIMIT 5"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_age", "DOUBLE"); @@ -171,8 +174,9 @@ public void castIntFieldToDoubleWithoutAliasTest() throws IOException { @Test public void 
castIntFieldToDoubleWithAliasTest() throws IOException { String query = - "SELECT CAST(age AS DOUBLE) AS test_alias FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 5"; + "SELECT CAST(age AS DOUBLE) AS test_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age LIMIT 5"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "test_alias", "DOUBLE"); @@ -183,8 +187,10 @@ public void castIntFieldToDoubleWithAliasTest() throws IOException { @Test public void castIntFieldToStringWithoutAliasTest() throws IOException { - String query = "SELECT CAST(balance AS STRING) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance LIMIT 1"; + String query = + "SELECT CAST(balance AS STRING) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_balance", "STRING"); @@ -195,48 +201,51 @@ public void castIntFieldToStringWithoutAliasTest() throws IOException { @Test public void castIntFieldToStringWithAliasTest() throws IOException { - String query = "SELECT CAST(balance AS STRING) AS cast_string_alias FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY cast_string_alias DESC LIMIT 1"; + String query = + "SELECT CAST(balance AS STRING) AS cast_string_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY cast_string_alias DESC LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_string_alias", "STRING"); for (int i = 0; i < hits.length; ++i) { Assert.assertThat(hits[i].getFields().get("cast_string_alias").getValue(), is("9838")); } - } @Test public void castIntFieldToFloatWithoutAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS cast_balance FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance DESC LIMIT 1"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS 
cast_balance FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance DESC LIMIT 1"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "cast_balance", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "cast_balance", "float")); - verifyDataRows(response, - rows(49989.0)); + verifyDataRows(response, rows(49989.0)); } @Test public void castIntFieldToFloatWithAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY jdbc_float_alias LIMIT 1"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY jdbc_float_alias LIMIT 1"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); - verifyDataRows(response, - rows(1011.0)); + verifyDataRows(response, rows(1011.0)); } @Test public void castIntFieldToDoubleWithoutAliasOrderByTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 1"; + String query = + "SELECT CAST(age AS DOUBLE) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_age", "DOUBLE"); @@ -247,148 +256,138 @@ public void castIntFieldToDoubleWithoutAliasOrderByTest() throws IOException { @Test public void castIntFieldToDoubleWithAliasOrderByTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) AS alias FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY alias DESC LIMIT 1"; + String query = + "SELECT CAST(age AS DOUBLE) AS alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY alias DESC LIMIT 1"; SearchHit[] hits = query(query).getHits(); 
checkSuccessfulFieldCast(hits, "alias", "DOUBLE"); for (int i = 0; i < hits.length; ++i) { Assert.assertThat(hits[i].getFields().get("alias").getValue(), is(40.0)); } - } @Test public void castIntFieldToFloatWithoutAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " GROUP BY balance ORDER BY balance DESC LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY balance ORDER BY balance DESC LIMIT 5"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", null, "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", null, "float")); - verifyDataRows(response, - rows(49989.0), - rows(49795.0), - rows(49741.0), - rows(49671.0), - rows(49587.0)); + verifyDataRows( + response, rows(49989.0), rows(49795.0), rows(49741.0), rows(49671.0), rows(49587.0)); } @Test public void castIntFieldToFloatWithAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " - + " FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " GROUP BY jdbc_float_alias " - + " ORDER BY jdbc_float_alias ASC " - + " LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + + " FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY jdbc_float_alias " + + " ORDER BY jdbc_float_alias ASC " + + " LIMIT 5"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); - verifyDataRows(response, - rows(1011.0), - rows(10116.0), - rows(10138.0), - rows(10147.0), - rows(10178.0)); + verifyDataRows( + response, rows(1011.0), rows(10116.0), rows(10138.0), rows(10147.0), rows(10178.0)); } @Test public void castIntFieldToDoubleWithAliasJdbcFormatGroupByTest() { - 
JSONObject response = executeJdbcRequest( - "SELECT CAST(age AS DOUBLE) AS jdbc_double_alias " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " GROUP BY jdbc_double_alias DESC LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(age AS DOUBLE) AS jdbc_double_alias " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY jdbc_double_alias DESC LIMIT 5"); - verifySchema(response, - schema("jdbc_double_alias", "jdbc_double_alias", "double")); + verifySchema(response, schema("jdbc_double_alias", "jdbc_double_alias", "double")); - verifyDataRows(response, - rows("31.0"), - rows("39.0"), - rows("26.0"), - rows("32.0"), - rows("35.0")); + verifyDataRows(response, rows("31.0"), rows("39.0"), rows("26.0"), rows("32.0"), rows("35.0")); } @Test public void castKeywordFieldToDatetimeWithoutAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) FROM " - + TestsConstants.TEST_INDEX_DATE + " ORDER BY date_keyword"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) FROM " + + TestsConstants.TEST_INDEX_DATE + + " ORDER BY date_keyword"); verifySchema(response, schema("cast_date_keyword", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castKeywordFieldToDatetimeWithAliasJdbcFormatTest() { JSONObject response = - executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " - + TestsConstants.TEST_INDEX_DATE + " ORDER BY date_keyword"); + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " + + TestsConstants.TEST_INDEX_DATE + + " ORDER BY date_keyword"); verifySchema(response, schema("test_alias", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, 
rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castFieldToDatetimeWithWhereClauseJdbcFormatTest() { - JSONObject response = executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) FROM " - + TestsConstants.TEST_INDEX_DATE + " WHERE date_keyword IS NOT NULL ORDER BY date_keyword"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) FROM " + + TestsConstants.TEST_INDEX_DATE + + " WHERE date_keyword IS NOT NULL ORDER BY date_keyword"); verifySchema(response, schema("cast_date_keyword", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castFieldToDatetimeWithGroupByJdbcFormatTest() { JSONObject response = - executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " - + TestsConstants.TEST_INDEX_DATE + " GROUP BY test_alias DESC"); + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " + + TestsConstants.TEST_INDEX_DATE + + " GROUP BY test_alias DESC"); verifySchema(response, schema("test_alias", "test_alias", "double")); - verifyDataRows(response, - rows("2014-08-19T07:09:13.434Z"), - rows("2019-09-25T02:04:13.469Z")); + verifyDataRows(response, rows("2014-08-19T07:09:13.434Z"), rows("2019-09-25T02:04:13.469Z")); } - @Test public void castBoolFieldToNumericValueInSelectClause() { JSONObject response = executeJdbcRequest( "SELECT " - + " male, " - + " CAST(male AS INT) AS cast_int, " - + " CAST(male AS LONG) AS cast_long, " - + " CAST(male AS FLOAT) AS cast_float, " - + " CAST(male AS DOUBLE) AS cast_double " - + "FROM " + TestsConstants.TEST_INDEX_BANK + " " - + "WHERE account_number = 1 OR account_number = 13" - ); - - verifySchema(response, + + " male, " + + " CAST(male AS INT) AS cast_int, " + + " CAST(male AS LONG) AS cast_long, " + + " CAST(male AS FLOAT) AS 
cast_float, " + + " CAST(male AS DOUBLE) AS cast_double " + + "FROM " + + TestsConstants.TEST_INDEX_BANK + + " " + + "WHERE account_number = 1 OR account_number = 13"); + + verifySchema( + response, schema("male", "boolean"), schema("CAST(male AS INT)", "cast_int", "integer"), schema("CAST(male AS LONG)", "cast_long", "long"), schema("CAST(male AS FLOAT)", "cast_float", "float"), - schema("CAST(male AS DOUBLE)", "cast_double", "double") - ); - verifyDataRows(response, - rows(true, 1, 1, 1.0, 1.0), - rows(false, 0, 0, 0.0, 0.0) - ); + schema("CAST(male AS DOUBLE)", "cast_double", "double")); + verifyDataRows(response, rows(true, 1, 1, 1.0, 1.0), rows(false, 0, 0, 0.0, 0.0)); } @Test @@ -396,90 +395,82 @@ public void castBoolFieldToNumericValueWithGroupByAlias() { JSONObject response = executeJdbcRequest( "SELECT " - + "CAST(male AS INT) AS cast_int, " - + "COUNT(*) " - + "FROM " + TestsConstants.TEST_INDEX_BANK + " " - + "GROUP BY cast_int" - ); - - verifySchema(response, + + "CAST(male AS INT) AS cast_int, " + + "COUNT(*) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK + + " " + + "GROUP BY cast_int"); + + verifySchema( + response, schema("CAST(male AS INT)", "cast_int", "integer"), - schema("COUNT(*)", "integer") - ); - verifyDataRows(response, - rows(0, 3), - rows(1, 4) - ); + schema("COUNT(*)", "integer")); + verifyDataRows(response, rows(0, 3), rows(1, 4)); } @Test public void castStatementInWhereClauseGreaterThanTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (account_number < CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (account_number < CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(4180), - rows(5686), - rows(7004), - rows(7831), - rows(14127)); + verifyDataRows(response, rows(4180), rows(5686), 
rows(7004), rows(7831), rows(14127)); } @Test public void castStatementInWhereClauseLessThanTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (account_number > CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (account_number > CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1011), - rows(1031), - rows(1110), - rows(1133), - rows(1172)); + verifyDataRows(response, rows(1011), rows(1031), rows(1110), rows(1133), rows(1172)); } @Test public void castStatementInWhereClauseEqualToConstantTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (CAST(age AS DOUBLE) = 36.0) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (CAST(age AS DOUBLE) = 36.0) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1249), - rows(1463), - rows(3960), - rows(5686), - rows(6025)); + verifyDataRows(response, rows(1249), rows(1463), rows(3960), rows(5686), rows(6025)); } @Test public void castStatementInWhereClauseLessThanConstantTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (CAST(age AS DOUBLE) < 36.0) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (CAST(age AS DOUBLE) < 36.0) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1011), - rows(1031), - rows(1110), - rows(1133), - rows(1172)); + verifyDataRows(response, rows(1011), rows(1031), rows(1110), rows(1133), rows(1172)); } /** - * Testing compilation - * Result comparison 
is empty then comparing different types (Date and keyword) + * Testing compilation Result comparison is empty then comparing different types (Date and + * keyword) */ @Test public void castStatementInWhereClauseDatetimeCastTest() { - JSONObject response = executeJdbcRequest("SELECT date_keyword FROM " - + TestsConstants.TEST_INDEX_DATE - + " WHERE (CAST(date_keyword AS DATETIME) = '2014-08-19T07:09:13.434Z')"); + JSONObject response = + executeJdbcRequest( + "SELECT date_keyword FROM " + + TestsConstants.TEST_INDEX_DATE + + " WHERE (CAST(date_keyword AS DATETIME) = '2014-08-19T07:09:13.434Z')"); String schema_result = "{\"name\":\"date_keyword\",\"type\":\"keyword\"}"; assertEquals(response.getJSONArray("schema").get(0).toString(), schema_result); @@ -487,30 +478,32 @@ public void castStatementInWhereClauseDatetimeCastTest() { @Test public void concat_ws_field_and_string() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " concat_ws('-',age,'-') as age,address from " + - TEST_INDEX_ACCOUNT + " " + - " limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + " concat_ws('-',age,'-') as age,address from " + + TEST_INDEX_ACCOUNT + + " " + + " limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/fields/age/0", endsWith("--"))) - ); + assertThat(executeQuery(query), hitAny(kvString("/fields/age/0", endsWith("--")))); } /** * Ignore this test case because painless doesn't allowlist String.split function. 
* - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void whereConditionLeftFunctionRightVariableEqualTest() throws Exception { - String query = "SELECT " + - " * from " + - TestsConstants.TEST_INDEX + " " + - " where split(address,' ')[0]='806' limit 1000 "; + String query = + "SELECT " + + " * from " + + TestsConstants.TEST_INDEX + + " " + + " where split(address,' ')[0]='806' limit 1000 "; assertThat(executeQuery(query).query("/hits/total"), equalTo(4)); } @@ -518,15 +511,18 @@ public void whereConditionLeftFunctionRightVariableEqualTest() throws Exception /** * Ignore this test case because painless doesn't allowlist String.split function. * - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void whereConditionLeftFunctionRightVariableGreatTest() throws Exception { - String query = "SELECT " + - " * from " + - TestsConstants.TEST_INDEX + " " + - " where floor(split(address,' ')[0]+0) > 805 limit 1000 "; + String query = + "SELECT " + + " * from " + + TestsConstants.TEST_INDEX + + " " + + " where floor(split(address,' ')[0]+0) > 805 limit 1000 "; assertThat(executeQuery(query).query("/hits/total"), equalTo(223)); } @@ -534,42 +530,45 @@ public void whereConditionLeftFunctionRightVariableGreatTest() throws Exception @Test public void concat_ws_fields() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " concat_ws('-',age,address) as combine,address from " + - TEST_INDEX_ACCOUNT + " " + - " limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/fields/combine/0", containsString("-"))) - ); + // here is a bug,csv field with spa + String query = + "SELECT " + + " concat_ws('-',age,address) as 
combine,address from " + + TEST_INDEX_ACCOUNT + + " " + + " limit 10 "; + assertThat(executeQuery(query), hitAny(kvString("/fields/combine/0", containsString("-")))); } @Test public void functionLogs() throws Exception { - String query = "SELECT log10(100) as a, log(1) as b, log(2, 4) as c, log2(8) as d from " - + TEST_INDEX_ACCOUNT + " limit 1"; + String query = + "SELECT log10(100) as a, log(1) as b, log(2, 4) as c, log2(8) as d from " + + TEST_INDEX_ACCOUNT + + " limit 1"; assertThat( executeQuery(query), - hitAny(both(kvDouble("/fields/a/0", equalTo(Math.log10(100)))) - .and(kvDouble("/fields/b/0", equalTo(Math.log(1)))) - .and(kvDouble("/fields/c/0", closeTo(Math.log(4) / Math.log(2), 0.0001))) - .and(kvDouble("/fields/d/0", closeTo(Math.log(8) / Math.log(2), 0.0001)))) - ); + hitAny( + both(kvDouble("/fields/a/0", equalTo(Math.log10(100)))) + .and(kvDouble("/fields/b/0", equalTo(Math.log(1)))) + .and(kvDouble("/fields/c/0", closeTo(Math.log(4) / Math.log(2), 0.0001))) + .and(kvDouble("/fields/d/0", closeTo(Math.log(8) / Math.log(2), 0.0001))))); } @Test public void functionPow() throws Exception { - String query = "SELECT pow(account_number, 2) as key," + - "abs(age - 60) as new_age from " + TEST_INDEX_ACCOUNT + - " WHERE firstname = 'Virginia' and lastname='Ayala' limit 1"; + String query = + "SELECT pow(account_number, 2) as key," + + "abs(age - 60) as new_age from " + + TEST_INDEX_ACCOUNT + + " WHERE firstname = 'Virginia' and lastname='Ayala' limit 1"; assertThat( executeQuery(query), - hitAny(both(kvDouble("/fields/new_age/0", equalTo(21.0))) - .and(kvDouble("/fields/key/0", equalTo(625.0)))) - ); + hitAny( + both(kvDouble("/fields/new_age/0", equalTo(21.0))) + .and(kvDouble("/fields/key/0", equalTo(625.0))))); } @Test @@ -577,96 +576,88 @@ public void operatorSubstring() throws IOException { assertThat( executeQuery( "SELECT substring('sampleName', 1, 4) AS substring FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/substring/0", 
equalTo("samp"))) - ); + hitAny(kvString("/fields/substring/0", equalTo("samp")))); assertThat( executeQuery( "SELECT substring('sampleName', 0, 20) AS substring FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/substring/0", equalTo("sampleName"))) - ); + hitAny(kvString("/fields/substring/0", equalTo("sampleName")))); } @Test public void operatorLength() throws IOException { assertThat( - executeQuery("SELECT LENGTH(lastname) FROM " + TEST_INDEX_ACCOUNT + executeQuery( + "SELECT LENGTH(lastname) FROM " + + TEST_INDEX_ACCOUNT + " WHERE lastname IS NOT NULL GROUP BY LENGTH(lastname) ORDER BY LENGTH(lastname)", "jdbc"), - containsString("\"type\": \"integer\"") - ); + containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT LENGTH('sampleName') AS length FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/length/0", equalTo(10))) - ); - + hitAny(kvInt("/fields/length/0", equalTo(10)))); } @Test public void operatorReplace() { String query = "SELECT REPLACE('elastic', 'el', 'fant') FROM " + TEST_INDEX_ACCOUNT; - assertThat( - executeQuery(query, "jdbc"), - containsString("fantastic") - ); + assertThat(executeQuery(query, "jdbc"), containsString("fantastic")); } - @Ignore("The LOCATE function is not implemented in new SQL engine. https://github" - + ".com/opensearch-project/sql/issues/74") + @Ignore( + "The LOCATE function is not implemented in new SQL engine. 
https://github" + + ".com/opensearch-project/sql/issues/74") public void operatorLocate() throws IOException { - String query = "SELECT LOCATE('a', lastname, 0) FROM " + TEST_INDEX_ACCOUNT - + - " WHERE lastname IS NOT NULL GROUP BY LOCATE('a', lastname, 0) ORDER BY LOCATE('a', lastname, 0)"; - assertThat( - executeQuery(query, "jdbc"), containsString("\"type\": \"integer\"") - ); + String query = + "SELECT LOCATE('a', lastname, 0) FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname IS NOT NULL GROUP BY LOCATE('a', lastname, 0) ORDER BY LOCATE('a'," + + " lastname, 0)"; + assertThat(executeQuery(query, "jdbc"), containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT LOCATE('a', 'sampleName', 3) AS locate FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/locate/0", equalTo(8))) - ); + hitAny(kvInt("/fields/locate/0", equalTo(8)))); assertThat( executeQuery("SELECT LOCATE('a', 'sampleName') AS locate FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/locate/0", equalTo(2))) - ); + hitAny(kvInt("/fields/locate/0", equalTo(2)))); } @Test public void rtrim() throws IOException { assertThat( executeQuery("SELECT RTRIM(' sampleName ') AS rtrim FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/rtrim/0", equalTo(" sampleName"))) - ); + hitAny(kvString("/fields/rtrim/0", equalTo(" sampleName")))); } @Test public void ltrim() throws IOException { assertThat( executeQuery("SELECT LTRIM(' sampleName ') AS ltrim FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ltrim/0", equalTo("sampleName "))) - ); + hitAny(kvString("/fields/ltrim/0", equalTo("sampleName ")))); } - @Ignore("The ASCII function is not implemented in new SQL engine. https://github" - + ".com/opensearch-project/sql/issues/73") + @Ignore( + "The ASCII function is not implemented in new SQL engine. 
https://github" + + ".com/opensearch-project/sql/issues/73") public void ascii() throws IOException { assertThat( - executeQuery("SELECT ASCII(lastname) FROM " + TEST_INDEX_ACCOUNT - + - " WHERE lastname IS NOT NULL GROUP BY ASCII(lastname) ORDER BY ASCII(lastname) LIMIT 5", + executeQuery( + "SELECT ASCII(lastname) FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname IS NOT NULL GROUP BY ASCII(lastname) ORDER BY ASCII(lastname)" + + " LIMIT 5", "jdbc"), - containsString("\"type\": \"integer\"") - ); + containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT ASCII('sampleName') AS ascii FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ascii/0", equalTo(115))) - ); + hitAny(kvInt("/fields/ascii/0", equalTo(115)))); } /** - * The following tests for LEFT and RIGHT are ignored because the OpenSearch client fails to parse "LEFT"/"RIGHT" in - * the integTest + * The following tests for LEFT and RIGHT are ignored because the OpenSearch client fails to parse + * "LEFT"/"RIGHT" in the integTest */ @Ignore @Test @@ -674,13 +665,11 @@ public void left() throws IOException { assertThat( executeQuery( "SELECT LEFT('sample', 2) AS left FROM " + TEST_INDEX_ACCOUNT + " ORDER BY left"), - hitAny(kvString("/fields/left/0", equalTo("sa"))) - ); + hitAny(kvString("/fields/left/0", equalTo("sa")))); assertThat( executeQuery( "SELECT LEFT('sample', 20) AS left FROM " + TEST_INDEX_ACCOUNT + " ORDER BY left"), - hitAny(kvString("/fields/left/0", equalTo("sample"))) - ); + hitAny(kvString("/fields/left/0", equalTo("sample")))); } @Ignore @@ -689,20 +678,20 @@ public void right() throws IOException { assertThat( executeQuery( "SELECT RIGHT('elastic', 3) AS right FROM " + TEST_INDEX_ACCOUNT + " ORDER BY right"), - hitAny(kvString("/fields/right/0", equalTo("tic"))) - ); + hitAny(kvString("/fields/right/0", equalTo("tic")))); assertThat( executeQuery( "SELECT RIGHT('elastic', 20) AS right FROM " + TEST_INDEX_ACCOUNT + " ORDER BY right"), - 
hitAny(kvString("/fields/right/0", equalTo("elastic"))) - ); + hitAny(kvString("/fields/right/0", equalTo("elastic")))); } @Test public void ifFuncShouldPassJDBC() { - JSONObject response = executeJdbcRequest( - "SELECT IF(age > 30, 'True', 'False') AS Ages FROM " + TEST_INDEX_ACCOUNT - + " WHERE age IS NOT NULL GROUP BY Ages"); + JSONObject response = + executeJdbcRequest( + "SELECT IF(age > 30, 'True', 'False') AS Ages FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY Ages"); assertEquals("IF(age > 30, 'True', 'False')", response.query("/schema/0/name")); assertEquals("Ages", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -712,35 +701,33 @@ public void ifFuncShouldPassJDBC() { public void ifFuncWithBinaryComparisonAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(2 > 0, 'hello', 'world') AS ifTrue FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifTrue/0", equalTo("hello"))) - ); + hitAny(kvString("/fields/ifTrue/0", equalTo("hello")))); assertThat( executeQuery("SELECT IF(2 = 0, 'hello', 'world') AS ifFalse FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifFalse/0", equalTo("world"))) - ); + hitAny(kvString("/fields/ifFalse/0", equalTo("world")))); } @Test public void ifFuncWithBooleanExprInputAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(true, 1, 0) AS ifBoolean FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifBoolean/0", equalTo(1))) - ); + hitAny(kvInt("/fields/ifBoolean/0", equalTo(1)))); } @Test public void ifFuncWithNullInputAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(null, 1, 0) AS ifNull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifNull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/ifNull/0", equalTo(0)))); } @Test public void ifnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IFNULL(lastname, 'unknown') AS name FROM " + 
TEST_INDEX_ACCOUNT - + " GROUP BY name"); + JSONObject response = + executeJdbcRequest( + "SELECT IFNULL(lastname, 'unknown') AS name FROM " + + TEST_INDEX_ACCOUNT + + " GROUP BY name"); assertEquals("IFNULL(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -750,27 +737,23 @@ public void ifnullShouldPassJDBC() throws IOException { public void ifnullWithNotNullInputTest() throws IOException { assertThat( executeQuery("SELECT IFNULL('sample', 'IsNull') AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifnull/0", equalTo("sample"))) - ); + hitAny(kvString("/fields/ifnull/0", equalTo("sample")))); } @Test public void ifnullWithNullInputTest() throws IOException { assertThat( executeQuery("SELECT IFNULL(null, 10) AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifnull/0", equalTo(10))) - ); + hitAny(kvInt("/fields/ifnull/0", equalTo(10)))); assertThat( executeQuery("SELECT IFNULL('', 10) AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifnull/0", equalTo(""))) - ); + hitAny(kvString("/fields/ifnull/0", equalTo("")))); } @Test public void isnullShouldPassJDBC() { JSONObject response = - executeJdbcRequest( - "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + executeJdbcRequest("SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("boolean", response.query("/schema/0/type")); @@ -780,61 +763,57 @@ public void isnullShouldPassJDBC() { public void isnullWithNotNullInputTest() throws IOException { assertThat( executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( executeQuery("SELECT ISNULL('') AS isnull 
FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); } @Test public void isnullWithNullInputTest() throws IOException { assertThat( executeQuery("SELECT ISNULL(null) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } @Test public void isnullWithMathExpr() throws IOException { assertThat( executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } /** * Ignore this test case because painless doesn't allowlist String.split function. * - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void split_field() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " split(address,' ')[0],age from " + - TestsConstants.TEST_INDEX + " where address is not null " + - " limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + " split(address,' ')[0],age from " + + TestsConstants.TEST_INDEX + + " where address is not null " + + " limit 10 "; } @Test public void literal() throws Exception { - String query = "SELECT 10 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 10 from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits[0].getFields(), hasValue(contains(10))); } @Test public void literalWithDoubleValue() throws Exception { - String query = "SELECT 10.0 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query 
= "SELECT 10.0 from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits[0].getFields(), hasValue(contains(10.0))); @@ -842,8 +821,7 @@ public void literalWithDoubleValue() throws Exception { @Test public void literalWithAlias() throws Exception { - String query = "SELECT 10 as key " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 10 as key from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits.length, is(1)); @@ -852,8 +830,7 @@ public void literalWithAlias() throws Exception { @Test public void literalMultiField() throws Exception { - String query = "SELECT 1, 2 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 1, 2 from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits.length, is(1)); @@ -863,10 +840,11 @@ public void literalMultiField() throws Exception { private SearchHits query(String query) throws IOException { final String rsp = executeQueryWithStringOutput(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(rsp)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(rsp)); return SearchResponse.fromXContent(parser).getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java index 58e55c4101..4479abdcc6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static com.google.common.base.Strings.isNullOrEmpty; @@ -68,9 +67,7 
@@ import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.datasource.model.DataSourceMetadata; -/** - * OpenSearch Rest integration test base for SQL testing - */ +/** OpenSearch Rest integration test base for SQL testing */ public abstract class SQLIntegTestCase extends OpenSearchSQLRestTestCase { public static final String PERSISTENT = "persistent"; @@ -102,12 +99,14 @@ protected boolean preserveClusterUponCompletion() { } /** + *
    * We need to be able to dump the jacoco coverage before cluster is shut down.
    * The new internal testing framework removed some of the gradle tasks we were listening to
    * to choose a good time to do it. This will dump the executionData to file after each test.
    * TODO: This is also currently just overwriting integTest.exec with the updated execData without
    * resetting after writing each time. This can be improved to either write an exec file per test
    * or by letting jacoco append to the file
+   * 
*/ public interface IProxy { byte[] getExecutionData(boolean reset); @@ -128,10 +127,12 @@ public static void dumpCoverage() { String serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi"; try (JMXConnector connector = JMXConnectorFactory.connect(new JMXServiceURL(serverUrl))) { - IProxy proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.getMBeanServerConnection(), new ObjectName("org.jacoco:type=Runtime"), - IProxy.class, - false); + IProxy proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.getMBeanServerConnection(), + new ObjectName("org.jacoco:type=Runtime"), + IProxy.class, + false); Path path = Paths.get(jacocoBuildPath + "/integTest.exec"); Files.write(path, proxy.getExecutionData(false)); @@ -141,9 +142,10 @@ public static void dumpCoverage() { } /** - * As JUnit JavaDoc says: - * "The @AfterClass methods declared in superclasses will be run after those of the current class." - * So this method is supposed to run before closeClients() in parent class. + * As JUnit JavaDoc says:
+ "The @AfterClass methods declared in superclasses will be run after those of the current class."
+ So this method is supposed to run before closeClients() in parent class. + * class. */ @AfterClass public static void cleanUpIndices() throws IOException { @@ -155,13 +157,16 @@ public static void cleanUpIndices() throws IOException { protected void setQuerySizeLimit(Integer limit) throws IOException { updateClusterSettings( - new ClusterSetting("transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), limit.toString())); + new ClusterSetting( + "transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), limit.toString())); } protected void resetQuerySizeLimit() throws IOException { updateClusterSettings( - new ClusterSetting("transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), DEFAULT_QUERY_SIZE_LIMIT - .toString())); + new ClusterSetting( + "transient", + Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), + DEFAULT_QUERY_SIZE_LIMIT.toString())); } protected static void wipeAllClusterSettings() throws IOException { @@ -178,19 +183,16 @@ protected void setMaxResultWindow(String indexName, Integer window) throws IOExc } protected void resetMaxResultWindow(String indexName) throws IOException { - updateIndexSettings(indexName, - "{ \"index\": { \"max_result_window\": " + DEFAULT_MAX_RESULT_WINDOW + " } }"); + updateIndexSettings( + indexName, "{ \"index\": { \"max_result_window\": " + DEFAULT_MAX_RESULT_WINDOW + " } }"); } - /** - * Provide for each test to load test index, data and other setup work - */ - protected void init() throws Exception { - } + /** Provide for each test to load test index, data and other setup work */ + protected void init() throws Exception {} /** - * Make it thread-safe in case tests are running in parallel but does not guarantee - * if test like DeleteIT that mutates cluster running in parallel. + * Make it thread-safe in case tests are running in parallel but does not guarantee if test like + * DeleteIT that mutates cluster running in parallel. 
*/ protected synchronized void loadIndex(Index index, RestClient client) throws IOException { String indexName = index.getName(); @@ -304,8 +306,9 @@ protected Request buildGetEndpointRequest(final String sqlQuery) { Assert.fail(utf8CharsetName + " not available"); } - final String requestUrl = String.format(Locale.ROOT, "%s?sql=%s&format=%s", QUERY_API_ENDPOINT, - urlEncodedQuery, "json"); + final String requestUrl = + String.format( + Locale.ROOT, "%s?sql=%s&format=%s", QUERY_API_ENDPOINT, urlEncodedQuery, "json"); return new Request("GET", requestUrl); } @@ -344,7 +347,8 @@ private String executeRequest(final String requestBody, final boolean isExplainQ return executeRequest(sqlRequest); } - protected static String executeRequest(final Request request, RestClient client) throws IOException { + protected static String executeRequest(final Request request, RestClient client) + throws IOException { Response response = client.performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response); @@ -373,10 +377,12 @@ protected JSONObject executeCursorCloseQuery(final String cursor) throws IOExcep return new JSONObject(executeRequest(sqlRequest)); } - protected static JSONObject updateClusterSettings(ClusterSetting setting, RestClient client) throws IOException { + protected static JSONObject updateClusterSettings(ClusterSetting setting, RestClient client) + throws IOException { Request request = new Request("PUT", "/_cluster/settings"); - String persistentSetting = String.format(Locale.ROOT, - "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); + String persistentSetting = + String.format( + Locale.ROOT, "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); request.setJsonEntity(persistentSetting); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); restOptionsBuilder.addHeader("Content-Type", "application/json"); @@ -413,11 +419,7 @@ ClusterSetting 
nullify() { @Override public String toString() { - return "ClusterSetting{" + - "type='" + type + '\'' + - ", path='" + name + '\'' + - ", value='" + value + '\'' + - '}'; + return String.format("ClusterSetting{type='%s', path='%s', value='%s'}", type, name, value); } } @@ -438,10 +440,8 @@ protected String makeRequest(String query) { } protected String makeRequest(String query, int fetch_size) { - return String.format("{\n" + - " \"fetch_size\": \"%s\",\n" + - " \"query\": \"%s\"\n" + - "}", fetch_size, query); + return String.format( + "{ \"fetch_size\": \"%s\", \"query\": \"%s\" }", fetch_size, query); } protected String makeFetchLessRequest(String query) { @@ -500,7 +500,6 @@ protected static Request getFetchDataSourceRequest(String name) { return request; } - protected static Request getDeleteDataSourceRequest(String name) { Request request = new Request("DELETE", "/_plugins/_query/_datasources" + "/" + name); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -509,175 +508,196 @@ protected static Request getDeleteDataSourceRequest(String name) { return request; } - /** - * Enum for associating test index with relevant mapping and data. - */ + /** Enum for associating test index with relevant mapping and data. 
*/ public enum Index { - ONLINE(TestsConstants.TEST_INDEX_ONLINE, - "online", - null, - "src/test/resources/online.json"), - ACCOUNT(TestsConstants.TEST_INDEX_ACCOUNT, + ONLINE(TestsConstants.TEST_INDEX_ONLINE, "online", null, "src/test/resources/online.json"), + ACCOUNT( + TestsConstants.TEST_INDEX_ACCOUNT, "account", getAccountIndexMapping(), "src/test/resources/accounts.json"), - PHRASE(TestsConstants.TEST_INDEX_PHRASE, + PHRASE( + TestsConstants.TEST_INDEX_PHRASE, "phrase", getPhraseIndexMapping(), "src/test/resources/phrases.json"), - DOG(TestsConstants.TEST_INDEX_DOG, - "dog", - getDogIndexMapping(), - "src/test/resources/dogs.json"), - DOGS2(TestsConstants.TEST_INDEX_DOG2, + DOG(TestsConstants.TEST_INDEX_DOG, "dog", getDogIndexMapping(), "src/test/resources/dogs.json"), + DOGS2( + TestsConstants.TEST_INDEX_DOG2, "dog", getDogs2IndexMapping(), "src/test/resources/dogs2.json"), - DOGS3(TestsConstants.TEST_INDEX_DOG3, + DOGS3( + TestsConstants.TEST_INDEX_DOG3, "dog", getDogs3IndexMapping(), "src/test/resources/dogs3.json"), - DOGSSUBQUERY(TestsConstants.TEST_INDEX_DOGSUBQUERY, + DOGSSUBQUERY( + TestsConstants.TEST_INDEX_DOGSUBQUERY, "dog", getDogIndexMapping(), "src/test/resources/dogsubquery.json"), - PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, - "people", - null, - "src/test/resources/peoples.json"), - PEOPLE2(TestsConstants.TEST_INDEX_PEOPLE2, + PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, "people", null, "src/test/resources/peoples.json"), + PEOPLE2( + TestsConstants.TEST_INDEX_PEOPLE2, "people", getPeople2IndexMapping(), "src/test/resources/people2.json"), - GAME_OF_THRONES(TestsConstants.TEST_INDEX_GAME_OF_THRONES, + GAME_OF_THRONES( + TestsConstants.TEST_INDEX_GAME_OF_THRONES, "gotCharacters", getGameOfThronesIndexMapping(), "src/test/resources/game_of_thrones_complex.json"), - SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, - "systems", - null, - "src/test/resources/systems.json"), - ODBC(TestsConstants.TEST_INDEX_ODBC, + SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, 
"systems", null, "src/test/resources/systems.json"), + ODBC( + TestsConstants.TEST_INDEX_ODBC, "odbc", getOdbcIndexMapping(), "src/test/resources/odbc-date-formats.json"), - LOCATION(TestsConstants.TEST_INDEX_LOCATION, + LOCATION( + TestsConstants.TEST_INDEX_LOCATION, "location", getLocationIndexMapping(), "src/test/resources/locations.json"), - LOCATION_TWO(TestsConstants.TEST_INDEX_LOCATION2, + LOCATION_TWO( + TestsConstants.TEST_INDEX_LOCATION2, "location2", getLocationIndexMapping(), "src/test/resources/locations2.json"), - NESTED(TestsConstants.TEST_INDEX_NESTED_TYPE, + NESTED( + TestsConstants.TEST_INDEX_NESTED_TYPE, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects.json"), - NESTED_WITHOUT_ARRAYS(TestsConstants.TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS, + NESTED_WITHOUT_ARRAYS( + TestsConstants.TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS, "nestedTypeWithoutArrays", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_without_arrays.json"), - NESTED_WITH_QUOTES(TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, + NESTED_WITH_QUOTES( + TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_quotes_in_values.json"), - EMPLOYEE_NESTED(TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, + EMPLOYEE_NESTED( + TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, "_doc", getEmployeeNestedTypeIndexMapping(), "src/test/resources/employee_nested.json"), - JOIN(TestsConstants.TEST_INDEX_JOIN_TYPE, + JOIN( + TestsConstants.TEST_INDEX_JOIN_TYPE, "joinType", getJoinTypeIndexMapping(), "src/test/resources/join_objects.json"), - UNEXPANDED_OBJECT(TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT, + UNEXPANDED_OBJECT( + TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT, "unexpandedObject", getUnexpandedObjectIndexMapping(), "src/test/resources/unexpanded_objects.json"), - BANK(TestsConstants.TEST_INDEX_BANK, + BANK( + TestsConstants.TEST_INDEX_BANK, "account", getBankIndexMapping(), "src/test/resources/bank.json"), - 
BANK_TWO(TestsConstants.TEST_INDEX_BANK_TWO, + BANK_TWO( + TestsConstants.TEST_INDEX_BANK_TWO, "account_two", getBankIndexMapping(), "src/test/resources/bank_two.json"), - BANK_WITH_NULL_VALUES(TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + BANK_WITH_NULL_VALUES( + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "account_null", getBankWithNullValuesIndexMapping(), "src/test/resources/bank_with_null_values.json"), - BANK_WITH_STRING_VALUES(TestsConstants.TEST_INDEX_STRINGS, + BANK_WITH_STRING_VALUES( + TestsConstants.TEST_INDEX_STRINGS, "strings", getStringIndexMapping(), "src/test/resources/strings.json"), - BANK_CSV_SANITIZE(TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE, + BANK_CSV_SANITIZE( + TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE, "account", getBankIndexMapping(), "src/test/resources/bank_csv_sanitize.json"), - BANK_RAW_SANITIZE(TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE, - "account", - getBankIndexMapping(), - "src/test/resources/bank_raw_sanitize.json"), - ORDER(TestsConstants.TEST_INDEX_ORDER, + BANK_RAW_SANITIZE( + TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE, + "account", + getBankIndexMapping(), + "src/test/resources/bank_raw_sanitize.json"), + ORDER( + TestsConstants.TEST_INDEX_ORDER, "_doc", getOrderIndexMapping(), "src/test/resources/order.json"), - WEBLOG(TestsConstants.TEST_INDEX_WEBLOG, + WEBLOG( + TestsConstants.TEST_INDEX_WEBLOG, "weblog", getWeblogsIndexMapping(), "src/test/resources/weblogs.json"), - DATE(TestsConstants.TEST_INDEX_DATE, + DATE( + TestsConstants.TEST_INDEX_DATE, "dates", getDateIndexMapping(), "src/test/resources/dates.json"), - DATETIME(TestsConstants.TEST_INDEX_DATE_TIME, + DATETIME( + TestsConstants.TEST_INDEX_DATE_TIME, "_doc", getDateTimeIndexMapping(), "src/test/resources/datetime.json"), - NESTED_SIMPLE(TestsConstants.TEST_INDEX_NESTED_SIMPLE, + NESTED_SIMPLE( + TestsConstants.TEST_INDEX_NESTED_SIMPLE, "_doc", getNestedSimpleIndexMapping(), "src/test/resources/nested_simple.json"), - 
DEEP_NESTED(TestsConstants.TEST_INDEX_DEEP_NESTED, + DEEP_NESTED( + TestsConstants.TEST_INDEX_DEEP_NESTED, "_doc", getDeepNestedIndexMapping(), "src/test/resources/deep_nested_index_data.json"), - DATA_TYPE_NUMERIC(TestsConstants.TEST_INDEX_DATATYPE_NUMERIC, + DATA_TYPE_NUMERIC( + TestsConstants.TEST_INDEX_DATATYPE_NUMERIC, "_doc", getDataTypeNumericIndexMapping(), "src/test/resources/datatypes_numeric.json"), - DATA_TYPE_NONNUMERIC(TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC, + DATA_TYPE_NONNUMERIC( + TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC, "_doc", getDataTypeNonnumericIndexMapping(), "src/test/resources/datatypes.json"), - BEER(TestsConstants.TEST_INDEX_BEER, - "beer", - null, - "src/test/resources/beer.stackexchange.json"), - NULL_MISSING(TestsConstants.TEST_INDEX_NULL_MISSING, + BEER( + TestsConstants.TEST_INDEX_BEER, "beer", null, "src/test/resources/beer.stackexchange.json"), + NULL_MISSING( + TestsConstants.TEST_INDEX_NULL_MISSING, "null_missing", getMappingFile("null_missing_index_mapping.json"), "src/test/resources/null_missing.json"), - CALCS(TestsConstants.TEST_INDEX_CALCS, + CALCS( + TestsConstants.TEST_INDEX_CALCS, "calcs", getMappingFile("calcs_index_mappings.json"), "src/test/resources/calcs.json"), - DATE_FORMATS(TestsConstants.TEST_INDEX_DATE_FORMATS, + DATE_FORMATS( + TestsConstants.TEST_INDEX_DATE_FORMATS, "date_formats", getMappingFile("date_formats_index_mapping.json"), "src/test/resources/date_formats.json"), - WILDCARD(TestsConstants.TEST_INDEX_WILDCARD, + WILDCARD( + TestsConstants.TEST_INDEX_WILDCARD, "wildcard", getMappingFile("wildcard_index_mappings.json"), "src/test/resources/wildcard.json"), - DATASOURCES(TestsConstants.DATASOURCES, + DATASOURCES( + TestsConstants.DATASOURCES, "datasource", getMappingFile("datasources_index_mappings.json"), "src/test/resources/datasources.json"), - MULTI_NESTED(TestsConstants.TEST_INDEX_MULTI_NESTED_TYPE, + MULTI_NESTED( + TestsConstants.TEST_INDEX_MULTI_NESTED_TYPE, "multi_nested", 
getMappingFile("multi_nested.json"), "src/test/resources/multi_nested_objects.json"), - NESTED_WITH_NULLS(TestsConstants.TEST_INDEX_NESTED_WITH_NULLS, + NESTED_WITH_NULLS( + TestsConstants.TEST_INDEX_NESTED_WITH_NULLS, "multi_nested", getNestedTypeIndexMapping(), "src/test/resources/nested_with_nulls.json"); @@ -709,7 +729,5 @@ public String getMapping() { public String getDataSet() { return this.dataSet; } - - } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java index b28336c482..fa86bbbc22 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -20,7 +19,8 @@ public class ShowIT extends SQLIntegTestCase { @Override protected void init() { - // Note: not using the existing TEST_INDEX_* indices, since underscore in the names causes issues + // Note: not using the existing TEST_INDEX_* indices, since underscore in the names causes + // issues createEmptyIndexIfNotExist("abcdefg"); createEmptyIndexIfNotExist("abcdefghijk"); createEmptyIndexIfNotExist("abcdijk"); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java index a6a1a1cfe9..bf288262b6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -31,32 +30,44 @@ protected void init() throws Exception { @Test public void includeTest() throws IOException { - SearchHits response = query(String.format( - "SELECT 
include('*name','*ge'),include('b*'),include('*ddre*'),include('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT include('*name','*ge'),include('b*'),include('*ddre*'),include('gender')" + + " FROM %s LIMIT 1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { - Assert.assertTrue(field.endsWith("name") || field.endsWith("ge") || field.startsWith("b") || - field.contains("ddre") || field.equals("gender")); + Assert.assertTrue( + field.endsWith("name") + || field.endsWith("ge") + || field.startsWith("b") + || field.contains("ddre") + || field.equals("gender")); } } - } @Test public void excludeTest() throws IOException { - SearchHits response = query(String.format( - "SELECT exclude('*name','*ge'),exclude('b*'),exclude('*ddre*'),exclude('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT exclude('*name','*ge'),exclude('b*'),exclude('*ddre*'),exclude('gender')" + + " FROM %s LIMIT 1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { Assert.assertFalse( - field.endsWith("name") || field.endsWith("ge") || field.startsWith("b") || - field.contains("ddre") || field.equals("gender")); + field.endsWith("name") + || field.endsWith("ge") + || field.startsWith("b") + || field.contains("ddre") + || field.equals("gender")); } } } @@ -64,15 +75,18 @@ public void excludeTest() throws IOException { @Test public void allTest() throws IOException { - SearchHits response = query(String.format( - "SELECT exclude('*name','*ge'),include('b*'),exclude('*ddre*'),include('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT exclude('*name','*ge'),include('b*'),exclude('*ddre*'),include('gender')" + + " FROM %s LIMIT 
1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { - Assert - .assertFalse(field.endsWith("name") || field.endsWith("ge") || field.contains("ddre")); + Assert.assertFalse( + field.endsWith("name") || field.endsWith("ge") || field.contains("ddre")); Assert.assertTrue(field.startsWith("b") || field.equals("gender")); } } @@ -81,11 +95,11 @@ public void allTest() throws IOException { private SearchHits query(String query) throws IOException { final JSONObject jsonObject = executeQuery(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java index a330614d21..2d94dc6a3b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java @@ -72,8 +72,7 @@ public NowLikeFunctionIT( @Name("constValue") Boolean constValue, @Name("referenceGetter") Supplier referenceGetter, @Name("parser") BiFunction parser, - @Name("serializationPatternStr") String serializationPatternStr - ) { + @Name("serializationPatternStr") String serializationPatternStr) { this.name = name; this.hasFsp = hasFsp; this.hasShortcut = hasShortcut; @@ -85,56 +84,104 @@ public NowLikeFunctionIT( @ParametersFactory(argumentFormatting = "%1$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("now", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) 
LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("current_timestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtimestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtime", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("sysdate", true, false, false, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("curtime", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("current_time", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("curdate", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("current_date", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_date", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalDate()), - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_time", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalTime()), - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("utc_timestamp", false, false, true, - (Supplier) (org.opensearch.sql.sql.NowLikeFunctionIT::utcDateTimeNow), - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss") - )); + return Arrays.asList( + $$( + $( + "now", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "current_timestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtimestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtime", + 
false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "sysdate", + true, + false, + false, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "curtime", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "current_time", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "curdate", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "current_date", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_date", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalDate()), + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_time", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalTime()), + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "utc_timestamp", + false, + false, + true, + (Supplier) (org.opensearch.sql.sql.NowLikeFunctionIT::utcDateTimeNow), + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"))); } private long getDiff(Temporal sample, Temporal reference) { @@ -146,7 +193,8 @@ private long getDiff(Temporal sample, Temporal reference) { @Test public void testNowLikeFunctions() throws IOException { - var serializationPattern = new DateTimeFormatterBuilder() + var serializationPattern = + new DateTimeFormatterBuilder() .appendPattern(serializationPatternStr) .optionalStart() .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true) @@ -156,42 +204,57 @@ public void testNowLikeFunctions() throws IOException { double delta = 2d; // acceptable time diff, secs if (reference instanceof LocalDate) delta = 1d; // Max date delta could be 1 if test runs on the very edge of two days - // We ignore probability of a test run on edge of month or year to 
simplify the checks + // We ignore probability of a test run on edge of month or year to simplify the checks - var calls = new ArrayList() {{ - add(name + "()"); - }}; - if (hasShortcut) - calls.add(name); - if (hasFsp) - calls.add(name + "(0)"); + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; + if (hasShortcut) calls.add(name); + if (hasFsp) calls.add(name + "(0)"); // Column order is: func(), func, func(0) // shortcut ^ fsp ^ // Query looks like: // source=people2 | eval `now()`=now() | fields `now()`; - JSONObject result = executeQuery("source=" + TEST_INDEX_PEOPLE2 - + " | eval " + calls.stream().map(c -> String.format("`%s`=%s", c, c)).collect(Collectors.joining(",")) - + " | fields " + calls.stream().map(c -> String.format("`%s`", c)).collect(Collectors.joining(","))); + JSONObject result = + executeQuery( + "source=" + + TEST_INDEX_PEOPLE2 + + " | eval " + + calls.stream() + .map(c -> String.format("`%s`=%s", c, c)) + .collect(Collectors.joining(",")) + + " | fields " + + calls.stream() + .map(c -> String.format("`%s`", c)) + .collect(Collectors.joining(","))); var rows = result.getJSONArray("datarows"); JSONArray firstRow = rows.getJSONArray(0); for (int i = 0; i < rows.length(); i++) { var row = rows.getJSONArray(i); - if (constValue) - assertTrue(firstRow.similar(row)); + if (constValue) assertTrue(firstRow.similar(row)); int column = 0; - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); if (hasShortcut) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); } if (hasFsp) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column), serializationPattern)), delta); + assertEquals( + 0, + 
getDiff(reference, parser.apply(row.getString(column), serializationPattern)), + delta); } } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java index 6178552728..501d4bcb5e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.SQLIntegTestCase.Index.DEEP_NESTED; @@ -26,55 +25,45 @@ public void init() throws IOException { @Test public void select_object_field() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | " - + "fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s | fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } @Test public void compare_object_field_in_where() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| where city.name = 'Seattle' " - + "| fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s " + + "| where city.name = 'Seattle' " + + "| fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), 
schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } @Test public void group_object_field_in_stats() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| stats count() by city.name", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("count()", "integer"), - schema("city.name", "string")); - verifyDataRows(result, - rows(1, "Seattle")); + JSONObject result = + executeQuery( + String.format("source=%s | stats count() by city.name", TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("count()", "integer"), schema("city.name", "string")); + verifyDataRows(result, rows(1, "Seattle")); } @Test public void sort_by_object_field() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| sort city.name" - + "| fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s | sort city.name | fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java index e6ca958991..42ed08b00c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -26,40 +25,28 @@ public void init() throws IOException { @Test public void testAddOperator() throws IOException { JSONObject result = - 
executeQuery( - String.format( - "source=%s | where age = 31 + 1 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 31 + 1 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testSubtractOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 33 - 1 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 33 - 1 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testMultiplyOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 16 * 2 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 16 * 2 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testDivideOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age / 2 = 16 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age / 2 = 16 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(33)); } @@ -67,9 +54,7 @@ public void testDivideOperator() throws IOException { public void testModuleOperator() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | where age %s 32 = 0 | fields age", - TEST_INDEX_BANK, "%")); + String.format("source=%s | where age %s 32 = 0 | fields age", TEST_INDEX_BANK, "%")); verifyDataRows(result, rows(32)); } @@ -78,11 +63,9 @@ public void testArithmeticOperatorWithNullValue() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | eval f = age + 0 | fields f", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + "source=%s | eval f = age + 0 | fields f", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows( - result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(JSONObject.NULL), - rows(34)); + result, rows(32), 
rows(36), rows(28), rows(33), rows(36), rows(JSONObject.NULL), rows(34)); } @Test @@ -92,8 +75,14 @@ public void testArithmeticOperatorWithMissingValue() throws IOException { String.format( "source=%s | eval f = balance * 1 | fields f", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows( - result, rows(39225), rows(32838), rows(4180), rows(48086), rows(JSONObject.NULL), - rows(JSONObject.NULL), rows(JSONObject.NULL)); + result, + rows(39225), + rows(32838), + rows(4180), + rows(48086), + rows(JSONObject.NULL), + rows(JSONObject.NULL), + rows(JSONObject.NULL)); } @Test @@ -101,8 +90,7 @@ public void testMultipleArithmeticOperators() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | where (age+2) * 3 / 2 - 1 = 50 | fields age", - TEST_INDEX_BANK)); + "source=%s | where (age+2) * 3 / 2 - 1 = 50 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @@ -127,14 +115,12 @@ public void testAndOperator() throws IOException { public void testOrOperator() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | where age=32 or age=34 | fields age", TEST_INDEX_BANK)); + String.format("source=%s | where age=32 or age=34 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(34)); result = executeQuery( - String.format( - "source=%s | where age=34 or age=32| fields age", TEST_INDEX_BANK)); + String.format("source=%s | where age=34 or age=32| fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(34)); } @@ -158,92 +144,64 @@ public void testXorOperator() throws IOException { @Test public void testNotOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s not age > 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s not age > 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(32)); } @Test public void testEqualOperator() throws IOException { JSONObject result = - executeQuery( 
- String.format( - "source=%s age = 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age = 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); - result = - executeQuery( - String.format( - "source=%s 32 = age | fields age", - TEST_INDEX_BANK)); + result = executeQuery(String.format("source=%s 32 = age | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testNotEqualOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age != 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age != 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(33), rows(34), rows(36), rows(36), rows(39)); - result = - executeQuery( - String.format( - "source=%s 32 != age | fields age", - TEST_INDEX_BANK)); + result = executeQuery(String.format("source=%s 32 != age | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(33), rows(34), rows(36), rows(36), rows(39)); } @Test public void testLessOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age < 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age < 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28)); } @Test public void testLteOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age <= 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age <= 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(32)); } @Test public void testGreaterOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age > 36 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age > 36 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(39)); } @Test public void testGteOperator() throws IOException { JSONObject result 
= - executeQuery( - String.format( - "source=%s age >= 36 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age >= 36 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(36), rows(36), rows(39)); } @Test public void testLikeFunction() throws IOException { JSONObject result = - executeQuery(String.format("source=%s like(firstname, 'Hatti_') | fields firstname", - TEST_INDEX_BANK)); + executeQuery( + String.format( + "source=%s like(firstname, 'Hatti_') | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie")); } @@ -251,8 +209,8 @@ public void testLikeFunction() throws IOException { public void testBinaryPredicateWithNullValue() throws IOException { JSONObject result = executeQuery( - String.format("source=%s | where age >= 36 | fields age", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + String.format( + "source=%s | where age >= 36 | fields age", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows(result, rows(36), rows(36)); } @@ -260,7 +218,8 @@ public void testBinaryPredicateWithNullValue() throws IOException { public void testBinaryPredicateWithMissingValue() throws IOException { JSONObject result = executeQuery( - String.format("source=%s | where balance > 40000 | fields balance", + String.format( + "source=%s | where balance > 40000 | fields balance", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows(result, rows(48086)); } @@ -269,12 +228,13 @@ private void queryExecutionShouldThrowExceptionDueToNullOrMissingValue( String query, String... 
errorMsgs) { try { executeQuery(query); - fail("Expected to throw ExpressionEvaluationException, but none was thrown for query: " - + query); + fail( + "Expected to throw ExpressionEvaluationException, but none was thrown for query: " + + query); } catch (ResponseException e) { String errorMsg = e.getMessage(); assertTrue(errorMsg.contains("ExpressionEvaluationException")); - for (String msg: errorMsgs) { + for (String msg : errorMsgs) { assertTrue(errorMsg.contains(msg)); } } catch (IOException e) { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java index bcf183e9c6..459788021d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestUtils.getResponseBody; @@ -20,9 +19,7 @@ import org.opensearch.client.Response; import org.opensearch.sql.legacy.SQLIntegTestCase; -/** - * OpenSearch Rest integration test base for PPL testing. - */ +/** OpenSearch Rest integration test base for PPL testing. 
*/ public abstract class PPLIntegTestCase extends SQLIntegTestCase { protected JSONObject executeQuery(String query) throws IOException { @@ -42,8 +39,10 @@ protected String explainQueryToString(String query) throws IOException { } protected String executeCsvQuery(String query, boolean sanitize) throws IOException { - Request request = buildRequest(query, - QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&sanitize=%b", sanitize)); + Request request = + buildRequest( + query, + QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&sanitize=%b", sanitize)); Response response = client().performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response, true); @@ -65,8 +64,9 @@ protected Request buildRequest(String query, String endpoint) { protected static JSONObject updateClusterSettings(ClusterSetting setting) throws IOException { Request request = new Request("PUT", "/_cluster/settings"); - String persistentSetting = String.format(Locale.ROOT, - "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); + String persistentSetting = + String.format( + Locale.ROOT, "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); request.setJsonEntity(persistentSetting); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); restOptionsBuilder.addHeader("Content-Type", "application/json"); @@ -91,11 +91,7 @@ SQLIntegTestCase.ClusterSetting nullify() { @Override public String toString() { - return "ClusterSetting{" - + "type='" + type + '\'' - + ", path='" + name + '\'' - + ", value='" + value + '\'' - + '}'; + return String.format("ClusterSetting{type='%s', path='%s', value'%s'}", type, name, value); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java index df7b464118..0c638be1e7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java 
+++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.hamcrest.Matchers.equalTo; @@ -28,8 +27,7 @@ import org.opensearch.sql.util.TestUtils; public class PPLPluginIT extends PPLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private static final String PERSISTENT = "persistent"; @@ -86,9 +84,11 @@ public void sqlEnableSettingsTest() throws IOException { assertThat(result.getInt("status"), equalTo(400)); JSONObject error = result.getJSONObject("error"); assertThat(error.getString("reason"), equalTo("Invalid Query")); - assertThat(error.getString("details"), equalTo( - "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is " - + "false")); + assertThat( + error.getString("details"), + equalTo( + "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is " + + "false")); assertThat(error.getString("type"), equalTo("IllegalAccessException")); // reset the setting diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java index 36fcb4bf3b..7f25f6f160 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -23,9 +22,10 @@ public void init() throws IOException { @Test public void testParseCommand() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | parse email '.+@(?.+)' | fields email, host", - TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email 
'.+@(?.+)' | fields email, host", TEST_INDEX_BANK)); verifyOrder( result, rows("amberduke@pyrami.com", "pyrami.com"), @@ -39,8 +39,10 @@ public void testParseCommand() throws IOException { @Test public void testParseCommandReplaceOriginalField() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | parse email '.+@(?.+)' | fields email", TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | fields email", TEST_INDEX_BANK)); verifyOrder( result, rows("pyrami.com"), @@ -54,8 +56,12 @@ public void testParseCommandReplaceOriginalField() throws IOException { @Test public void testParseCommandWithOtherRunTimeFields() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | parse email '.+@(?.+)' | " - + "eval eval_result=1 | fields host, eval_result", TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | " + + "eval eval_result=1 | fields host, eval_result", + TEST_INDEX_BANK)); verifyOrder( result, rows("pyrami.com", 1), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java index 59aade8bbd..a7f638b3dd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java @@ -13,87 +13,83 @@ import org.junit.Test; public class PositionFunctionIT extends PPLIntegTestCase { - @Override - public void init() throws IOException { - loadIndex(Index.CALCS); - } - - @Test - public void test_position_function() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('ON', str1) | fields f"; - - var result = executeQuery(query); - - assertEquals(17, result.getInt("total")); - verifyDataRows(result, - rows(7), rows(7), - rows(2), rows(0), - rows(0), rows(0), - rows(0), rows(0), 
- rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0)); - } - - @Test - public void test_position_function_with_fields_only() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position(str3 IN str2) | where str2 IN ('one', 'two', 'three')| fields f"; - - var result = executeQuery(query); - - assertEquals(3, result.getInt("total")); - verifyDataRows(result, rows(3), rows(0), rows(4)); - } - - @Test - public void test_position_function_with_string_literals() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('world' IN 'hello world') | where str2='one' | fields f"; - - var result = executeQuery(query); - - assertEquals(1, result.getInt("total")); - verifyDataRows(result, rows(7)); - } - - @Test - public void test_position_function_with_nulls() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('ee' IN str2) | where isnull(str2) | fields str2,f"; - - var result = executeQuery(query); - - assertEquals(4, result.getInt("total")); - verifyDataRows(result, - rows(null, null), - rows(null, null), - rows(null, null), - rows(null, null)); - } - - @Test - public void test_position_function_with_function_as_arg() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position(upper(str3) IN str1) | where like(str1, 'BINDING SUPPLIES') | fields f"; - - var result = executeQuery(query); - - assertEquals(1, result.getInt("total")); - verifyDataRows(result, rows(15)); - } - - @Test - public void test_position_function_with_function_in_where_clause() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | where position(str3 IN str2)=1 | fields str2"; - - var result = executeQuery(query); - - assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("eight"), rows("eleven")); - } + @Override + public void init() throws IOException { + loadIndex(Index.CALCS); + } + + @Test + 
public void test_position_function() throws IOException { + String query = "source=" + TEST_INDEX_CALCS + " | eval f=position('ON', str1) | fields f"; + + var result = executeQuery(query); + + assertEquals(17, result.getInt("total")); + verifyDataRows( + result, rows(7), rows(7), rows(2), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), + rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0)); + } + + @Test + public void test_position_function_with_fields_only() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position(str3 IN str2) | where str2 IN ('one', 'two', 'three')| fields f"; + + var result = executeQuery(query); + + assertEquals(3, result.getInt("total")); + verifyDataRows(result, rows(3), rows(0), rows(4)); + } + + @Test + public void test_position_function_with_string_literals() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position('world' IN 'hello world') | where str2='one' | fields f"; + + var result = executeQuery(query); + + assertEquals(1, result.getInt("total")); + verifyDataRows(result, rows(7)); + } + + @Test + public void test_position_function_with_nulls() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position('ee' IN str2) | where isnull(str2) | fields str2,f"; + + var result = executeQuery(query); + + assertEquals(4, result.getInt("total")); + verifyDataRows(result, rows(null, null), rows(null, null), rows(null, null), rows(null, null)); + } + + @Test + public void test_position_function_with_function_as_arg() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position(upper(str3) IN str1) | where like(str1, 'BINDING SUPPLIES') |" + + " fields f"; + + var result = executeQuery(query); + + assertEquals(1, result.getInt("total")); + verifyDataRows(result, rows(15)); + } + + @Test + public void test_position_function_with_function_in_where_clause() throws IOException { + 
String query = "source=" + TEST_INDEX_CALCS + " | where position(str3 IN str2)=1 | fields str2"; + + var result = executeQuery(query); + + assertEquals(2, result.getInt("total")); + verifyDataRows(result, rows("eight"), rows("eleven")); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java index 011f91eed5..8d72f02e29 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java @@ -40,10 +40,10 @@ public class PrometheusDataSourceCommandsIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. 
+ * * @throws InterruptedException */ @BeforeClass @@ -54,8 +54,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -71,15 +74,15 @@ protected void deleteDataSourceMetadata() throws IOException { @Test @SneakyThrows public void testSourceMetricCommand() { - JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total"); - verifySchema(response, + JSONObject response = executeQuery("source=my_prometheus.prometheus_http_requests_total"); + verifySchema( + response, schema(VALUE, "double"), - schema(TIMESTAMP, "timestamp"), - schema("handler", "string"), - schema("code", "string"), - schema("instance", "string"), - schema("job", "string")); + schema(TIMESTAMP, "timestamp"), + schema("handler", "string"), + schema("code", "string"), + schema("instance", "string"), + schema("job", "string")); Assertions.assertTrue(response.getInt("size") > 0); Assertions.assertEquals(6, response.getJSONArray("datarows").getJSONArray(0).length()); JSONArray firstRow = response.getJSONArray("datarows").getJSONArray(0); @@ -93,19 +96,20 @@ public void testSourceMetricCommand() { @SneakyThrows public void testSourceMetricCommandWithTimestamp() { SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - String query = "source=my_prometheus.prometheus_http_requests_total | where @timestamp > '" - + format.format(new 
Date(System.currentTimeMillis() - 3600 * 1000)) - + "' | sort + @timestamp | head 5"; + String query = + "source=my_prometheus.prometheus_http_requests_total | where @timestamp > '" + + format.format(new Date(System.currentTimeMillis() - 3600 * 1000)) + + "' | sort + @timestamp | head 5"; - JSONObject response = - executeQuery(query); - verifySchema(response, + JSONObject response = executeQuery(query); + verifySchema( + response, schema(VALUE, "double"), - schema(TIMESTAMP, "timestamp"), - schema("handler", "string"), - schema("code", "string"), - schema("instance", "string"), - schema("job", "string")); + schema(TIMESTAMP, "timestamp"), + schema("handler", "string"), + schema("code", "string"), + schema("instance", "string"), + schema("job", "string")); // Currently, data is not injected into prometheus, // so asserting on result is not possible. Verifying only schema. } @@ -114,9 +118,12 @@ public void testSourceMetricCommandWithTimestamp() { @SneakyThrows public void testMetricAvgAggregationCommand() { JSONObject response = - executeQuery("source=`my_prometheus`.`prometheus_http_requests_total` | stats avg(@value) as `agg` by span(@timestamp, 15s), `handler`, `job`"); - verifySchema(response, - schema("agg", "double"), + executeQuery( + "source=`my_prometheus`.`prometheus_http_requests_total` | stats avg(@value) as `agg`" + + " by span(@timestamp, 15s), `handler`, `job`"); + verifySchema( + response, + schema("agg", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -133,9 +140,12 @@ public void testMetricAvgAggregationCommand() { @SneakyThrows public void testMetricAvgAggregationCommandWithAlias() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats avg(@value) as agg by span(@timestamp, 15s), `handler`, job"); - verifySchema(response, - schema("agg", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats avg(@value) 
as agg by" + + " span(@timestamp, 15s), `handler`, job"); + verifySchema( + response, + schema("agg", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -148,15 +158,15 @@ public void testMetricAvgAggregationCommandWithAlias() { } } - @Test @SneakyThrows public void testMetricMaxAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats max(@value) by span(@timestamp, 15s)"); - verifySchema(response, - schema("max(@value)", "double"), - schema("span(@timestamp,15s)", "timestamp")); + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats max(@value) by" + + " span(@timestamp, 15s)"); + verifySchema( + response, schema("max(@value)", "double"), schema("span(@timestamp,15s)", "timestamp")); Assertions.assertTrue(response.getInt("size") > 0); Assertions.assertEquals(2, response.getJSONArray("datarows").getJSONArray(0).length()); JSONArray firstRow = response.getJSONArray("datarows").getJSONArray(0); @@ -166,14 +176,16 @@ public void testMetricMaxAggregationCommand() { } } - @Test @SneakyThrows public void testMetricMinAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats min(@value) by span(@timestamp, 15s), handler"); - verifySchema(response, - schema("min(@value)", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats min(@value) by" + + " span(@timestamp, 15s), handler"); + verifySchema( + response, + schema("min(@value)", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string")); Assertions.assertTrue(response.getInt("size") > 0); @@ -189,9 +201,12 @@ public void testMetricMinAggregationCommand() { @SneakyThrows public void testMetricCountAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats count() by span(@timestamp, 15s), 
handler, job"); - verifySchema(response, - schema("count()", "integer"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats count() by" + + " span(@timestamp, 15s), handler, job"); + verifySchema( + response, + schema("count()", "integer"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -208,9 +223,12 @@ public void testMetricCountAggregationCommand() { @SneakyThrows public void testMetricSumAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats sum(@value) by span(@timestamp, 15s), handler, job"); - verifySchema(response, - schema("sum(@value)", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats sum(@value) by" + + " span(@timestamp, 15s), handler, job"); + verifySchema( + response, + schema("sum(@value)", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -223,18 +241,21 @@ public void testMetricSumAggregationCommand() { } } - @Test @SneakyThrows public void testQueryRange() { long currentTimestamp = new Date().getTime(); JSONObject response = - executeQuery("source=my_prometheus.query_range('prometheus_http_requests_total'," - + ((currentTimestamp/1000)-3600) + "," + currentTimestamp/1000 + ", " + "'14'" + ")" ); - verifySchema(response, - schema(LABELS, "struct"), - schema(VALUE, "array"), - schema(TIMESTAMP, "array")); + executeQuery( + "source=my_prometheus.query_range('prometheus_http_requests_total'," + + ((currentTimestamp / 1000) - 3600) + + "," + + currentTimestamp / 1000 + + ", " + + "'14'" + + ")"); + verifySchema( + response, schema(LABELS, "struct"), schema(VALUE, "array"), schema(TIMESTAMP, "array")); Assertions.assertTrue(response.getInt("size") > 0); } @@ -243,25 +264,23 @@ public void explainQueryRange() throws Exception { String expected = 
loadFromFile("expectedOutput/ppl/explain_query_range.json"); assertJsonEquals( expected, - explainQueryToString("source = my_prometheus" - + ".query_range('prometheus_http_requests_total',1689281439,1689291439,14)") - ); + explainQueryToString( + "source = my_prometheus" + + ".query_range('prometheus_http_requests_total',1689281439,1689291439,14)")); } - @Test + @Test public void testExplainForQueryExemplars() throws Exception { String expected = loadFromFile("expectedOutput/ppl/explain_query_exemplars.json"); assertJsonEquals( expected, - explainQueryToString("source = my_prometheus." - + "query_exemplars('app_ads_ad_requests_total',1689228292,1689232299)") - ); + explainQueryToString( + "source = my_prometheus." + + "query_exemplars('app_ads_ad_requests_total',1689228292,1689232299)")); } String loadFromFile(String filename) throws Exception { URI uri = Resources.getResource(filename).toURI(); return new String(Files.readAllBytes(Paths.get(uri))); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java index 422cc92cd2..80a89ed9c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -79,10 +78,7 @@ public void queryShouldBeCaseInsensitiveInKeywords() { queryShouldPassSyntaxAndSemanticCheck(query); } - /** - * Commands that fail syntax analysis should throw - * {@link SyntaxCheckException}. - */ + /** Commands that fail syntax analysis should throw {@link SyntaxCheckException}. 
*/ @Test public void queryNotStartingWithSearchCommandShouldFailSyntaxCheck() { String query = "fields firstname"; @@ -107,14 +103,12 @@ public void unsupportedAggregationFunctionShouldFailSyntaxCheck() { queryShouldThrowSyntaxException(query, "Failed to parse query due to offending symbol"); } - /** - * Commands that fail semantic analysis should throw {@link SemanticCheckException}. - */ + /** Commands that fail semantic analysis should throw {@link SemanticCheckException}. */ @Test public void nonexistentFieldShouldFailSemanticCheck() { String query = String.format("search source=%s | fields name", TEST_INDEX_ACCOUNT); - queryShouldThrowSemanticException(query, "can't resolve Symbol(namespace=FIELD_NAME, " - + "name=name) in type env"); + queryShouldThrowSemanticException( + query, "can't resolve Symbol(namespace=FIELD_NAME, name=name) in type env"); } private void queryShouldPassSyntaxAndSemanticCheck(String query) { @@ -134,7 +128,7 @@ private void queryShouldThrowSyntaxException(String query, String... 
messages) { } catch (ResponseException e) { String errorMsg = e.getMessage(); assertTrue(errorMsg.contains("SyntaxCheckException")); - for (String msg: messages) { + for (String msg : messages) { assertTrue(errorMsg.contains(msg)); } } catch (IOException e) { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java index 4ace407d72..42a637ead7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java @@ -27,22 +27,29 @@ public void all_fields_test() throws IOException { @Test public void mandatory_params_test() throws IOException { - String query = "source=" + TEST_INDEX_BEER + " | where query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "source=" + + TEST_INDEX_BEER + + " | where query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; JSONObject result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void all_params_test() throws IOException { - String query = "source=" + TEST_INDEX_BEER + " | where query_string(['Body', Tags, Title], 'taste beer'," - + "allow_leading_wildcard=true, enable_position_increments=true, escape=false," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='english', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7)"; + String query = + "source=" + + TEST_INDEX_BEER + + " | where query_string(['Body', Tags, Title], 'taste" + + " 
beer',allow_leading_wildcard=true, enable_position_increments=true," + + " escape=false,fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states" + + " = 10000,analyzer='english', analyze_wildcard = false, quote_field_suffix =" + + " '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77,quote_analyzer='standard', phrase_slop=0, rewrite='constant_score'," + + " type='best_fields',tie_breaker=0.3, time_zone='Canada/Pacific'," + + " default_operator='or',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7)"; JSONObject result = executeQuery(query); assertEquals(49, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java index f65941b8f7..e3ed1661cd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -30,12 +29,8 @@ public void afterTest() throws IOException { @Test public void testRareWithoutGroup() throws IOException { - JSONObject result = - executeQuery(String.format("source=%s | rare gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("F"), - rows("M")); + JSONObject result = executeQuery(String.format("source=%s | rare gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("F"), rows("M")); } @Test @@ -65,6 +60,4 @@ public void testRareWithGroup() throws IOException { rows("M", "KY"), rows("M", "IN")); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java index 7c57bd5481..8e6614dfed 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java @@ -18,11 +18,11 @@ public void init() throws IOException { @Test public void test_wildcard_simple_query_string() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); } @@ -33,11 +33,15 @@ public void test_wildcard_simple_query_string() throws IOException { */ @Test public void verify_flags_in_simple_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); @@ -53,11 +57,11 @@ public void verify_flags_in_simple_query_string() throws IOException { */ @Test public void verify_escape_in_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=true);"; + String query1 = + "SOURCE=" + 
TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=true);"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=false);"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=false);"; var result2 = executeQuery(query2); assertEquals(0, result1.getInt("total")); assertEquals(8, result2.getInt("total")); @@ -70,11 +74,15 @@ public void verify_escape_in_query_string() throws IOException { */ @Test public void verify_default_operator_in_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE query_string([Title], 'beer taste', default_operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE query_string([Title], 'beer taste', default_operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -82,11 +90,15 @@ public void verify_default_operator_in_query_string() throws IOException { @Test public void verify_default_operator_in_simple_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; var 
result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -94,11 +106,11 @@ public void verify_default_operator_in_simple_query_string() throws IOException @Test public void verify_default_operator_in_multi_match() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='OR')"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='AND')"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -106,11 +118,11 @@ public void verify_default_operator_in_multi_match() throws IOException { @Test public void verify_operator_in_match() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='OR')"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='AND')"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java index ad1add4e12..ae06e75a06 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -43,7 +42,9 @@ public void testRenameMultiField() throws IOException { verifyColumn(result, columnName("FIRSTNAME"), columnName("AGE")); } - @Ignore("Wildcard is unsupported yet. Enable once https://github.com/opensearch-project/sql/issues/787 is resolved.") + @Ignore( + "Wildcard is unsupported yet. Enable once" + + " https://github.com/opensearch-project/sql/issues/787 is resolved.") @Test public void testRenameWildcardFields() throws IOException { JSONObject result = executeQuery("source=" + TEST_INDEX_ACCOUNT + " | rename %name as %NAME"); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java index e608e94512..56b54ba748 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DOG; @@ -31,11 +30,11 @@ public void queryExceedResourceLimitShouldFail() throws IOException { new ClusterSetting("persistent", Settings.Key.QUERY_MEMORY_LIMIT.getKeyValue(), "1%")); String query = String.format("search source=%s age=20", TEST_INDEX_DOG); - ResponseException exception = - expectThrows(ResponseException.class, () -> executeQuery(query)); + ResponseException exception = expectThrows(ResponseException.class, () -> executeQuery(query)); assertEquals(503, exception.getResponse().getStatusLine().getStatusCode()); - assertThat(exception.getMessage(), Matchers.containsString("resource is not enough to run the" - + " query, quit.")); + assertThat( + exception.getMessage(), + Matchers.containsString("resource is not enough to run the" + " query, quit.")); // update 
plugins.ppl.query.memory_limit to default value 85% updateClusterSettings( diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java index 2e62b464bb..5d1b0203d7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java index d012cce9e8..224afde4c5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -26,17 +25,13 @@ public void init() throws IOException { public void testQuerySizeLimit() throws IOException { // Default setting, fetch 200 rows from source JSONObject result = - executeQuery( - String.format( - "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + executeQuery(String.format("search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie"), rows("Elinor"), rows("Virginia")); // Fetch 1 rows from source setQuerySizeLimit(1); result = - executeQuery( - String.format( - "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + executeQuery(String.format("search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java index 
4845d30033..c9c4854212 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java @@ -28,10 +28,10 @@ public class ShowDataSourcesCommandIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. + * * @throws InterruptedException */ @BeforeClass @@ -42,8 +42,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -59,26 +62,14 @@ protected void deleteDataSourceMetadata() throws IOException { @Test public void testShowDataSourcesCommands() throws IOException { JSONObject result = executeQuery("show datasources"); - verifyDataRows(result, - rows("my_prometheus", "PROMETHEUS"), - rows("@opensearch", "OPENSEARCH")); - verifyColumn( - result, - columnName("DATASOURCE_NAME"), - columnName("CONNECTOR_TYPE") - ); + 
verifyDataRows(result, rows("my_prometheus", "PROMETHEUS"), rows("@opensearch", "OPENSEARCH")); + verifyColumn(result, columnName("DATASOURCE_NAME"), columnName("CONNECTOR_TYPE")); } @Test public void testShowDataSourcesCommandsWithWhereClause() throws IOException { JSONObject result = executeQuery("show datasources | where CONNECTOR_TYPE='PROMETHEUS'"); - verifyDataRows(result, - rows("my_prometheus", "PROMETHEUS")); - verifyColumn( - result, - columnName("DATASOURCE_NAME"), - columnName("CONNECTOR_TYPE") - ); + verifyDataRows(result, rows("my_prometheus", "PROMETHEUS")); + verifyColumn(result, columnName("DATASOURCE_NAME"), columnName("CONNECTOR_TYPE")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java index 46111b902e..714557412f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java @@ -19,36 +19,42 @@ public void init() throws IOException { @Test public void test_simple_query_string() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste') |" + + " fields Id"; var result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void test_simple_query_string_all_params() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Body', Tags, Title], 'taste beer', default_operator='or'," - + "analyzer=english, analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, flags='PREFIX'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + 
"minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7) | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string(['Body', Tags, Title], 'taste beer'," + + " default_operator='or',analyzer=english, analyze_wildcard = false," + + " quote_field_suffix = '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77, flags='PREFIX',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7) | fields Id"; var result = executeQuery(query); assertEquals(49, result.getInt("total")); } @Test public void test_wildcard_simple_query_string() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "source=" + TEST_INDEX_BEER - + " | where simple_query_string(['*Date'], '2014-01-22')"; + String query3 = + "source=" + TEST_INDEX_BEER + " | where simple_query_string(['*Date'], '2014-01-22')"; JSONObject result3 = executeQuery(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java index 01befa0541..c90a506252 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java index d3230188b7..54831cb561 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java @@ -37,11 +37,13 @@ public void init() throws IOException { @Test public void nested_function_with_array_of_nested_field_test() { - String query = "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; + String query = + "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("c", "ab"), rows("a", "ab"), rows("b", "aa"), @@ -52,17 +54,20 @@ public void nested_function_with_array_of_nested_field_test() { @Test public void nested_function_in_select_test() { - String query = "SELECT nested(message.info), nested(comment.data), " - + "nested(message.dayOfWeek) FROM " - + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT nested(message.info), nested(comment.data), " + + "nested(message.dayOfWeek) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); assertEquals(5, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", "ab", 1), rows("b", "aa", 2), rows("c", "aa", 1), @@ -74,8 +79,8 @@ public void nested_function_in_select_test() { // gets resolved @Disabled // TODO fix me when aggregation is supported public void nested_function_in_an_aggregate_function_in_select_test() { - String query 
= "SELECT sum(nested(message.dayOfWeek)) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT sum(nested(message.dayOfWeek)) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows(14)); } @@ -83,84 +88,67 @@ public void nested_function_in_an_aggregate_function_in_select_test() { // TODO Enable me when nested aggregation is supported @Disabled public void nested_function_with_arrays_in_an_aggregate_function_in_select_test() { - String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + - TEST_INDEX_NESTED_TYPE; + String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows(19)); } @Test public void nested_function_in_a_function_in_select_test() { - String query = "SELECT upper(nested(message.info)) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT upper(nested(message.info)) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("A"), - rows("B"), - rows("C"), - rows("C"), - rows("ZZ")); + verifyDataRows(result, rows("A"), rows("B"), rows("C"), rows("C"), rows("ZZ")); } @Test public void nested_all_function_in_a_function_in_select_test() { - String query = "SELECT nested(message.*) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT nested(message.*) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("e", 1, "a")); } @Test public void invalid_multiple_nested_all_function_in_a_function_in_select_test() { - String query = "SELECT nested(message.*), nested(message.info) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; - RuntimeException result = assertThrows( - RuntimeException.class, - () -> 
executeJdbcRequest(query) - ); + String query = + "SELECT nested(message.*), nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + RuntimeException result = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); assertTrue( result.getMessage().contains("IllegalArgumentException") - && result.getMessage().contains("Multiple entries with same key") - ); + && result.getMessage().contains("Multiple entries with same key")); } @Test public void nested_all_function_with_limit_test() { - String query = "SELECT nested(message.*) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " LIMIT 3"; + String query = + "SELECT nested(message.*) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " LIMIT 3"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("e", 1, "a"), - rows("f", 2, "b"), - rows("g", 1, "c") - ); + verifyDataRows(result, rows("e", 1, "a"), rows("f", 2, "b"), rows("g", 1, "c")); } - @Test public void nested_function_with_array_of_multi_nested_field_test() { String query = "SELECT nested(message.author.name) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("e"), - rows("f"), - rows("g"), - rows("h"), - rows("p"), - rows("yy")); + verifyDataRows(result, rows("e"), rows("f"), rows("g"), rows("h"), rows("p"), rows("yy")); } @Test public void nested_function_with_null_and_missing_fields_test() { - String query = "SELECT nested(message.info), nested(comment.data) FROM " - + TEST_INDEX_NESTED_WITH_NULLS; + String query = + "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_WITH_NULLS; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows(null, "hh"), rows("b", "aa"), rows("c", "aa"), @@ -176,12 +164,14 @@ public void nested_function_with_null_and_missing_fields_test() { @Test public 
void nested_function_multiple_fields_with_matched_and_mismatched_paths_test() { String query = - "SELECT nested(message.author), nested(message.dayOfWeek), nested(message.info), nested(comment.data), " - + "nested(comment.likes) FROM " + TEST_INDEX_NESTED_TYPE; + "SELECT nested(message.author), nested(message.dayOfWeek), nested(message.info)," + + " nested(comment.data), nested(comment.likes) FROM " + + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab", 3), rows("f", 2, "b", "aa", 2), rows("g", 1, "c", "aa", 3), @@ -192,12 +182,12 @@ public void nested_function_multiple_fields_with_matched_and_mismatched_paths_te @Test public void nested_function_mixed_with_non_nested_type_test() { - String query = - "SELECT nested(message.info), someField FROM " + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.info), someField FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", "b"), rows("b", "a"), rows("c", "a"), @@ -209,46 +199,38 @@ public void nested_function_mixed_with_non_nested_type_test() { @Test public void nested_function_with_order_by_clause() { String query = - "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info)"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("a"), - rows("c"), - rows("a"), - rows("b"), - rows("c"), - rows("zz")); + verifyDataRows(result, rows("a"), rows("c"), rows("a"), rows("b"), rows("c"), rows("zz")); } @Test public void nested_function_with_order_by_clause_desc() { String query = - "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info) FROM " + + 
TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info, message) DESC"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("zz"), - rows("c"), - rows("c"), - rows("a"), - rows("b"), - rows("a")); + verifyDataRows(result, rows("zz"), rows("c"), rows("c"), rows("a"), rows("b"), rows("a")); } @Test public void nested_function_and_field_with_order_by_clause() { String query = - "SELECT nested(message.info), myNum FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info), myNum FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info, message), myNum"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", 1), rows("c", 4), rows("a", 4), @@ -266,9 +248,12 @@ public void nested_function_with_group_by_clause() { "SELECT count(*) FROM " + TEST_INDEX_NESTED_TYPE + " GROUP BY nested(message.info)"; JSONObject result = executeJdbcRequest(query); - assertTrue(result.getJSONObject("error").get("details").toString().contains( - "Aggregation type nested is not yet implemented" - )); + assertTrue( + result + .getJSONObject("error") + .get("details") + .toString() + .contains("Aggregation type nested is not yet implemented")); } // Nested function in HAVING clause is not yet implemented for JDBC format. This test ensures @@ -277,12 +262,19 @@ public void nested_function_with_group_by_clause() { @Test public void nested_function_with_having_clause() { String query = - "SELECT count(*) FROM " + TEST_INDEX_NESTED_TYPE + " GROUP BY myNum HAVING nested(comment.likes) > 7"; + "SELECT count(*) FROM " + + TEST_INDEX_NESTED_TYPE + + " GROUP BY myNum HAVING nested(comment.likes) > 7"; JSONObject result = executeJdbcRequest(query); - assertTrue(result.getJSONObject("error").get("details").toString().contains( - "For more details, please send request for Json format to see the raw response from OpenSearch engine." 
- )); + assertTrue( + result + .getJSONObject("error") + .get("details") + .toString() + .contains( + "For more details, please send request for Json format to see the raw response from" + + " OpenSearch engine.")); } @Test @@ -292,13 +284,11 @@ public void nested_function_mixed_with_non_nested_types_test() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("a", - new JSONObject(Map.of("south", 3, "west", "ab")), "ab"), - rows("b", - new JSONObject(Map.of("south", 5, "west", "ff")), "ff"), - rows("c", - new JSONObject(Map.of("south", 3, "west", "ll")), "ll"), + verifyDataRows( + result, + rows("a", new JSONObject(Map.of("south", 3, "west", "ab")), "ab"), + rows("b", new JSONObject(Map.of("south", 5, "west", "ff")), "ff"), + rows("c", new JSONObject(Map.of("south", 3, "west", "ll")), "ll"), rows("d", null, null), rows("i", null, null), rows("zz", null, null)); @@ -308,11 +298,13 @@ public void nested_function_mixed_with_non_nested_types_test() { public void nested_function_with_relevance_query() { String query = "SELECT nested(message.info), highlight(someField) FROM " - + TEST_INDEX_NESTED_TYPE + " WHERE match(someField, 'b')"; + + TEST_INDEX_NESTED_TYPE + + " WHERE match(someField, 'b')"; JSONObject result = executeJdbcRequest(query); assertEquals(3, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", new JSONArray(List.of("b"))), rows("c", new JSONArray(List.of("b"))), rows("a", new JSONArray(List.of("b")))); @@ -322,60 +314,68 @@ public void nested_function_with_relevance_query() { public void nested_with_non_nested_type_test() { String query = "SELECT nested(someField) FROM " + TEST_INDEX_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains( - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"Illegal nested field 
name: someField\",\n" + - " \"type\": \"IllegalArgumentException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"Illegal nested field name: someField\",\n" + + " \"type\": \"IllegalArgumentException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void nested_missing_path() { String query = "SELECT nested(message.invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=message.invalid) in type env\",\n" + - " \"type\": \"SemanticCheckException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME," + + " name=message.invalid) in type env\",\n" + + " \"type\": \"SemanticCheckException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void nested_missing_path_argument() { - String query = "SELECT nested(message.author.name, invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; + String query = + "SELECT nested(message.author.name, invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"can't resolve 
Symbol(namespace=FIELD_NAME, name=invalid) in type env\",\n" + - " \"type\": \"SemanticCheckException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=invalid)" + + " in type env\",\n" + + " \"type\": \"SemanticCheckException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void test_nested_where_with_and_conditional() { - String query = "SELECT nested(message.info), nested(message.author) FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message, message.info = 'a' AND message.author = 'e')"; + String query = + "SELECT nested(message.info), nested(message.author) FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message, message.info = 'a' AND message.author = 'e')"; JSONObject result = executeJdbcRequest(query); assertEquals(1, result.getInt("total")); verifyDataRows(result, rows("a", "e")); @@ -383,22 +383,19 @@ public void test_nested_where_with_and_conditional() { @Test public void test_nested_in_select_and_where_as_predicate_expression() { - String query = "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(3, result.getInt("total")); - verifyDataRows( - result, - rows("a"), - rows("c"), - rows("a") - ); + verifyDataRows(result, rows("a"), rows("c"), rows("a")); } @Test public void test_nested_in_where_as_predicate_expression() { - String query = "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE + " 
WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); // Only first index of array is returned. Second index has 'a' @@ -407,8 +404,10 @@ public void test_nested_in_where_as_predicate_expression() { @Test public void test_nested_in_where_as_predicate_expression_with_like() { - String query = "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) LIKE 'a'"; + String query = + "SELECT message.info FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) LIKE 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); // Only first index of array is returned. Second index has 'a' @@ -417,21 +416,22 @@ public void test_nested_in_where_as_predicate_expression_with_like() { @Test public void test_nested_in_where_as_predicate_expression_with_multiple_conditions() { - String query = "SELECT message.info, comment.data, message.dayOfWeek FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'zz' OR nested(comment.data) = 'ab' AND nested(message.dayOfWeek) >= 4"; + String query = + "SELECT message.info, comment.data, message.dayOfWeek FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) = 'zz' OR nested(comment.data) = 'ab' AND" + + " nested(message.dayOfWeek) >= 4"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows( - result, - rows("c", "ab", 4), - rows("zz", "aa", 6) - ); + verifyDataRows(result, rows("c", "ab", 4), rows("zz", "aa", 6)); } @Test public void test_nested_in_where_as_predicate_expression_with_relevance_query() { - String query = "SELECT comment.likes, someField FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(comment.likes) = 10 AND match(someField, 'a')"; + String query = + "SELECT comment.likes, someField FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(comment.likes) = 10 AND match(someField, 'a')"; JSONObject result = 
executeJdbcRequest(query); assertEquals(1, result.getInt("total")); verifyDataRows(result, rows(10, "a")); @@ -443,11 +443,13 @@ public void nested_function_all_subfields() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a"), rows("f", 2, "b"), rows("g", 1, "c"), @@ -458,17 +460,18 @@ public void nested_function_all_subfields() { @Test public void nested_function_all_subfields_and_specified_subfield() { - String query = "SELECT nested(message.*), nested(comment.data) FROM " - + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.*), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab"), rows("f", 2, "b", "aa"), rows("g", 1, "c", "aa"), @@ -479,15 +482,16 @@ public void nested_function_all_subfields_and_specified_subfield() { @Test public void nested_function_all_deep_nested_subfields() { - String query = "SELECT nested(message.author.address.*) FROM " - + TEST_INDEX_MULTI_NESTED_TYPE; + String query = "SELECT nested(message.author.address.*) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author.address.number)", null, "integer"), schema("nested(message.author.address.street)", null, 
"keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows(1, "bc"), rows(2, "ab"), rows(3, "sk"), @@ -498,18 +502,19 @@ public void nested_function_all_deep_nested_subfields() { @Test public void nested_function_all_subfields_for_two_nested_fields() { - String query = "SELECT nested(message.*), nested(comment.*) FROM " - + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.*), nested(comment.*) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword"), schema("nested(comment.likes)", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab", 3), rows("f", 2, "b", "aa", 2), rows("g", 1, "c", "aa", 3), @@ -524,12 +529,14 @@ public void nested_function_all_subfields_and_non_nested_field() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("myNum", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", 1), rows("f", 2, "b", 2), rows("g", 1, "c", 3), @@ -544,17 +551,15 @@ public void nested_function_with_date_types_as_object_arrays_within_arrays_test( JSONObject result = executeJdbcRequest(query); assertEquals(11, result.getInt("total")); - verifySchema(result, - schema("nested(address.moveInDate)", null, "object") - ); - verifyDataRows(result, - rows(new JSONObject(Map.of("dateAndTime","1984-04-12 09:07:42"))), - rows(new JSONArray( - List.of( - Map.of("dateAndTime", "2023-05-03 08:07:42"), - Map.of("dateAndTime", "2001-11-11 
04:07:44")) - ) - ), + verifySchema(result, schema("nested(address.moveInDate)", null, "object")); + verifyDataRows( + result, + rows(new JSONObject(Map.of("dateAndTime", "1984-04-12 09:07:42"))), + rows( + new JSONArray( + List.of( + Map.of("dateAndTime", "2023-05-03 08:07:42"), + Map.of("dateAndTime", "2001-11-11 04:07:44")))), rows(new JSONObject(Map.of("dateAndTime", "1966-03-19 03:04:55"))), rows(new JSONObject(Map.of("dateAndTime", "2011-06-01 01:01:42"))), rows(new JSONObject(Map.of("dateAndTime", "1901-08-11 04:03:33"))), @@ -563,30 +568,27 @@ public void nested_function_with_date_types_as_object_arrays_within_arrays_test( rows(new JSONObject(Map.of("dateAndTime", "1977-07-13 09:04:41"))), rows(new JSONObject(Map.of("dateAndTime", "1933-12-12 05:05:45"))), rows(new JSONObject(Map.of("dateAndTime", "1909-06-17 01:04:21"))), - rows(new JSONArray( - List.of( - Map.of("dateAndTime", "2001-11-11 04:07:44")) - ) - ) - ); + rows(new JSONArray(List.of(Map.of("dateAndTime", "2001-11-11 04:07:44"))))); } @Test public void nested_function_all_subfields_in_wrong_clause() { String query = "SELECT * FROM " + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.*)"; - Exception exception = assertThrows(RuntimeException.class, () -> - executeJdbcRequest(query)); - - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"There was internal problem at backend\",\n" + - " \"details\": \"Invalid use of expression nested(message.*)\",\n" + - " \"type\": \"UnsupportedOperationException\"\n" + - " },\n" + - " \"status\": 503\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + + assertTrue( + exception + .getMessage() + .contains( + "" + + "{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"Invalid use of expression nested(message.*)\",\n" + + " \"type\": \"UnsupportedOperationException\"\n" + + " },\n" + + " 
\"status\": 503\n" + + "}")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java index de3dd0fe98..547c88859e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java @@ -77,8 +77,7 @@ public NowLikeFunctionIT( @Name("constValue") Boolean constValue, @Name("referenceGetter") Supplier referenceGetter, @Name("parser") BiFunction parser, - @Name("serializationPatternStr") String serializationPatternStr - ) { + @Name("serializationPatternStr") String serializationPatternStr) { this.name = name; this.hasFsp = hasFsp; this.hasShortcut = hasShortcut; @@ -90,56 +89,104 @@ public NowLikeFunctionIT( @ParametersFactory(argumentFormatting = "%1$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("now", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("current_timestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtimestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtime", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("sysdate", true, false, false, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("curtime", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("current_time", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("curdate", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("current_date", false, false, false, - (Supplier) 
LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_date", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalDate()), - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_time", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalTime()), - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("utc_timestamp", false, false, true, - (Supplier) (NowLikeFunctionIT::utcDateTimeNow), - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss") - )); + return Arrays.asList( + $$( + $( + "now", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "current_timestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtimestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtime", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "sysdate", + true, + false, + false, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "curtime", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "current_time", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "curdate", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "current_date", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_date", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalDate()), + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_time", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalTime()), + 
(BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "utc_timestamp", + false, + false, + true, + (Supplier) (NowLikeFunctionIT::utcDateTimeNow), + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"))); } private long getDiff(Temporal sample, Temporal reference) { @@ -150,14 +197,14 @@ private long getDiff(Temporal sample, Temporal reference) { } public static LocalDateTime utcDateTimeNow() { - ZonedDateTime zonedDateTime = - LocalDateTime.now().atZone(TimeZone.getDefault().toZoneId()); + ZonedDateTime zonedDateTime = LocalDateTime.now().atZone(TimeZone.getDefault().toZoneId()); return zonedDateTime.withZoneSameInstant(ZoneId.of("UTC")).toLocalDateTime(); } @Test public void testNowLikeFunctions() throws IOException { - var serializationPattern = new DateTimeFormatterBuilder() + var serializationPattern = + new DateTimeFormatterBuilder() .appendPattern(serializationPatternStr) .optionalStart() .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true) @@ -167,15 +214,16 @@ public void testNowLikeFunctions() throws IOException { double delta = 2d; // acceptable time diff, secs if (reference instanceof LocalDate) delta = 1d; // Max date delta could be 1 if test runs on the very edge of two days - // We ignore probability of a test run on edge of month or year to simplify the checks + // We ignore probability of a test run on edge of month or year to simplify the checks - var calls = new ArrayList() {{ - add(name + "()"); - }}; - if (hasShortcut) - calls.add(name); - if (hasFsp) - calls.add(name + "(0)"); + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; + if (hasShortcut) calls.add(name); + if (hasFsp) calls.add(name + "(0)"); // Column order is: func(), func, func(0) // shortcut ^ fsp ^ @@ -185,20 +233,25 @@ public void testNowLikeFunctions() throws IOException { JSONArray firstRow = rows.getJSONArray(0); for (int i = 0; i < rows.length(); i++) { var row = rows.getJSONArray(i); - if (constValue) - assertTrue(firstRow.similar(row)); + if 
(constValue) assertTrue(firstRow.similar(row)); int column = 0; - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); if (hasShortcut) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); } if (hasFsp) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column), serializationPattern)), + delta); } } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java index b8bf0963b5..f885b6d4e0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -34,28 +33,22 @@ public void testNullLiteralSchema() { @Test public void testNullLiteralInOperator() { - verifyDataRows( - query("SELECT NULL = NULL, NULL AND TRUE"), - rows(null, null)); + verifyDataRows(query("SELECT NULL = NULL, NULL AND TRUE"), rows(null, null)); } @Test public void testNullLiteralInFunction() { - verifyDataRows( - query("SELECT ABS(NULL), POW(2, FLOOR(NULL))"), - rows(null, null)); + verifyDataRows(query("SELECT ABS(NULL), POW(2, FLOOR(NULL))"), rows(null, null)); } @Test public void testNullLiteralInInterval() { verifyDataRows( query("SELECT INTERVAL NULL DAY, INTERVAL 60 * 60 * 24 * (NULL - FLOOR(NULL)) SECOND"), - rows(null, null) - ); + rows(null, null)); } private JSONObject query(String sql) { return new 
JSONObject(executeQuery(sql, "jdbc")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java index e6f4e18468..84289d8f57 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.carrotsearch.randomizedtesting.annotations.Name; @@ -27,8 +26,7 @@ public class PaginationBlackboxIT extends SQLIntegTestCase { private final Index index; private final Integer pageSize; - public PaginationBlackboxIT(@Name("index") Index index, - @Name("pageSize") Integer pageSize) { + public PaginationBlackboxIT(@Name("index") Index index, @Name("pageSize") Integer pageSize) { this.index = index; this.pageSize = pageSize; } @@ -45,7 +43,7 @@ public static Iterable compareTwoDates() { var testData = new ArrayList(); for (var index : indices) { for (var pageSize : pageSizes) { - testData.add(new Object[] { index, pageSize }); + testData.add(new Object[] {index, pageSize}); } } return testData; @@ -64,14 +62,19 @@ public void test_pagination_blackbox() { var responseCounter = 1; this.logger.info(testReportPrefix + "first response"); - response = new JSONObject(executeFetchQuery( - String.format("select * from %s", index.getName()), pageSize, "jdbc")); + response = + new JSONObject( + executeFetchQuery( + String.format("select * from %s", index.getName()), pageSize, "jdbc")); - var cursor = response.has("cursor")? response.getString("cursor") : ""; + var cursor = response.has("cursor") ? 
response.getString("cursor") : ""; do { - this.logger.info(testReportPrefix - + String.format("subsequent response %d/%d", responseCounter++, (indexSize / pageSize) + 1)); - assertTrue("Paged response schema doesn't match to non-paged", + this.logger.info( + testReportPrefix + + String.format( + "subsequent response %d/%d", responseCounter++, (indexSize / pageSize) + 1)); + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturned += response.getInt("size"); @@ -88,13 +91,17 @@ public void test_pagination_blackbox() { cursor = ""; } - } while(!cursor.isEmpty()); - assertTrue("Paged response schema doesn't match to non-paged", + } while (!cursor.isEmpty()); + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); - assertEquals(testReportPrefix + "Paged responses return another row count that non-paged", - indexSize, rowsReturned); - assertTrue(testReportPrefix + "Paged accumulated result has other rows than non-paged", + assertEquals( + testReportPrefix + "Paged responses return another row count that non-paged", + indexSize, + rowsReturned); + assertTrue( + testReportPrefix + "Paged accumulated result has other rows than non-paged", rows.similar(rowsPaged)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java index 213c9322e1..dfb0bb2080 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java @@ -36,23 +36,24 @@ public void testSelectAll() throws IOException { @Test public void testSelectWithOpenSearchFuncInFilter() throws IOException { - var response = executeQueryTemplate( - "SELECT * FROM %s WHERE `11` = match_phrase('96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate("SELECT * FROM %s WHERE 
`11` = match_phrase('96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testSelectWithHighlight() throws IOException { - var response = executeQueryTemplate( - "SELECT highlight(`11`) FROM %s WHERE match_query(`11`, '96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate( + "SELECT highlight(`11`) FROM %s WHERE match_query(`11`, '96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testSelectWithFullTextSearch() throws IOException { - var response = executeQueryTemplate( - "SELECT * FROM %s WHERE match_phrase(`11`, '96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate("SELECT * FROM %s WHERE match_phrase(`11`, '96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @@ -64,8 +65,7 @@ public void testSelectFromIndexWildcard() throws IOException { @Test public void testSelectFromDataSource() throws IOException { - var response = executeQueryTemplate("SELECT * FROM @opensearch.%s", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM @opensearch.%s", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @@ -77,31 +77,29 @@ public void testSelectColumnReference() throws IOException { @Test public void testSubquery() throws IOException { - var response = executeQueryTemplate("SELECT `107` from (SELECT * FROM %s)", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT `107` from (SELECT * FROM %s)", TEST_INDEX_ONLINE); verifyIsV1Cursor(response); } @Test public void testSelectExpression() throws IOException { - var response = executeQueryTemplate("SELECT 1 + 1 - `107` from %s", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT 1 + 1 - `107` from %s", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testGroupBy() throws IOException { // GROUP BY is not paged by either engine. 
- var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107`", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107`", TEST_INDEX_ONLINE); TestUtils.verifyNoCursor(response); } @Test public void testGroupByHaving() throws IOException { // GROUP BY is not paged by either engine. - var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107` HAVING `107` > 400", - TEST_INDEX_ONLINE); + var response = + executeQueryTemplate( + "SELECT * FROM %s GROUP BY `107` HAVING `107` > 400", TEST_INDEX_ONLINE); TestUtils.verifyNoCursor(response); } @@ -113,15 +111,13 @@ public void testLimit() throws IOException { @Test public void testLimitOffset() throws IOException { - var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8 OFFSET 4", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8 OFFSET 4", TEST_INDEX_ONLINE); verifyIsV1Cursor(response); } @Test public void testOrderBy() throws IOException { - var response = executeQueryTemplate("SELECT * FROM %s ORDER By `107`", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s ORDER By `107`", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java index 6ebc05efad..038596cf57 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.carrotsearch.randomizedtesting.annotations.Name; @@ -22,38 +21,48 @@ import org.opensearch.sql.legacy.TestsConstants; /** - * Test pagination with `WHERE` clause using a parametrized test. 
- * See constructor {@link #PaginationFilterIT} for list of parameters - * and {@link #generateParameters} and {@link #STATEMENT_TO_NUM_OF_PAGES} - * to see how these parameters are generated. + * Test pagination with `WHERE` clause using a parametrized test. See constructor {@link + * #PaginationFilterIT} for list of parameters and {@link #generateParameters} and {@link + * #STATEMENT_TO_NUM_OF_PAGES} to see how these parameters are generated. */ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class PaginationFilterIT extends SQLIntegTestCase { /** - * Map of the OS-SQL statement sent to SQL-plugin, and the total number - * of expected hits (on all pages) from the filtered result + * Map of the OS-SQL statement sent to SQL-plugin, and the total number of expected hits (on all + * pages) from the filtered result */ - final private static Map STATEMENT_TO_NUM_OF_PAGES = Map.of( - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT, 1000, - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street')", 385, - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(city, 'Ola')", 1, - "SELECT firstname, lastname, highlight(address) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(state, 'OH')", 5, - "SELECT firstname, lastname, highlight('*') FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(state, 'OH')", 5, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE true", 60, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id=10", 1, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id + 5=15", 1, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BANK, 7 - ); + private static final Map STATEMENT_TO_NUM_OF_PAGES = + Map.of( + "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT, 1000, + "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 
'street')", + 385, + "SELECT * FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(city, 'Ola')", + 1, + "SELECT firstname, lastname, highlight(address) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(state, 'OH')", + 5, + "SELECT firstname, lastname, highlight('*') FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(state, 'OH')", + 5, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE true", 60, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id=10", 1, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id + 5=15", 1, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BANK, 7); private final String sqlStatement; private final Integer totalHits; private final Integer pageSize; - public PaginationFilterIT(@Name("statement") String sqlStatement, - @Name("total_hits") Integer totalHits, - @Name("page_size") Integer pageSize) { + public PaginationFilterIT( + @Name("statement") String sqlStatement, + @Name("total_hits") Integer totalHits, + @Name("page_size") Integer pageSize) { this.sqlStatement = sqlStatement; this.totalHits = totalHits; this.pageSize = pageSize; @@ -72,18 +81,18 @@ public static Iterable generateParameters() { List pageSizes = List.of(5, 1000); List testData = new ArrayList(); - STATEMENT_TO_NUM_OF_PAGES.forEach((statement, totalHits) -> { - for (var pageSize : pageSizes) { - testData.add(new Object[] { statement, totalHits, pageSize }); - } - }); + STATEMENT_TO_NUM_OF_PAGES.forEach( + (statement, totalHits) -> { + for (var pageSize : pageSizes) { + testData.add(new Object[] {statement, totalHits, pageSize}); + } + }); return testData; } /** - * Test compares non-paginated results with paginated results - * To ensure that the pushdowns return the same number of hits even - * with filter WHERE pushed down + * Test compares non-paginated results with paginated results To ensure that the pushdowns 
return + * the same number of hits even with filter WHERE pushed down */ @Test @SneakyThrows @@ -93,7 +102,10 @@ public void test_pagination_with_where() { int totalResultsCount = nonPaginatedResponse.getInt("total"); JSONArray rows = nonPaginatedResponse.getJSONArray("datarows"); JSONArray schema = nonPaginatedResponse.getJSONArray("schema"); - var testReportPrefix = String.format("query: %s; total hits: %d; page size: %d || ", sqlStatement, totalResultsCount, pageSize); + var testReportPrefix = + String.format( + "query: %s; total hits: %d; page size: %d || ", + sqlStatement, totalResultsCount, pageSize); assertEquals(totalHits.intValue(), totalResultsCount); var rowsPaged = new JSONArray(); @@ -101,7 +113,8 @@ public void test_pagination_with_where() { var responseCounter = 1; // make first request - with a cursor - JSONObject paginatedResponse = new JSONObject(executeFetchQuery(sqlStatement, pageSize, "jdbc")); + JSONObject paginatedResponse = + new JSONObject(executeFetchQuery(sqlStatement, pageSize, "jdbc")); this.logger.info(testReportPrefix + ""); do { var cursor = paginatedResponse.has("cursor") ? 
paginatedResponse.getString("cursor") : null; @@ -117,27 +130,34 @@ public void test_pagination_with_where() { if (cursor != null) { assertTrue( - testReportPrefix + "Cursor returned from legacy engine", - cursor.startsWith("n:")); + testReportPrefix + "Cursor returned from legacy engine", cursor.startsWith("n:")); paginatedResponse = executeCursorQuery(cursor); - this.logger.info(testReportPrefix - + String.format("response %d/%d", responseCounter++, (totalResultsCount / pageSize) + 1)); + this.logger.info( + testReportPrefix + + String.format( + "response %d/%d", responseCounter++, (totalResultsCount / pageSize) + 1)); } else { break; } } while (true); // last page expected results: - assertEquals(testReportPrefix + "Last page", - totalHits % pageSize, paginatedResponse.getInt("size")); - assertEquals(testReportPrefix + "Last page", - totalHits % pageSize, paginatedResponse.getJSONArray("datarows").length()); + assertEquals( + testReportPrefix + "Last page", totalHits % pageSize, paginatedResponse.getInt("size")); + assertEquals( + testReportPrefix + "Last page", + totalHits % pageSize, + paginatedResponse.getJSONArray("datarows").length()); // compare paginated and non-paginated counts - assertEquals(testReportPrefix + "Paged responses returned an unexpected total", - totalResultsCount, pagedSize); - assertEquals(testReportPrefix + "Paged responses returned an unexpected rows count", - rows.length(), rowsPaged.length()); + assertEquals( + testReportPrefix + "Paged responses returned an unexpected total", + totalResultsCount, + pagedSize); + assertEquals( + testReportPrefix + "Paged responses returned an unexpected rows count", + rows.length(), + rowsPaged.length()); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java index 224a1e95e4..49ef7c583e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java @@ -73,12 +73,16 @@ public void testCursorTimeout() throws IOException, InterruptedException { ResponseException exception = expectThrows(ResponseException.class, () -> executeCursorQuery(cursor)); response = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); - assertEquals(response.getJSONObject("error").getString("reason"), + assertEquals( + response.getJSONObject("error").getString("reason"), "Error occurred in OpenSearch engine: all shards failed"); - assertTrue(response.getJSONObject("error").getString("details") - .contains("SearchContextMissingException[No search context found for id")); - assertEquals(response.getJSONObject("error").getString("type"), - "SearchPhaseExecutionException"); + assertTrue( + response + .getJSONObject("error") + .getString("details") + .contains("SearchContextMissingException[No search context found for id")); + assertEquals( + response.getJSONObject("error").getString("type"), "SearchPhaseExecutionException"); wipeAllClusterSettings(); } @@ -106,12 +110,16 @@ public void testCloseCursor() { ResponseException exception = expectThrows(ResponseException.class, () -> executeCursorQuery(cursor)); response = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); - assertEquals(response.getJSONObject("error").getString("reason"), + assertEquals( + response.getJSONObject("error").getString("reason"), "Error occurred in OpenSearch engine: all shards failed"); - assertTrue(response.getJSONObject("error").getString("details") - .contains("SearchContextMissingException[No search context found for id")); - assertEquals(response.getJSONObject("error").getString("type"), - "SearchPhaseExecutionException"); + assertTrue( + response + .getJSONObject("error") + .getString("details") + .contains("SearchContextMissingException[No search context found for id")); + assertEquals( + response.getJSONObject("error").getString("type"), 
"SearchPhaseExecutionException"); } @Test @@ -134,7 +142,8 @@ public void testQueryWithOrderBy() { var cursor = response.getString("cursor"); do { assertTrue(cursor.isEmpty() || cursor.startsWith("n:")); - assertTrue("Paged response schema doesn't match to non-paged", + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturnedAsc += response.getInt("size"); @@ -151,7 +160,7 @@ public void testQueryWithOrderBy() { cursor = ""; } - } while(!cursor.isEmpty()); + } while (!cursor.isEmpty()); query = String.format("SELECT * from %s ORDER BY num1 DESC", TEST_INDEX_CALCS); response = new JSONObject(executeFetchQuery(query, 7, "jdbc")); @@ -160,7 +169,8 @@ public void testQueryWithOrderBy() { cursor = response.getString("cursor"); do { assertTrue(cursor.isEmpty() || cursor.startsWith("n:")); - assertTrue("Paged response schema doesn't match to non-paged", + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturnedDesc += response.getInt("size"); @@ -177,19 +187,22 @@ public void testQueryWithOrderBy() { cursor = ""; } - } while(!cursor.isEmpty()); + } while (!cursor.isEmpty()); - assertEquals("Paged responses return another row count that non-paged", - indexSize, rowsReturnedAsc); - assertEquals("Paged responses return another row count that non-paged", - indexSize, rowsReturnedDesc); - assertTrue("Paged accumulated result has other rows than non-paged", + assertEquals( + "Paged responses return another row count that non-paged", indexSize, rowsReturnedAsc); + assertEquals( + "Paged responses return another row count that non-paged", indexSize, rowsReturnedDesc); + assertTrue( + "Paged accumulated result has other rows than non-paged", rows.toList().containsAll(rowsPagedAsc.toList())); - assertTrue("Paged accumulated result has other rows than non-paged", + assertTrue( + "Paged accumulated result has other rows than non-paged", 
rows.toList().containsAll(rowsPagedDesc.toList())); for (int row = 0; row < indexSize; row++) { - assertTrue(String.format("Row %d: row order is incorrect", row), + assertTrue( + String.format("Row %d: row order is incorrect", row), rowsPagedAsc.getJSONArray(row).similar(rowsPagedDesc.getJSONArray(indexSize - row - 1))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java index be208cd137..246cbfc4a0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java @@ -40,10 +40,11 @@ public void testFetchSizeLessThanMaxResultWindow() throws IOException { } while (response.has("cursor")); numRows += response.getJSONArray("datarows").length(); - var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) - .getJSONArray("datarows") - .getJSONArray(0) - .get(0); + var countRows = + executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); assertEquals(countRows, numRows); } @@ -62,10 +63,11 @@ public void testQuerySizeLimitDoesNotEffectTotalRowsReturned() throws IOExceptio response = executeCursorQuery(cursor); } while (response.has("cursor")); numRows += response.getJSONArray("datarows").length(); - var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) - .getJSONArray("datarows") - .getJSONArray(0) - .get(0); + var countRows = + executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); assertEquals(countRows, numRows); assertTrue(numRows > querySizeLimit); } @@ -74,12 +76,10 @@ public void testQuerySizeLimitDoesNotEffectTotalRowsReturned() throws IOExceptio public void testQuerySizeLimitDoesNotEffectPageSize() throws IOException { setQuerySizeLimit(3); setMaxResultWindow(TEST_INDEX_PHRASE, 
4); - var response - = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 4); + var response = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 4); assertEquals(4, response.getInt("size")); - var response2 - = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 2); + var response2 = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 2); assertEquals(2, response2.getInt("size")); } @@ -87,11 +87,9 @@ public void testQuerySizeLimitDoesNotEffectPageSize() throws IOException { public void testFetchSizeLargerThanResultWindowFails() throws IOException { final int window = 2; setMaxResultWindow(TEST_INDEX_PHRASE, 2); - assertThrows(ResponseException.class, - () -> executeQueryTemplate("SELECT * FROM %s", - TEST_INDEX_PHRASE, window + 1)); + assertThrows( + ResponseException.class, + () -> executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, window + 1)); resetMaxResultWindow(TEST_INDEX_PHRASE); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java index d0587eab7f..6a9d40e7c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java @@ -26,19 +26,29 @@ protected void init() throws Exception { @Test public void position_function_test() { String query = "SELECT firstname, position('a' IN firstname) FROM %s"; - JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_PEOPLE2)); + JSONObject response = + executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_PEOPLE2)); - verifySchema(response, schema("firstname", null, "keyword"), - schema("position('a' IN firstname)", null, "integer")); + verifySchema( + response, + schema("firstname", null, "keyword"), + schema("position('a' IN firstname)", null, "integer")); assertEquals(12, response.getInt("total")); - 
verifyDataRows(response, - rows("Daenerys", 2), rows("Hattie", 2), - rows("Nanette", 2), rows("Dale", 2), - rows("Elinor", 0), rows("Virginia", 8), - rows("Dillard", 5), rows("Mcgee", 0), - rows("Aurelia", 7), rows("Fulton", 0), - rows("Burton", 0), rows("Josie", 0)); + verifyDataRows( + response, + rows("Daenerys", 2), + rows("Hattie", 2), + rows("Nanette", 2), + rows("Dale", 2), + rows("Elinor", 0), + rows("Virginia", 8), + rows("Dillard", 5), + rows("Mcgee", 0), + rows("Aurelia", 7), + rows("Fulton", 0), + rows("Burton", 0), + rows("Josie", 0)); } @Test @@ -46,20 +56,31 @@ public void position_function_with_nulls_test() { String query = "SELECT str2, position('ee' IN str2) FROM %s"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_CALCS)); - verifySchema(response, schema("str2", null, "keyword"), - schema("position('ee' IN str2)", null, "integer")); + verifySchema( + response, + schema("str2", null, "keyword"), + schema("position('ee' IN str2)", null, "integer")); assertEquals(17, response.getInt("total")); - verifyDataRows(response, - rows("one", 0), rows("two", 0), - rows("three", 4), rows(null, null), - rows("five", 0), rows("six", 0), - rows(null, null), rows("eight", 0), - rows("nine", 0), rows("ten", 0), - rows("eleven", 0), rows("twelve", 0), - rows(null, null), rows("fourteen", 6), - rows("fifteen", 5), rows("sixteen", 5), - rows(null, null)); + verifyDataRows( + response, + rows("one", 0), + rows("two", 0), + rows("three", 4), + rows(null, null), + rows("five", 0), + rows("six", 0), + rows(null, null), + rows("eight", 0), + rows("nine", 0), + rows("ten", 0), + rows("eleven", 0), + rows("twelve", 0), + rows(null, null), + rows("fourteen", 6), + rows("fifteen", 5), + rows("sixteen", 5), + rows(null, null)); } @Test @@ -86,7 +107,8 @@ public void position_function_with_only_fields_as_args_test() { @Test public void position_function_with_function_as_arg_test() { - String query = "SELECT position(upper(str3) IN str1) 
FROM %s WHERE str1 LIKE 'BINDING SUPPLIES'"; + String query = + "SELECT position(upper(str3) IN str1) FROM %s WHERE str1 LIKE 'BINDING SUPPLIES'"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_CALCS)); verifySchema(response, schema("position(upper(str3) IN str1)", null, "integer")); @@ -110,17 +132,21 @@ public void position_function_in_where_clause_test() { public void position_function_with_null_args_test() { String query1 = "SELECT str2, position(null IN str2) FROM %s WHERE str2 IN ('one')"; String query2 = "SELECT str2, position(str2 IN null) FROM %s WHERE str2 IN ('one')"; - JSONObject response1 = executeJdbcRequest(String.format(query1, TestsConstants.TEST_INDEX_CALCS)); - JSONObject response2 = executeJdbcRequest(String.format(query2, TestsConstants.TEST_INDEX_CALCS)); - - verifySchema(response1, - schema("str2", null, "keyword"), - schema("position(null IN str2)", null, "integer")); + JSONObject response1 = + executeJdbcRequest(String.format(query1, TestsConstants.TEST_INDEX_CALCS)); + JSONObject response2 = + executeJdbcRequest(String.format(query2, TestsConstants.TEST_INDEX_CALCS)); + + verifySchema( + response1, + schema("str2", null, "keyword"), + schema("position(null IN str2)", null, "integer")); assertEquals(1, response1.getInt("total")); - verifySchema(response2, - schema("str2", null, "keyword"), - schema("position(str2 IN null)", null, "integer")); + verifySchema( + response2, + schema("str2", null, "keyword"), + schema("position(str2 IN null)", null, "integer")); assertEquals(1, response2.getInt("total")); verifyDataRows(response1, rows("one", null)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java index 38ff32b0d7..8200f64b66 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java @@ 
-3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import org.json.JSONObject; @@ -20,16 +19,21 @@ protected void init() throws Exception { @Test public void testPreparedStatement() { - JSONObject response = new JSONObject( - executeQuery(String.format("{\n" - + " \"query\": \"SELECT state FROM %s WHERE state = ? GROUP BY state\",\n" - + " \"parameters\": [\n" - + " {\n" - + " \"type\": \"string\",\n" - + " \"value\": \"WA\"\n" - + " }\n" - + " ]\n" - + "}", TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + String.format( + "{\n" + + " \"query\": \"SELECT state FROM %s WHERE state = ? GROUP BY state\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"WA\"\n" + + " }\n" + + " ]\n" + + "}", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); assertFalse(response.getJSONArray("datarows").isEmpty()); } @@ -39,5 +43,4 @@ protected String makeRequest(String query) { // Avoid wrap with "query" again return query; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java index e61593eb21..fd8066ea41 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java @@ -13,72 +13,80 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; public class QueryIT extends SQLIntegTestCase { - @Override - public void init() throws IOException { - loadIndex(Index.BEER); - } + @Override + public void init() throws IOException { + loadIndex(Index.BEER); + } - @Test - public void all_fields_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_BEER + " WHERE query('*:taste')"; - JSONObject result = executeJdbcRequest(query); - assertEquals(16, result.getInt("total")); - } + @Test + public void all_fields_test() throws IOException { + String query = "SELECT * FROM " + 
TEST_INDEX_BEER + " WHERE query('*:taste')"; + JSONObject result = executeJdbcRequest(query); + assertEquals(16, result.getInt("total")); + } - @Test - public void mandatory_params_test() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste OR Body:taste')"; - JSONObject result = executeJdbcRequest(query); - assertEquals(16, result.getInt("total")); - } + @Test + public void mandatory_params_test() throws IOException { + String query = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste OR Body:taste')"; + JSONObject result = executeJdbcRequest(query); + assertEquals(16, result.getInt("total")); + } - @Test - public void all_params_test() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:taste', escape=false," - + "allow_leading_wildcard=true, enable_position_increments=true," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='standard', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; - JSONObject result = executeJdbcRequest(query); - assertEquals(8, result.getInt("total")); - } + @Test + public void all_params_test() throws IOException { + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query('Tags:taste'," + + " escape=false,allow_leading_wildcard=true," + + " enable_position_increments=true," + + " fuzziness= 1," + + " fuzzy_rewrite='constant_score'," + + " max_determinized_states = 10000," + + " analyzer='standard'," + + " analyze_wildcard = false," + + " quote_field_suffix = '.exact'," + + " 
auto_generate_synonyms_phrase_query=true," + + " boost = 0.77," + + " quote_analyzer='standard'," + + " phrase_slop=0," + + " rewrite='constant_score'," + + " type='best_fields'," + + " tie_breaker=0.3," + + " time_zone='Canada/Pacific'," + + " default_operator='or'," + + " fuzzy_transpositions = false," + + " lenient = true," + + " fuzzy_max_expansions = 25," + + " minimum_should_match = '2<-25% 9<-3'," + + " fuzzy_prefix_length = 7);"; + JSONObject result = executeJdbcRequest(query); + assertEquals(8, result.getInt("total")); + } - @Test - public void wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; - JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('*:taste')"; - JSONObject result2 = executeJdbcRequest(query2); - assertNotEquals(result2.getInt("total"), result1.getInt("total")); + @Test + public void wildcard_test() throws IOException { + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; + JSONObject result1 = executeJdbcRequest(query1); + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('*:taste')"; + JSONObject result2 = executeJdbcRequest(query2); + assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:tas*');"; - JSONObject result3 = executeJdbcRequest(query3); - assertEquals(8, result3.getInt("total")); + String query3 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:tas*');"; + JSONObject result3 = executeJdbcRequest(query3); + assertEquals(8, result3.getInt("total")); - String query4 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:tas?e');"; - JSONObject result4 = executeJdbcRequest(query3); - assertEquals(8, result4.getInt("total")); - } + String query4 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:tas?e');"; + JSONObject result4 
= executeJdbcRequest(query3); + assertEquals(8, result4.getInt("total")); + } - @Test - public void query_string_and_query_return_the_same_results_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; - JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['Tags'],'taste')"; - JSONObject result2 = executeJdbcRequest(query2); - assertEquals(result2.getInt("total"), result1.getInt("total")); - } + @Test + public void query_string_and_query_return_the_same_results_test() throws IOException { + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; + JSONObject result1 = executeJdbcRequest(query1); + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['Tags'],'taste')"; + JSONObject result2 = executeJdbcRequest(query2); + assertEquals(result2.getInt("total"), result1.getInt("total")); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java index 348889a0cc..3d4e08b4cd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java @@ -20,48 +20,65 @@ public void init() throws IOException { @Test public void all_fields_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_BEER + " WHERE query_string(['*'], 'taste')"; + String query = "SELECT * FROM " + TEST_INDEX_BEER + " WHERE query_string(['*'], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void mandatory_params_test() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE 
query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void all_params_test() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query_string(['Body', Tags, Title], 'taste beer', escape=false," - + "allow_leading_wildcard=true, enable_position_increments=true," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='english', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string(['Body', Tags, Title]," + + " 'taste beer'," + + " escape=false," + + " allow_leading_wildcard=true," + + " enable_position_increments=true," + + " fuzziness= 1," + + " fuzzy_rewrite='constant_score'," + + " max_determinized_states = 10000," + + " analyzer='english'," + + " analyze_wildcard = false," + + " quote_field_suffix = '.exact'," + + " auto_generate_synonyms_phrase_query=true," + + " boost = 0.77," + + " quote_analyzer='standard'," + + " phrase_slop=0," + + " rewrite='constant_score'," + + " type='best_fields'," + + " tie_breaker=0.3," + + " time_zone='Canada/Pacific'," + + " default_operator='or'," + + " fuzzy_transpositions = false," + + " lenient = true," + + " fuzzy_max_expansions = 25," + + " minimum_should_match = '2<-25% 9<-3'," + + " fuzzy_prefix_length = 7);"; JSONObject result = executeJdbcRequest(query); assertEquals(49, result.getInt("total")); } @Test public void wildcard_test() throws IOException { - String query1 = 
"SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['Tags'], 'taste')"; + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['Tags'], 'taste')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['T*'], 'taste')"; + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['T*'], 'taste')"; JSONObject result2 = executeJdbcRequest(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query_string(['*Date'], '2014-01-22');"; + String query3 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['*Date'], '2014-01-22');"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java index 5a16cd3f64..e42b68631f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.is; @@ -25,28 +24,29 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; /** - * The query validation IT only covers test for error cases that not doable in comparison test. - * For all other tests, comparison test should be favored over manual written test like this. + * The query validation IT only covers test for error cases that not doable in comparison test. For + * all other tests, comparison test should be favored over manual written test like this. 
*/ public class QueryValidationIT extends SQLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { loadIndex(Index.ACCOUNT); } - @Ignore("Will add this validation in analyzer later. This test should be enabled once " + - "https://github.com/opensearch-project/sql/issues/910 has been resolved") + @Ignore( + "Will add this validation in analyzer later. This test should be enabled once " + + "https://github.com/opensearch-project/sql/issues/910 has been resolved") @Test public void testNonAggregatedSelectColumnMissingInGroupByClause() throws IOException { expectResponseException() .hasStatusCode(BAD_REQUEST) .hasErrorType("SemanticCheckException") - .containsMessage("Expression [state] that contains non-aggregated column " - + "is not present in group by clause") + .containsMessage( + "Expression [state] that contains non-aggregated column " + + "is not present in group by clause") .whenExecute("SELECT state FROM opensearch-sql_test_index_account GROUP BY age"); } @@ -55,8 +55,9 @@ public void testNonAggregatedSelectColumnPresentWithoutGroupByClause() throws IO expectResponseException() .hasStatusCode(BAD_REQUEST) .hasErrorType("SemanticCheckException") - .containsMessage("Explicit GROUP BY clause is required because expression [state] " - + "contains non-aggregated column") + .containsMessage( + "Explicit GROUP BY clause is required because expression [state] " + + "contains non-aggregated column") .whenExecute("SELECT state, AVG(age) FROM opensearch-sql_test_index_account"); } @@ -87,8 +88,7 @@ public ResponseExceptionAssertion expectResponseException() { /** * Response exception assertion helper to assert property value in OpenSearch ResponseException - * and Response inside. This serves as syntax sugar to improve the readability of test - * code. + * and Response inside. 
This serves as syntax sugar to improve the readability of test code. */ private static class ResponseExceptionAssertion { private final ExpectedException exceptionRule; @@ -100,9 +100,12 @@ private ResponseExceptionAssertion(ExpectedException exceptionRule) { } ResponseExceptionAssertion hasStatusCode(RestStatus code) { - exceptionRule.expect(featureValueOf("statusCode", is(code), - (Function) e -> - RestStatus.fromCode(e.getResponse().getStatusLine().getStatusCode()))); + exceptionRule.expect( + featureValueOf( + "statusCode", + is(code), + (Function) + e -> RestStatus.fromCode(e.getResponse().getStatusLine().getStatusCode()))); return this; } @@ -133,5 +136,4 @@ private static void execute(String query) throws IOException { client().performRequest(request); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java index eb693a4718..9d2861ce98 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE; @@ -26,21 +25,27 @@ public void init() throws IOException { @Test public void rawFormatWithPipeFieldTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE), "raw"); - assertEquals(StringUtils.format( - "firstname|lastname%n" - + "+Amber JOHnny|Duke Willmington+%n" - + "-Hattie|Bond-%n" - + "=Nanette|Bates=%n" - + "@Dale|Adams@%n" - + "@Elinor|\"Ratliff|||\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE), + "raw"); + assertEquals( + StringUtils.format( + "firstname|lastname%n" + + "+Amber JOHnny|Duke Willmington+%n" + + "-Hattie|Bond-%n" + + 
"=Nanette|Bates=%n" + + "@Dale|Adams@%n" + + "@Elinor|\"Ratliff|||\"%n"), result); } @Test public void contentHeaderTest() throws IOException { - String query = String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE); + String query = + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE); String requestBody = makeRequest(query); Request sqlRequest = new Request("POST", "/_plugins/_sql?format=raw"); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java index 26fe735f12..755493c167 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java @@ -24,11 +24,15 @@ public void init() throws IOException { */ @Test public void verify_flags_in_simple_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertNotEquals(result2.getInt("total"), result1.getInt("total")); @@ -44,11 +48,11 @@ public void verify_flags_in_simple_query_string() throws IOException { */ @Test public void verify_escape_in_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=true);"; + String query1 = + "SELECT 
Id FROM " + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=true);"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=false);"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=false);"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(0, result1.getInt("total")); assertEquals(8, result2.getInt("total")); @@ -61,11 +65,15 @@ public void verify_escape_in_query_string() throws IOException { */ @Test public void verify_default_operator_in_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([Title], 'beer taste', default_operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([Title], 'beer taste', default_operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -73,11 +81,15 @@ public void verify_default_operator_in_query_string() throws IOException { @Test public void verify_default_operator_in_simple_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE 
simple_query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -85,11 +97,15 @@ public void verify_default_operator_in_simple_query_string() throws IOException @Test public void verify_default_operator_in_multi_match() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE multi_match([Title], 'beer taste', operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([Title], 'beer taste', operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE multi_match([Title], 'beer taste', operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([Title], 'beer taste', operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -97,11 +113,11 @@ public void verify_default_operator_in_multi_match() throws IOException { @Test public void verify_operator_in_match() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='OR')"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='AND')"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); 
assertEquals(4, result2.getInt("total")); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java index 30f23547ec..6056a1c416 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.google.common.io.Resources; @@ -14,14 +13,12 @@ import java.util.function.Function; import org.junit.Test; -/** - * SQL integration test automated by comparison test framework. - */ +/** SQL integration test automated by comparison test framework. */ public class SQLCorrectnessIT extends CorrectnessTestBase { private static final String ROOT_DIR = "correctness/"; - private static final String[] EXPR_TEST_DIR = { "expressions" }; - private static final String[] QUERY_TEST_DIR = { "queries", "bugfixes" }; + private static final String[] EXPR_TEST_DIR = {"expressions"}; + private static final String[] QUERY_TEST_DIR = {"queries", "bugfixes"}; @Override protected void init() throws Exception { @@ -35,32 +32,30 @@ public void runAllTests() throws Exception { } /** - * Verify queries in files in directories with a converter to preprocess query. - * For example, for expressions it is converted to a SELECT clause before testing. + * Verify queries in files in directories with a converter to preprocess query. For example, for + * expressions it is converted to a SELECT clause before testing. 
*/ @SuppressWarnings("UnstableApiUsage") private void verifyQueries(String[] dirs, Function converter) throws Exception { for (String dir : dirs) { Path dirPath = Paths.get(Resources.getResource(ROOT_DIR + dir).toURI()); Files.walk(dirPath) - .filter(Files::isRegularFile) - .forEach(file -> verifyQueries(file, converter)); + .filter(Files::isRegularFile) + .forEach(file -> verifyQueries(file, converter)); } } - /** - * Comment start with # - */ + /** Comment start with # */ private void verifyQueries(Path file, Function converter) { try { - String[] queries = Files.lines(file) - .filter(line -> !line.startsWith("#")) - .map(converter) - .toArray(String[]::new); + String[] queries = + Files.lines(file) + .filter(line -> !line.startsWith("#")) + .map(converter) + .toArray(String[]::new); verify(queries); } catch (IOException e) { throw new IllegalStateException("Failed to read file: " + file, e); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java index e824b1ab2b..fdd35c47eb 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java @@ -26,6 +26,7 @@ protected void init() throws Exception { } /** + *
    * "query" : {
    *   "from": 0,
    *   "size": 3,
@@ -83,18 +84,24 @@ protected void init() throws Exception {
    *   ],
    *   "track_scores": true
    * }
+   * 
* @throws IOException */ @Test public void scoreQueryExplainTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Douglass'), 100) " + - "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Douglass\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Douglass'), 100) " + + "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Douglass\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":100.0")); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Hall\\\"")); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Hall\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":0.5")); Assert.assertThat(result, containsString("\\\"sort\\\":[{\\\"_score\\\"")); Assert.assertThat(result, containsString("\\\"track_scores\\\":true")); @@ -102,26 +109,32 @@ public void scoreQueryExplainTest() throws IOException { @Test public void scoreQueryTest() throws IOException { - final JSONObject result = new JSONObject(executeQuery(String.format(Locale.ROOT, - "select address, _score from %s " + - "where score(matchQuery(address, 'Douglass'), 100) " + - "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); - verifySchema(result, - schema("address", null, "text"), - schema("_score", null, "float")); - verifyDataRows(result, - rows("154 Douglass Street", 650.1515), - rows("565 Hall Street", 3.2507575)); + final 
JSONObject result = + new JSONObject( + executeQuery( + String.format( + Locale.ROOT, + "select address, _score from %s " + + "where score(matchQuery(address, 'Douglass'), 100) " + + "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); + verifySchema(result, schema("address", null, "text"), schema("_score", null, "float")); + verifyDataRows( + result, rows("154 Douglass Street", 650.1515), rows("565 Hall Street", 3.2507575)); } @Test public void scoreQueryDefaultBoostExplainTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Lane')) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Lane\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Lane')) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Lane\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":1.0")); Assert.assertThat(result, containsString("\\\"sort\\\":[{\\\"_score\\\"")); Assert.assertThat(result, containsString("\\\"track_scores\\\":true")); @@ -129,13 +142,16 @@ public void scoreQueryDefaultBoostExplainTest() throws IOException { @Test public void scoreQueryDefaultBoostQueryTest() throws IOException { - final JSONObject result = new JSONObject(executeQuery(String.format(Locale.ROOT, - "select address, _score from %s " + - "where score(matchQuery(address, 'Powell')) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); - verifySchema(result, - schema("address", null, "text"), - schema("_score", null, "float")); + final JSONObject result = + new JSONObject( + executeQuery( + 
String.format( + Locale.ROOT, + "select address, _score from %s " + + "where score(matchQuery(address, 'Powell')) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); + verifySchema(result, schema("address", null, "text"), schema("_score", null, "float")); verifyDataRows(result, rows("305 Powell Street", 6.501515)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java index 44f4e5ca9c..8742dedbc7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java @@ -31,43 +31,60 @@ public void init() throws IOException { @Test public void test_mandatory_params() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(16, result.getInt("total")); } @Test public void test_all_params() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE simple_query_string(['Body', Tags, Title], 'taste beer', default_operator='or'," - + "analyzer=english, analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, flags='PREFIX'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['Body', Tags, Title], " + + "'taste beer'," + + " default_operator='or'," + + " analyzer=english, " + + " analyze_wildcard = false," + + " quote_field_suffix = '.exact'," + + " 
auto_generate_synonyms_phrase_query=true," + + " boost = 0.77," + + " flags='PREFIX'," + + " fuzzy_transpositions = false," + + " lenient = true," + + " fuzzy_max_expansions = 25," + + " minimum_should_match = '2<-25% 9<-3'," + + " fuzzy_prefix_length = 7);"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(49, result.getInt("total")); } @Test public void verify_wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['Tags'], 'taste')"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE simple_query_string(['Tags'], 'taste')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['T*'], 'taste')"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE simple_query_string(['T*'], 'taste')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE simple_query_string(['*Date'], '2014-01-22');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['*Date'], '2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(10, result.getInt("total")); } @Test public void contentHeaderTest() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; String requestBody = makeRequest(query); From 0dfbc892c3f6f34914661122dcaf04a6a703949f Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Wed, 16 Aug 2023 15:35:35 -0700 Subject: [PATCH 25/42] [Spotless] Applying Google Code Format for prometheus files #13 (#340) (#1973) * added ignorefailures * Spotless apply on Prometheus --------- Signed-off-by: Mitchell Gale Signed-off-by: 
Mitchell Gale --- build.gradle | 1 + prometheus/build.gradle | 3 + .../client/PrometheusClientImpl.java | 66 +- .../constants/PrometheusFieldConstants.java | 4 +- .../QueryExemplarFunctionImplementation.java | 73 +- .../QueryRangeFunctionImplementation.java | 79 +- .../QueryExemplarsTableFunctionResolver.java | 22 +- .../QueryRangeTableFunctionResolver.java | 14 +- .../PrometheusFunctionResponseHandle.java | 12 +- .../QueryExemplarsFunctionResponseHandle.java | 22 +- .../QueryRangeFunctionResponseHandle.java | 27 +- ...ueryExemplarsFunctionTableScanBuilder.java | 9 +- ...eryExemplarsFunctionTableScanOperator.java | 42 +- .../QueryRangeFunctionTableScanBuilder.java | 5 +- .../QueryRangeFunctionTableScanOperator.java | 40 +- .../logical/PrometheusLogicalMetricAgg.java | 41 +- .../logical/PrometheusLogicalMetricScan.java | 12 +- ...PrometheusLogicalPlanOptimizerFactory.java | 17 +- .../logical/rules/MergeAggAndIndexScan.java | 21 +- .../logical/rules/MergeAggAndRelation.java | 20 +- .../logical/rules/MergeFilterAndRelation.java | 20 +- .../PrometheusQueryExemplarsRequest.java | 18 +- .../request/PrometheusQueryRequest.java | 22 +- .../PrometheusDescribeMetricRequest.java | 71 +- .../system/PrometheusListMetricsRequest.java | 45 +- .../system/PrometheusSystemRequest.java | 5 +- .../response/PrometheusResponse.java | 32 +- .../PrometheusMetricDefaultSchema.java | 11 +- .../storage/PrometheusMetricScan.java | 43 +- .../storage/PrometheusMetricTable.java | 49 +- .../storage/PrometheusStorageEngine.java | 15 +- .../storage/PrometheusStorageFactory.java | 73 +- .../storage/QueryExemplarsTable.java | 15 +- .../PrometheusDefaultImplementor.java | 76 +- .../model/PrometheusResponseFieldNames.java | 2 - .../storage/model/QueryRangeParameters.java | 1 - .../querybuilder/AggregationQueryBuilder.java | 45 +- .../SeriesSelectionQueryBuilder.java | 22 +- .../querybuilder/StepParameterResolver.java | 18 +- .../TimeRangeParametersResolver.java | 15 +- 
.../storage/system/PrometheusSystemTable.java | 22 +- .../system/PrometheusSystemTableScan.java | 7 +- .../system/PrometheusSystemTableSchema.java | 33 +- .../prometheus/utils/TableFunctionUtils.java | 52 +- .../client/PrometheusClientImplTest.java | 87 +- ...ryExemplarsFunctionImplementationTest.java | 68 +- .../QueryRangeFunctionImplementationTest.java | 80 +- ...eryExemplarsTableFunctionResolverTest.java | 37 +- .../QueryRangeTableFunctionResolverTest.java | 190 ++-- ...ExemplarsFunctionTableScanBuilderTest.java | 32 +- ...xemplarsFunctionTableScanOperatorTest.java | 103 +- ...ueryRangeFunctionTableScanBuilderTest.java | 23 +- ...eryRangeFunctionTableScanOperatorTest.java | 116 ++- .../logical/PrometheusLogicOptimizerTest.java | 63 +- .../PrometheusDescribeMetricRequestTest.java | 103 +- .../PrometheusListMetricsRequestTest.java | 44 +- .../storage/PrometheusMetricScanTest.java | 183 ++-- .../storage/PrometheusMetricTableTest.java | 889 +++++++++++------- .../storage/PrometheusStorageEngineTest.java | 26 +- .../storage/PrometheusStorageFactoryTest.java | 86 +- .../storage/QueryExemplarsTableTest.java | 13 +- .../StepParameterResolverTest.java | 6 +- .../TimeRangeParametersResolverTest.java | 8 +- .../system/PrometheusSystemTableScanTest.java | 3 +- .../system/PrometheusSystemTableTest.java | 46 +- .../prometheus/utils/LogicalPlanUtils.java | 42 +- .../sql/prometheus/utils/TestUtils.java | 2 +- 67 files changed, 1797 insertions(+), 1695 deletions(-) diff --git a/build.gradle b/build.gradle index a256fd1d22..290a1e1786 100644 --- a/build.gradle +++ b/build.gradle @@ -86,6 +86,7 @@ spotless { target fileTree('.') { include 'datasources/**/*.java', 'core/**/*.java', + 'prometheus/**/*.java', 'sql/**/*.java', 'common/**/*.java', 'ppl/**/*.java' diff --git a/prometheus/build.gradle b/prometheus/build.gradle index e98dfd83e4..0d915a6d4a 100644 --- a/prometheus/build.gradle +++ b/prometheus/build.gradle @@ -13,6 +13,9 @@ repositories { mavenCentral() } 
+checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + dependencies { api project(':core') implementation project(':datasources') diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java index 9472be7487..2bfaaccd47 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java @@ -38,16 +38,18 @@ public PrometheusClientImpl(OkHttpClient okHttpClient, URI uri) { this.uri = uri; } - @Override public JSONObject queryRange(String query, Long start, Long end, String step) throws IOException { - String queryUrl = String.format("%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s", - uri.toString().replaceAll("/$", ""), URLEncoder.encode(query, StandardCharsets.UTF_8), - start, end, step); + String queryUrl = + String.format( + "%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s", + uri.toString().replaceAll("/$", ""), + URLEncoder.encode(query, StandardCharsets.UTF_8), + start, + end, + step); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return jsonObject.getJSONObject("data"); @@ -55,14 +57,14 @@ public JSONObject queryRange(String query, Long start, Long end, String step) th @Override public List getLabels(String metricName) throws IOException { - String queryUrl = String.format("%s/api/v1/labels?%s=%s", - uri.toString().replaceAll("/$", ""), - URLEncoder.encode("match[]", StandardCharsets.UTF_8), - URLEncoder.encode(metricName, StandardCharsets.UTF_8)); + String queryUrl = + String.format( + "%s/api/v1/labels?%s=%s", + 
uri.toString().replaceAll("/$", ""), + URLEncoder.encode("match[]", StandardCharsets.UTF_8), + URLEncoder.encode(metricName, StandardCharsets.UTF_8)); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return toListOfLabels(jsonObject.getJSONArray("data")); @@ -70,28 +72,26 @@ public List getLabels(String metricName) throws IOException { @Override public Map> getAllMetrics() throws IOException { - String queryUrl = String.format("%s/api/v1/metadata", - uri.toString().replaceAll("/$", "")); + String queryUrl = String.format("%s/api/v1/metadata", uri.toString().replaceAll("/$", "")); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); - TypeReference>> typeRef - = new TypeReference<>() {}; + TypeReference>> typeRef = new TypeReference<>() {}; return new ObjectMapper().readValue(jsonObject.getJSONObject("data").toString(), typeRef); } @Override public JSONArray queryExemplars(String query, Long start, Long end) throws IOException { - String queryUrl = String.format("%s/api/v1/query_exemplars?query=%s&start=%s&end=%s", - uri.toString().replaceAll("/$", ""), URLEncoder.encode(query, StandardCharsets.UTF_8), - start, end); + String queryUrl = + String.format( + "%s/api/v1/query_exemplars?query=%s&start=%s&end=%s", + uri.toString().replaceAll("/$", ""), + URLEncoder.encode(query, StandardCharsets.UTF_8), + start, + end); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response 
response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return jsonObject.getJSONArray("data"); @@ -100,8 +100,8 @@ public JSONArray queryExemplars(String query, Long start, Long end) throws IOExc private List toListOfLabels(JSONArray array) { List result = new ArrayList<>(); for (int i = 0; i < array.length(); i++) { - //__name__ is internal label in prometheus representing the metric name. - //Exempting this from labels list as it is not required in any of the operations. + // __name__ is internal label in prometheus representing the metric name. + // Exempting this from labels list as it is not required in any of the operations. if (!"__name__".equals(array.optString(i))) { result.add(array.optString(i)); } @@ -109,7 +109,6 @@ private List toListOfLabels(JSONArray array) { return result; } - private JSONObject readResponse(Response response) throws IOException { if (response.isSuccessful()) { JSONObject jsonObject = new JSONObject(Objects.requireNonNull(response.body()).string()); @@ -120,10 +119,9 @@ private JSONObject readResponse(Response response) throws IOException { } } else { throw new RuntimeException( - String.format("Request to Prometheus is Unsuccessful with : %s", Objects.requireNonNull( - response.body(), "Response body can't be null").string())); + String.format( + "Request to Prometheus is Unsuccessful with : %s", + Objects.requireNonNull(response.body(), "Response body can't be null").string())); } } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java index 88e9df6a88..0f687b3cd1 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java @@ -18,6 +18,6 @@ public class 
PrometheusFieldConstants { public static final String EXEMPLARS_KEY = "exemplars"; public static final String TRACE_ID_KEY = "traceID"; public static final String LABELS_KEY = "labels"; - public static final String TIMESTAMP_KEY = "timestamp"; - public static final String VALUE_KEY = "value"; + public static final String TIMESTAMP_KEY = "timestamp"; + public static final String VALUE_KEY = "value"; } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java index 9d455b3cfc..bbd3a36f5f 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java @@ -28,8 +28,8 @@ import org.opensearch.sql.prometheus.storage.QueryExemplarsTable; import org.opensearch.sql.storage.Table; -public class QueryExemplarFunctionImplementation extends FunctionExpression implements - TableFunctionImplementation { +public class QueryExemplarFunctionImplementation extends FunctionExpression + implements TableFunctionImplementation { private final FunctionName functionName; private final List arguments; @@ -39,10 +39,10 @@ public class QueryExemplarFunctionImplementation extends FunctionExpression impl * Required argument constructor. 
* * @param functionName name of the function - * @param arguments a list of arguments provided + * @param arguments a list of arguments provided */ - public QueryExemplarFunctionImplementation(FunctionName functionName, List arguments, - PrometheusClient prometheusClient) { + public QueryExemplarFunctionImplementation( + FunctionName functionName, List arguments, PrometheusClient prometheusClient) { super(functionName, arguments); this.functionName = functionName; this.arguments = arguments; @@ -51,10 +51,11 @@ public QueryExemplarFunctionImplementation(FunctionName functionName, List valueEnv) { - throw new UnsupportedOperationException(String.format( - "Prometheus defined function [%s] is only " - + "supported in SOURCE clause with prometheus connector catalog", - functionName)); + throw new UnsupportedOperationException( + String.format( + "Prometheus defined function [%s] is only " + + "supported in SOURCE clause with prometheus connector catalog", + functionName)); } @Override @@ -64,10 +65,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -79,27 +85,26 @@ public Table applyArguments() { private PrometheusQueryExemplarsRequest buildExemplarsQueryRequest(List arguments) { PrometheusQueryExemplarsRequest request = new PrometheusQueryExemplarsRequest(); - arguments.forEach(arg -> { - String argName = ((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = 
argValue.valueOf(); - switch (argName) { - case QUERY: - request - .setQuery((String) literalValue.value()); - break; - case STARTTIME: - request.setStartTime(((Number) literalValue.value()).longValue()); - break; - case ENDTIME: - request.setEndTime(((Number) literalValue.value()).longValue()); - break; - default: - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + switch (argName) { + case QUERY: + request.setQuery((String) literalValue.value()); + break; + case STARTTIME: + request.setStartTime(((Number) literalValue.value()).longValue()); + break; + case ENDTIME: + request.setEndTime(((Number) literalValue.value()).longValue()); + break; + default: + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return request; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java index 2d3710037a..0719bd1525 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java @@ -29,8 +29,8 @@ import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; import org.opensearch.sql.storage.Table; -public class QueryRangeFunctionImplementation extends FunctionExpression implements - TableFunctionImplementation { +public class QueryRangeFunctionImplementation extends FunctionExpression + implements TableFunctionImplementation { private final FunctionName 
functionName; private final List arguments; @@ -40,10 +40,10 @@ public class QueryRangeFunctionImplementation extends FunctionExpression impleme * Required argument constructor. * * @param functionName name of the function - * @param arguments a list of expressions + * @param arguments a list of expressions */ - public QueryRangeFunctionImplementation(FunctionName functionName, List arguments, - PrometheusClient prometheusClient) { + public QueryRangeFunctionImplementation( + FunctionName functionName, List arguments, PrometheusClient prometheusClient) { super(functionName, arguments); this.functionName = functionName; this.arguments = arguments; @@ -52,10 +52,11 @@ public QueryRangeFunctionImplementation(FunctionName functionName, List valueEnv) { - throw new UnsupportedOperationException(String.format( - "Prometheus defined function [%s] is only " - + "supported in SOURCE clause with prometheus connector catalog", - functionName)); + throw new UnsupportedOperationException( + String.format( + "Prometheus defined function [%s] is only " + + "supported in SOURCE clause with prometheus connector catalog", + functionName)); } @Override @@ -65,10 +66,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -80,30 +86,29 @@ public Table applyArguments() { private PrometheusQueryRequest buildQueryFromQueryRangeFunction(List arguments) { PrometheusQueryRequest prometheusQueryRequest = new PrometheusQueryRequest(); - arguments.forEach(arg -> { - String argName = 
((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = argValue.valueOf(); - switch (argName) { - case QUERY: - prometheusQueryRequest - .setPromQl((String) literalValue.value()); - break; - case STARTTIME: - prometheusQueryRequest.setStartTime(((Number) literalValue.value()).longValue()); - break; - case ENDTIME: - prometheusQueryRequest.setEndTime(((Number) literalValue.value()).longValue()); - break; - case STEP: - prometheusQueryRequest.setStep(literalValue.value().toString()); - break; - default: - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + switch (argName) { + case QUERY: + prometheusQueryRequest.setPromQl((String) literalValue.value()); + break; + case STARTTIME: + prometheusQueryRequest.setStartTime(((Number) literalValue.value()).longValue()); + break; + case ENDTIME: + prometheusQueryRequest.setEndTime(((Number) literalValue.value()).longValue()); + break; + case STEP: + prometheusQueryRequest.setStep(literalValue.value().toString()); + break; + default: + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return prometheusQueryRequest; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java index a82e5a397a..78d87b0a0b 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java @@ -22,9 +22,9 @@ import org.opensearch.sql.prometheus.functions.implementation.QueryExemplarFunctionImplementation; /** - * This class is for query_exemplars table function resolver {@link FunctionResolver}. - * It takes care of validating function arguments and also creating - * required {@link org.opensearch.sql.expression.function.TableFunctionImplementation} Class. + * This class is for query_exemplars table function resolver {@link FunctionResolver}. It takes care + * of validating function arguments and also creating required {@link + * org.opensearch.sql.expression.function.TableFunctionImplementation} Class. */ @RequiredArgsConstructor public class QueryExemplarsTableFunctionResolver implements FunctionResolver { @@ -41,13 +41,15 @@ public Pair resolve(FunctionSignature unreso final FunctionName functionName = FunctionName.of(QUERY_EXEMPLARS); FunctionSignature functionSignature = new FunctionSignature(FunctionName.of(QUERY_EXEMPLARS), List.of(STRING, LONG, LONG)); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME); - validatePrometheusTableFunctionArguments(arguments, argumentNames); - List namedArguments = getNamedArgumentsOfTableFunction(arguments, argumentNames); - return new QueryExemplarFunctionImplementation(functionName, - namedArguments, prometheusClient); - }; + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME); + validatePrometheusTableFunctionArguments(arguments, argumentNames); + List namedArguments = + getNamedArgumentsOfTableFunction(arguments, argumentNames); + return new QueryExemplarFunctionImplementation( + functionName, namedArguments, prometheusClient); + }; return Pair.of(functionSignature, functionBuilder); } diff --git 
a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java index 8bb2a2d758..8dfa12134e 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java @@ -39,11 +39,14 @@ public Pair resolve(FunctionSignature unreso FunctionSignature functionSignature = new FunctionSignature(functionName, List.of(STRING, LONG, LONG, STRING)); final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME, STEP); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - validatePrometheusTableFunctionArguments(arguments, argumentNames); - List namedArguments = getNamedArgumentsOfTableFunction(arguments, argumentNames); - return new QueryRangeFunctionImplementation(functionName, namedArguments, prometheusClient); - }; + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + validatePrometheusTableFunctionArguments(arguments, argumentNames); + List namedArguments = + getNamedArgumentsOfTableFunction(arguments, argumentNames); + return new QueryRangeFunctionImplementation( + functionName, namedArguments, prometheusClient); + }; return Pair.of(functionSignature, functionBuilder); } @@ -51,5 +54,4 @@ public Pair resolve(FunctionSignature unreso public FunctionName getFunctionName() { return FunctionName.of(QUERY_RANGE); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java index f2cefa85ec..bbc0516df6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java @@ -8,14 +8,10 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Handle Prometheus response. - */ +/** Handle Prometheus response. */ public interface PrometheusFunctionResponseHandle { - /** - * Return true if Prometheus response has more result. - */ + /** Return true if Prometheus response has more result. */ boolean hasNext(); /** @@ -24,8 +20,6 @@ public interface PrometheusFunctionResponseHandle { */ ExprValue next(); - /** - * Return ExecutionEngine.Schema of the Prometheus response. - */ + /** Return ExecutionEngine.Schema of the Prometheus response. */ ExecutionEngine.Schema schema(); } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java index f030ce8f7a..8d1c267a90 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java @@ -44,8 +44,8 @@ public QueryExemplarsFunctionResponseHandle(JSONArray responseArray) { private void constructIteratorAndSchema(JSONArray responseArray) { List columnList = new ArrayList<>(); columnList.add(new ExecutionEngine.Schema.Column(SERIES_LABELS_KEY, SERIES_LABELS_KEY, STRUCT)); - columnList.add(new ExecutionEngine.Schema.Column(EXEMPLARS_KEY, EXEMPLARS_KEY, - ExprCoreType.ARRAY)); + columnList.add( + new ExecutionEngine.Schema.Column(EXEMPLARS_KEY, EXEMPLARS_KEY, ExprCoreType.ARRAY)); this.schema = new ExecutionEngine.Schema(columnList); List result = new ArrayList<>(); for (int i = 0; i < responseArray.length(); i++) { @@ -62,7 +62,8 @@ private void 
constructIteratorAndSchema(JSONArray responseArray) { private ExprValue constructSeriesLabels(JSONObject seriesLabels) { LinkedHashMap seriesLabelsMap = new LinkedHashMap<>(); - seriesLabels.keySet() + seriesLabels + .keySet() .forEach(key -> seriesLabelsMap.put(key, new ExprStringValue(seriesLabels.getString(key)))); return new ExprTupleValue(seriesLabelsMap); } @@ -78,13 +79,13 @@ private ExprValue constructExemplarList(JSONArray exemplars) { private ExprValue constructExemplar(JSONObject exemplarsJSONObject) { LinkedHashMap exemplarHashMap = new LinkedHashMap<>(); - exemplarHashMap.put(LABELS_KEY, - constructLabelsInExemplar(exemplarsJSONObject.getJSONObject(LABELS_KEY))); - exemplarHashMap.put(TIMESTAMP_KEY, - new ExprTimestampValue(Instant.ofEpochMilli((long)( - exemplarsJSONObject.getDouble(TIMESTAMP_KEY) * 1000)))); - exemplarHashMap.put(VALUE_KEY, - new ExprDoubleValue(exemplarsJSONObject.getDouble(VALUE_KEY))); + exemplarHashMap.put( + LABELS_KEY, constructLabelsInExemplar(exemplarsJSONObject.getJSONObject(LABELS_KEY))); + exemplarHashMap.put( + TIMESTAMP_KEY, + new ExprTimestampValue( + Instant.ofEpochMilli((long) (exemplarsJSONObject.getDouble(TIMESTAMP_KEY) * 1000)))); + exemplarHashMap.put(VALUE_KEY, new ExprDoubleValue(exemplarsJSONObject.getDouble(VALUE_KEY))); return new ExprTupleValue(exemplarHashMap); } @@ -106,7 +107,6 @@ public ExprValue next() { return responseIterator.next(); } - @Override public ExecutionEngine.Schema schema() { return schema; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java index a3c68617e8..e10c9d7aff 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java @@ -30,9 +30,7 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Default implementation of QueryRangeFunctionResponseHandle. - */ +/** Default implementation of QueryRangeFunctionResponseHandle. */ public class QueryRangeFunctionResponseHandle implements PrometheusFunctionResponseHandle { private final JSONObject responseObject; @@ -62,25 +60,26 @@ private void constructIterator() { result.add(new ExprTupleValue(linkedHashMap)); } } else { - throw new RuntimeException(String.format("Unexpected Result Type: %s during Prometheus " - + "Response Parsing. 'matrix' resultType is expected", - responseObject.getString("resultType"))); + throw new RuntimeException( + String.format( + "Unexpected Result Type: %s during Prometheus " + + "Response Parsing. 'matrix' resultType is expected", + responseObject.getString("resultType"))); } this.responseIterator = result.iterator(); } - private static void extractTimestampAndValues(JSONArray values, - LinkedHashMap linkedHashMap) { + private static void extractTimestampAndValues( + JSONArray values, LinkedHashMap linkedHashMap) { List timestampList = new ArrayList<>(); List valueList = new ArrayList<>(); for (int j = 0; j < values.length(); j++) { JSONArray value = values.getJSONArray(j); - timestampList.add(new ExprTimestampValue( - Instant.ofEpochMilli((long) (value.getDouble(0) * 1000)))); + timestampList.add( + new ExprTimestampValue(Instant.ofEpochMilli((long) (value.getDouble(0) * 1000)))); valueList.add(new ExprDoubleValue(value.getDouble(1))); } - linkedHashMap.put(TIMESTAMP, - new ExprCollectionValue(timestampList)); + linkedHashMap.put(TIMESTAMP, new ExprCollectionValue(timestampList)); linkedHashMap.put(VALUE, new ExprCollectionValue(valueList)); } @@ -90,12 +89,10 @@ private void constructSchema() { private ExprValue extractLabels(JSONObject 
metric) { LinkedHashMap labelsMap = new LinkedHashMap<>(); - metric.keySet().forEach(key - -> labelsMap.put(key, new ExprStringValue(metric.getString(key)))); + metric.keySet().forEach(key -> labelsMap.put(key, new ExprStringValue(metric.getString(key)))); return new ExprTupleValue(labelsMap); } - private List getColumnList() { List columnList = new ArrayList<>(); columnList.add(new ExecutionEngine.Schema.Column(LABELS, LABELS, ExprCoreType.STRUCT)); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java index 8364173889..7e779eb77c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java @@ -12,9 +12,7 @@ import org.opensearch.sql.storage.TableScanOperator; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * TableScanBuilder for query_exemplars table function of prometheus connector. - */ +/** TableScanBuilder for query_exemplars table function of prometheus connector. 
*/ @AllArgsConstructor public class QueryExemplarsFunctionTableScanBuilder extends TableScanBuilder { @@ -24,8 +22,8 @@ public class QueryExemplarsFunctionTableScanBuilder extends TableScanBuilder { @Override public TableScanOperator build() { - return new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + return new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); } // Since we are determining the schema after table scan, @@ -34,5 +32,4 @@ public TableScanOperator build() { public boolean pushDownProject(LogicalProject project) { return true; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java index 85ba6c854a..1a58429328 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java @@ -22,37 +22,37 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * This class is for QueryExemplars function {@link TableScanOperator}. - * This takes care of getting exemplar data from prometheus by making - * {@link PrometheusQueryExemplarsRequest}. + * This class is for QueryExemplars function {@link TableScanOperator}. This takes care of getting + * exemplar data from prometheus by making {@link PrometheusQueryExemplarsRequest}. 
*/ @RequiredArgsConstructor public class QueryExemplarsFunctionTableScanOperator extends TableScanOperator { private final PrometheusClient prometheusClient; - @Getter - private final PrometheusQueryExemplarsRequest request; + @Getter private final PrometheusQueryExemplarsRequest request; private QueryExemplarsFunctionResponseHandle queryExemplarsFunctionResponseHandle; private static final Logger LOG = LogManager.getLogger(); @Override public void open() { super.open(); - this.queryExemplarsFunctionResponseHandle - = AccessController - .doPrivileged((PrivilegedAction) () -> { - try { - JSONArray responseArray = prometheusClient.queryExemplars( - request.getQuery(), - request.getStartTime(), request.getEndTime()); - return new QueryExemplarsFunctionResponseHandle(responseArray); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from prometheus server: %s", e.getMessage())); - } - }); + this.queryExemplarsFunctionResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONArray responseArray = + prometheusClient.queryExemplars( + request.getQuery(), request.getStartTime(), request.getEndTime()); + return new QueryExemplarsFunctionResponseHandle(responseArray); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format( + "Error fetching data from prometheus server: %s", e.getMessage())); + } + }); } @Override @@ -72,7 +72,9 @@ public ExprValue next() { @Override public String explain() { - return String.format(Locale.ROOT, "query_exemplars(%s, %s, %s)", + return String.format( + Locale.ROOT, + "query_exemplars(%s, %s, %s)", request.getQuery(), request.getStartTime(), request.getEndTime()); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java index 00e2191d09..2d22c0af69 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java @@ -15,9 +15,8 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * TableScanBuilder for query_range table function of prometheus connector. - * we can merge this when we refactor for existing - * ppl queries based on prometheus connector. + * TableScanBuilder for query_range table function of prometheus connector. we can merge this when + * we refactor for existing ppl queries based on prometheus connector. */ @AllArgsConstructor public class QueryRangeFunctionTableScanBuilder extends TableScanBuilder { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java index 68b9b60643..fc3f9f9a9b 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java @@ -23,9 +23,7 @@ import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * This a table scan operator to handle Query Range table function. - */ +/** This a table scan operator to handle Query Range table function. 
*/ @RequiredArgsConstructor public class QueryRangeFunctionTableScanOperator extends TableScanOperator { @@ -39,19 +37,25 @@ public class QueryRangeFunctionTableScanOperator extends TableScanOperator { @Override public void open() { super.open(); - this.prometheusResponseHandle - = AccessController.doPrivileged((PrivilegedAction) () -> { - try { - JSONObject responseObject = prometheusClient.queryRange( - request.getPromQl(), - request.getStartTime(), request.getEndTime(), request.getStep()); - return new QueryRangeFunctionResponseHandle(responseObject); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from prometheus server: %s", e.getMessage())); - } - }); + this.prometheusResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONObject responseObject = + prometheusClient.queryRange( + request.getPromQl(), + request.getStartTime(), + request.getEndTime(), + request.getStep()); + return new QueryRangeFunctionResponseHandle(responseObject); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format( + "Error fetching data from prometheus server: %s", e.getMessage())); + } + }); } @Override @@ -71,7 +75,9 @@ public ExprValue next() { @Override public String explain() { - return String.format(Locale.ROOT, "query_range(%s, %s, %s, %s)", + return String.format( + Locale.ROOT, + "query_range(%s, %s, %s, %s)", request.getPromQl(), request.getStartTime(), request.getEndTime(), diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java index f348c699a1..f7c45f6ad2 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java @@ -20,10 +20,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; - -/** - * Logical Metric Scan along with aggregation Operation. - */ +/** Logical Metric Scan along with aggregation Operation. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) @@ -31,37 +28,29 @@ public class PrometheusLogicalMetricAgg extends LogicalPlan { private final String metricName; - /** - * Filter Condition. - */ - @Setter - private Expression filter; + /** Filter Condition. */ + @Setter private Expression filter; - /** - * Aggregation List. - */ - @Setter - private List aggregatorList; + /** Aggregation List. */ + @Setter private List aggregatorList; - /** - * Group List. - */ - @Setter - private List groupByList; + /** Group List. */ + @Setter private List groupByList; /** * Constructor for LogicalMetricAgg Logical Plan. * - * @param metricName metricName - * @param filter filter + * @param metricName metricName + * @param filter filter * @param aggregatorList aggregatorList - * @param groupByList groupByList. + * @param groupByList groupByList. 
*/ @Builder - public PrometheusLogicalMetricAgg(String metricName, - Expression filter, - List aggregatorList, - List groupByList) { + public PrometheusLogicalMetricAgg( + String metricName, + Expression filter, + List aggregatorList, + List groupByList) { super(ImmutableList.of()); this.metricName = metricName; this.filter = filter; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java index 5e07d6899f..7b28a8a6c9 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java @@ -17,8 +17,8 @@ import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; /** - * Prometheus Logical Metric Scan Operation. - * In an optimized plan this node represents both Relation and Filter Operation. + * Prometheus Logical Metric Scan Operation. In an optimized plan this node represents both Relation + * and Filter Operation. */ @Getter @ToString @@ -27,9 +27,7 @@ public class PrometheusLogicalMetricScan extends LogicalPlan { private final String metricName; - /** - * Filter Condition. - */ + /** Filter Condition. */ private final Expression filter; /** @@ -39,8 +37,7 @@ public class PrometheusLogicalMetricScan extends LogicalPlan { * @param filter filter. 
*/ @Builder - public PrometheusLogicalMetricScan(String metricName, - Expression filter) { + public PrometheusLogicalMetricScan(String metricName, Expression filter) { super(ImmutableList.of()); this.metricName = metricName; this.filter = filter; @@ -50,5 +47,4 @@ public PrometheusLogicalMetricScan(String metricName, public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitNode(this, context); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java index 8a365b2786..ea14be0e0a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java @@ -5,7 +5,6 @@ package org.opensearch.sql.prometheus.planner.logical; - import java.util.Arrays; import lombok.experimental.UtilityClass; import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; @@ -13,20 +12,14 @@ import org.opensearch.sql.prometheus.planner.logical.rules.MergeAggAndRelation; import org.opensearch.sql.prometheus.planner.logical.rules.MergeFilterAndRelation; -/** - * Prometheus storage engine specified logical plan optimizer. - */ +/** Prometheus storage engine specified logical plan optimizer. */ @UtilityClass public class PrometheusLogicalPlanOptimizerFactory { - /** - * Create Prometheus storage specified logical plan optimizer. - */ + /** Create Prometheus storage specified logical plan optimizer. 
*/ public static LogicalPlanOptimizer create() { - return new LogicalPlanOptimizer(Arrays.asList( - new MergeFilterAndRelation(), - new MergeAggAndIndexScan(), - new MergeAggAndRelation() - )); + return new LogicalPlanOptimizer( + Arrays.asList( + new MergeFilterAndRelation(), new MergeAggAndIndexScan(), new MergeAggAndRelation())); } } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java index 76bc6cc840..2594b74eb5 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,9 +19,7 @@ import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricAgg; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricScan; -/** - * Merge Aggregation -- Relation to MetricScanAggregation. - */ +/** Merge Aggregation -- Relation to MetricScanAggregation. */ public class MergeAggAndIndexScan implements Rule { private final Capture capture; @@ -31,22 +28,18 @@ public class MergeAggAndIndexScan implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeAggAndIndexScan. - */ + /** Constructor of MergeAggAndIndexScan. 
*/ public MergeAggAndIndexScan() { this.capture = Capture.newCapture(); - this.pattern = typeOf(LogicalAggregation.class) - .with(source().matching(typeOf(PrometheusLogicalMetricScan.class) - .capturedAs(capture))); + this.pattern = + typeOf(LogicalAggregation.class) + .with(source().matching(typeOf(PrometheusLogicalMetricScan.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalAggregation aggregation, - Captures captures) { + public LogicalPlan apply(LogicalAggregation aggregation, Captures captures) { PrometheusLogicalMetricScan indexScan = captures.get(capture); - return PrometheusLogicalMetricAgg - .builder() + return PrometheusLogicalMetricAgg.builder() .metricName(indexScan.getMetricName()) .filter(indexScan.getFilter()) .aggregatorList(aggregation.getAggregatorList()) diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java index fa9b0c7206..e6170e41f9 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,9 +19,7 @@ import org.opensearch.sql.planner.optimizer.Rule; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricAgg; -/** - * Merge Aggregation -- Relation to IndexScanAggregation. - */ +/** Merge Aggregation -- Relation to IndexScanAggregation. */ public class MergeAggAndRelation implements Rule { private final Capture relationCapture; @@ -31,21 +28,18 @@ public class MergeAggAndRelation implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeAggAndRelation. 
- */ + /** Constructor of MergeAggAndRelation. */ public MergeAggAndRelation() { this.relationCapture = Capture.newCapture(); - this.pattern = typeOf(LogicalAggregation.class) - .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); + this.pattern = + typeOf(LogicalAggregation.class) + .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); } @Override - public LogicalPlan apply(LogicalAggregation aggregation, - Captures captures) { + public LogicalPlan apply(LogicalAggregation aggregation, Captures captures) { LogicalRelation relation = captures.get(relationCapture); - return PrometheusLogicalMetricAgg - .builder() + return PrometheusLogicalMetricAgg.builder() .metricName(relation.getRelationName()) .aggregatorList(aggregation.getAggregatorList()) .groupByList(aggregation.getGroupByList()) diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java index a99eb695be..2013938d73 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -18,21 +17,18 @@ import org.opensearch.sql.planner.optimizer.Rule; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricScan; -/** - * Merge Filter -- Relation to LogicalMetricScan. - */ +/** Merge Filter -- Relation to LogicalMetricScan. */ public class MergeFilterAndRelation implements Rule { private final Capture relationCapture; private final Pattern pattern; - /** - * Constructor of MergeFilterAndRelation. 
- */ + /** Constructor of MergeFilterAndRelation. */ public MergeFilterAndRelation() { this.relationCapture = Capture.newCapture(); - this.pattern = typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); + this.pattern = + typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); } @Override @@ -41,11 +37,9 @@ public Pattern pattern() { } @Override - public LogicalPlan apply(LogicalFilter filter, - Captures captures) { + public LogicalPlan apply(LogicalFilter filter, Captures captures) { LogicalRelation relation = captures.get(relationCapture); - return PrometheusLogicalMetricScan - .builder() + return PrometheusLogicalMetricScan.builder() .metricName(relation.getRelationName()) .filter(filter.getCondition()) .build(); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java index 9cf3d41522..d4eea97c48 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.request; import lombok.AllArgsConstructor; @@ -12,9 +11,7 @@ import lombok.NoArgsConstructor; import lombok.ToString; -/** - * Prometheus metric query request. - */ +/** Prometheus metric query request. */ @EqualsAndHashCode @Data @ToString @@ -22,19 +19,12 @@ @NoArgsConstructor public class PrometheusQueryExemplarsRequest { - /** - * PromQL. - */ + /** PromQL. */ private String query; - /** - * startTime of the query. - */ + /** startTime of the query. */ private Long startTime; - /** - * endTime of the query. - */ + /** endTime of the query. 
*/ private Long endTime; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java index d287ea4d65..e24c27c52a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.request; import lombok.AllArgsConstructor; @@ -12,9 +11,7 @@ import lombok.NoArgsConstructor; import lombok.ToString; -/** - * Prometheus metric query request. - */ +/** Prometheus metric query request. */ @EqualsAndHashCode @Data @ToString @@ -22,24 +19,15 @@ @NoArgsConstructor public class PrometheusQueryRequest { - /** - * PromQL. - */ + /** PromQL. */ private String promQl; - /** - * startTime of the query. - */ + /** startTime of the query. */ private Long startTime; - /** - * endTime of the query. - */ + /** endTime of the query. */ private Long endTime; - /** - * step is the resolution required between startTime and endTime. - */ + /** step is the resolution required between startTime and endTime. 
*/ private String step; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java index 2e0d46b3e8..b6a4e3c49c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java @@ -5,7 +5,6 @@ * */ - package org.opensearch.sql.prometheus.request.system; import static org.opensearch.sql.data.model.ExprValueUtils.stringValue; @@ -31,60 +30,63 @@ import org.opensearch.sql.prometheus.storage.PrometheusMetricDefaultSchema; /** - * Describe Metric metadata request. - * This is triggered in case of both query range table function and relation. - * In case of table function metric name is null. + * Describe Metric metadata request. This is triggered in case of both query range table function + * and relation. In case of table function metric name is null. */ @ToString(onlyExplicitlyIncluded = true) public class PrometheusDescribeMetricRequest implements PrometheusSystemRequest { private final PrometheusClient prometheusClient; - @ToString.Include - private final String metricName; + @ToString.Include private final String metricName; private final DataSourceSchemaName dataSourceSchemaName; private static final Logger LOG = LogManager.getLogger(); /** - * Constructor for Prometheus Describe Metric Request. - * In case of pass through queries like query_range function, - * metric names are optional. + * Constructor for Prometheus Describe Metric Request. In case of pass through queries like + * query_range function, metric names are optional. * - * @param prometheusClient prometheusClient. + * @param prometheusClient prometheusClient. * @param dataSourceSchemaName dataSourceSchemaName. - * @param metricName metricName. 
+ * @param metricName metricName. */ - public PrometheusDescribeMetricRequest(PrometheusClient prometheusClient, - DataSourceSchemaName dataSourceSchemaName, - @NonNull String metricName) { + public PrometheusDescribeMetricRequest( + PrometheusClient prometheusClient, + DataSourceSchemaName dataSourceSchemaName, + @NonNull String metricName) { this.prometheusClient = prometheusClient; this.metricName = metricName; this.dataSourceSchemaName = dataSourceSchemaName; } - /** - * Get the mapping of field and type. - * Returns labels and default schema fields. + * Get the mapping of field and type. Returns labels and default schema fields. * * @return mapping of field and type. */ public Map getFieldTypes() { Map fieldTypes = new HashMap<>(); - AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - prometheusClient.getLabels(metricName) - .forEach(label -> fieldTypes.put(label, ExprCoreType.STRING)); - } catch (IOException e) { - LOG.error("Error while fetching labels for {} from prometheus: {}", - metricName, e.getMessage()); - throw new RuntimeException(String.format("Error while fetching labels " - + "for %s from prometheus: %s", metricName, e.getMessage())); - } - return null; - }); + AccessController.doPrivileged( + (PrivilegedAction>) + () -> { + try { + prometheusClient + .getLabels(metricName) + .forEach(label -> fieldTypes.put(label, ExprCoreType.STRING)); + } catch (IOException e) { + LOG.error( + "Error while fetching labels for {} from prometheus: {}", + metricName, + e.getMessage()); + throw new RuntimeException( + String.format( + "Error while fetching labels " + "for %s from prometheus: %s", + metricName, e.getMessage())); + } + return null; + }); fieldTypes.putAll(PrometheusMetricDefaultSchema.DEFAULT_MAPPING.getMapping()); return fieldTypes; } @@ -93,14 +95,17 @@ public Map getFieldTypes() { public List search() { List results = new ArrayList<>(); for (Map.Entry entry : getFieldTypes().entrySet()) { - results.add(row(entry.getKey(), 
entry.getValue().legacyTypeName().toLowerCase(), - dataSourceSchemaName)); + results.add( + row( + entry.getKey(), + entry.getValue().legacyTypeName().toLowerCase(), + dataSourceSchemaName)); } return results; } - private ExprTupleValue row(String fieldName, String fieldType, - DataSourceSchemaName dataSourceSchemaName) { + private ExprTupleValue row( + String fieldName, String fieldType, DataSourceSchemaName dataSourceSchemaName) { LinkedHashMap valueMap = new LinkedHashMap<>(); valueMap.put("TABLE_CATALOG", stringValue(dataSourceSchemaName.getDataSourceName())); valueMap.put("TABLE_SCHEMA", stringValue(dataSourceSchemaName.getSchemaName())); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java index f5d2a44340..0e6c2bb2c6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java @@ -34,28 +34,33 @@ public class PrometheusListMetricsRequest implements PrometheusSystemRequest { private static final Logger LOG = LogManager.getLogger(); - @Override public List search() { - return AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - Map> result = prometheusClient.getAllMetrics(); - return result.keySet() - .stream() - .map(x -> { - MetricMetadata metricMetadata = result.get(x).get(0); - return row(x, metricMetadata.getType(), - metricMetadata.getUnit(), metricMetadata.getHelp()); - }) - .collect(Collectors.toList()); - } catch (IOException e) { - LOG.error("Error while fetching metric list for from prometheus: {}", - e.getMessage()); - throw new RuntimeException(String.format("Error while fetching metric list " - + "for from prometheus: %s", e.getMessage())); - } - }); - + return AccessController.doPrivileged( + 
(PrivilegedAction>) + () -> { + try { + Map> result = prometheusClient.getAllMetrics(); + return result.keySet().stream() + .map( + x -> { + MetricMetadata metricMetadata = result.get(x).get(0); + return row( + x, + metricMetadata.getType(), + metricMetadata.getUnit(), + metricMetadata.getHelp()); + }) + .collect(Collectors.toList()); + } catch (IOException e) { + LOG.error( + "Error while fetching metric list for from prometheus: {}", e.getMessage()); + throw new RuntimeException( + String.format( + "Error while fetching metric list " + "for from prometheus: %s", + e.getMessage())); + } + }); } private ExprTupleValue row(String metricName, String tableType, String unit, String help) { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java index e68ad22c30..6972a9390c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java @@ -10,9 +10,7 @@ import java.util.List; import org.opensearch.sql.data.model.ExprValue; -/** - * Prometheus system request query to get metadata Info. - */ +/** Prometheus system request query to get metadata Info. */ public interface PrometheusSystemRequest { /** @@ -21,5 +19,4 @@ public interface PrometheusSystemRequest { * @return list of ExprValue. 
*/ List search(); - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java index ca250125e6..339d882f5a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java @@ -42,13 +42,12 @@ public class PrometheusResponse implements Iterable { /** * Constructor. * - * @param responseObject Prometheus responseObject. - * @param prometheusResponseFieldNames data model which - * contains field names for the metric measurement - * and timestamp fieldName. + * @param responseObject Prometheus responseObject. + * @param prometheusResponseFieldNames data model which contains field names for the metric + * measurement and timestamp fieldName. */ - public PrometheusResponse(JSONObject responseObject, - PrometheusResponseFieldNames prometheusResponseFieldNames) { + public PrometheusResponse( + JSONObject responseObject, PrometheusResponseFieldNames prometheusResponseFieldNames) { this.responseObject = responseObject; this.prometheusResponseFieldNames = prometheusResponseFieldNames; } @@ -66,18 +65,22 @@ public Iterator iterator() { for (int j = 0; j < values.length(); j++) { LinkedHashMap linkedHashMap = new LinkedHashMap<>(); JSONArray val = values.getJSONArray(j); - linkedHashMap.put(prometheusResponseFieldNames.getTimestampFieldName(), + linkedHashMap.put( + prometheusResponseFieldNames.getTimestampFieldName(), new ExprTimestampValue(Instant.ofEpochMilli((long) (val.getDouble(0) * 1000)))); - linkedHashMap.put(prometheusResponseFieldNames.getValueFieldName(), getValue(val, 1, - prometheusResponseFieldNames.getValueType())); + linkedHashMap.put( + prometheusResponseFieldNames.getValueFieldName(), + getValue(val, 1, prometheusResponseFieldNames.getValueType())); insertLabels(linkedHashMap, metric); 
result.add(new ExprTupleValue(linkedHashMap)); } } } else { - throw new RuntimeException(String.format("Unexpected Result Type: %s during Prometheus " - + "Response Parsing. 'matrix' resultType is expected", - responseObject.getString(RESULT_TYPE_KEY))); + throw new RuntimeException( + String.format( + "Unexpected Result Type: %s during Prometheus " + + "Response Parsing. 'matrix' resultType is expected", + responseObject.getString(RESULT_TYPE_KEY))); } return result.iterator(); } @@ -103,12 +106,11 @@ private String getKey(String key) { } else { return this.prometheusResponseFieldNames.getGroupByList().stream() .filter(expression -> expression.getDelegated() instanceof ReferenceExpression) - .filter(expression - -> ((ReferenceExpression) expression.getDelegated()).getAttr().equals(key)) + .filter( + expression -> ((ReferenceExpression) expression.getDelegated()).getAttr().equals(key)) .findFirst() .map(NamedExpression::getName) .orElse(key); } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java index 790189d903..f0933eee9d 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java @@ -20,12 +20,11 @@ @Getter @RequiredArgsConstructor public enum PrometheusMetricDefaultSchema { - - DEFAULT_MAPPING(new ImmutableMap.Builder() - .put(TIMESTAMP, ExprCoreType.TIMESTAMP) - .put(VALUE, ExprCoreType.DOUBLE) - .build()); + DEFAULT_MAPPING( + new ImmutableMap.Builder() + .put(TIMESTAMP, ExprCoreType.TIMESTAMP) + .put(VALUE, ExprCoreType.DOUBLE) + .build()); private final Map mapping; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java index 7f75cb3c07..598e388914 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java @@ -23,26 +23,19 @@ import org.opensearch.sql.prometheus.storage.model.PrometheusResponseFieldNames; import org.opensearch.sql.storage.TableScanOperator; -/** - * Prometheus metric scan operator. - */ +/** Prometheus metric scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class PrometheusMetricScan extends TableScanOperator { private final PrometheusClient prometheusClient; - @EqualsAndHashCode.Include - @Getter - @Setter - @ToString.Include + @EqualsAndHashCode.Include @Getter @Setter @ToString.Include private PrometheusQueryRequest request; private Iterator iterator; - @Setter - private PrometheusResponseFieldNames prometheusResponseFieldNames; - + @Setter private PrometheusResponseFieldNames prometheusResponseFieldNames; private static final Logger LOG = LogManager.getLogger(); @@ -60,17 +53,25 @@ public PrometheusMetricScan(PrometheusClient prometheusClient) { @Override public void open() { super.open(); - this.iterator = AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - JSONObject responseObject = prometheusClient.queryRange( - request.getPromQl(), - request.getStartTime(), request.getEndTime(), request.getStep()); - return new PrometheusResponse(responseObject, prometheusResponseFieldNames).iterator(); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException("Error fetching data from prometheus server. 
" + e.getMessage()); - } - }); + this.iterator = + AccessController.doPrivileged( + (PrivilegedAction>) + () -> { + try { + JSONObject responseObject = + prometheusClient.queryRange( + request.getPromQl(), + request.getStartTime(), + request.getEndTime(), + request.getStep()); + return new PrometheusResponse(responseObject, prometheusResponseFieldNames) + .iterator(); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + "Error fetching data from prometheus server. " + e.getMessage()); + } + }); } @Override diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java index 4844e1f6db..1124e93608 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.storage; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS; @@ -26,40 +25,30 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Prometheus table (metric) implementation. - * This can be constructed from a metric Name - * or from PrometheusQueryRequest In case of query_range table function. + * Prometheus table (metric) implementation. This can be constructed from a metric Name or from + * PrometheusQueryRequest In case of query_range table function. */ public class PrometheusMetricTable implements Table { private final PrometheusClient prometheusClient; - @Getter - private final String metricName; - - @Getter - private final PrometheusQueryRequest prometheusQueryRequest; + @Getter private final String metricName; + @Getter private final PrometheusQueryRequest prometheusQueryRequest; - /** - * The cached mapping of field and type in index. 
- */ + /** The cached mapping of field and type in index. */ private Map cachedFieldTypes = null; - /** - * Constructor only with metric name. - */ + /** Constructor only with metric name. */ public PrometheusMetricTable(PrometheusClient prometheusService, @Nonnull String metricName) { this.prometheusClient = prometheusService; this.metricName = metricName; this.prometheusQueryRequest = null; } - /** - * Constructor for entire promQl Request. - */ - public PrometheusMetricTable(PrometheusClient prometheusService, - @Nonnull PrometheusQueryRequest prometheusQueryRequest) { + /** Constructor for entire promQl Request. */ + public PrometheusMetricTable( + PrometheusClient prometheusService, @Nonnull PrometheusQueryRequest prometheusQueryRequest) { this.prometheusClient = prometheusService; this.metricName = null; this.prometheusQueryRequest = prometheusQueryRequest; @@ -67,14 +56,12 @@ public PrometheusMetricTable(PrometheusClient prometheusService, @Override public boolean exists() { - throw new UnsupportedOperationException( - "Prometheus metric exists operation is not supported"); + throw new UnsupportedOperationException("Prometheus metric exists operation is not supported"); } @Override public void create(Map schema) { - throw new UnsupportedOperationException( - "Prometheus metric create operation is not supported"); + throw new UnsupportedOperationException("Prometheus metric create operation is not supported"); } @Override @@ -82,11 +69,10 @@ public Map getFieldTypes() { if (cachedFieldTypes == null) { if (metricName != null) { cachedFieldTypes = - new PrometheusDescribeMetricRequest(prometheusClient, null, - metricName).getFieldTypes(); + new PrometheusDescribeMetricRequest(prometheusClient, null, metricName).getFieldTypes(); } else { - cachedFieldTypes = new HashMap<>(PrometheusMetricDefaultSchema.DEFAULT_MAPPING - .getMapping()); + cachedFieldTypes = + new HashMap<>(PrometheusMetricDefaultSchema.DEFAULT_MAPPING.getMapping()); cachedFieldTypes.put(LABELS, 
ExprCoreType.STRING); } } @@ -95,8 +81,7 @@ public Map getFieldTypes() { @Override public PhysicalPlan implement(LogicalPlan plan) { - PrometheusMetricScan metricScan = - new PrometheusMetricScan(prometheusClient); + PrometheusMetricScan metricScan = new PrometheusMetricScan(prometheusClient); return plan.accept(new PrometheusDefaultImplementor(), metricScan); } @@ -105,8 +90,8 @@ public LogicalPlan optimize(LogicalPlan plan) { return PrometheusLogicalPlanOptimizerFactory.create().optimize(plan); } - //Only handling query_range function for now. - //we need to move PPL implementations to ScanBuilder in future. + // Only handling query_range function for now. + // we need to move PPL implementations to ScanBuilder in future. @Override public TableScanBuilder createScanBuilder() { if (metricName == null) { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java index 738eb023b6..29fc15e2d0 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java @@ -24,10 +24,7 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; - -/** - * Prometheus storage engine implementation. - */ +/** Prometheus storage engine implementation. 
*/ @RequiredArgsConstructor public class PrometheusStorageEngine implements StorageEngine { @@ -52,16 +49,14 @@ public Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableNam } } - private Table resolveInformationSchemaTable(DataSourceSchemaName dataSourceSchemaName, - String tableName) { + private Table resolveInformationSchemaTable( + DataSourceSchemaName dataSourceSchemaName, String tableName) { if (SystemIndexUtils.TABLE_NAME_FOR_TABLES_INFO.equals(tableName)) { - return new PrometheusSystemTable(prometheusClient, - dataSourceSchemaName, SystemIndexUtils.TABLE_INFO); + return new PrometheusSystemTable( + prometheusClient, dataSourceSchemaName, SystemIndexUtils.TABLE_INFO); } else { throw new SemanticCheckException( String.format("Information Schema doesn't contain %s table", tableName)); } } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java index b3ecd25af3..edae263ce3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java @@ -56,23 +56,20 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { return new DataSource( - metadata.getName(), - DataSourceType.PROMETHEUS, - getStorageEngine(metadata.getProperties())); + metadata.getName(), DataSourceType.PROMETHEUS, getStorageEngine(metadata.getProperties())); } - - //Need to refactor to a separate Validator class. + // Need to refactor to a separate Validator class. 
private void validateDataSourceConfigProperties(Map dataSourceMetadataConfig) throws URISyntaxException { if (dataSourceMetadataConfig.get(AUTH_TYPE) != null) { - AuthenticationType authenticationType - = AuthenticationType.get(dataSourceMetadataConfig.get(AUTH_TYPE)); + AuthenticationType authenticationType = + AuthenticationType.get(dataSourceMetadataConfig.get(AUTH_TYPE)); if (AuthenticationType.BASICAUTH.equals(authenticationType)) { validateMissingFields(dataSourceMetadataConfig, Set.of(URI, USERNAME, PASSWORD)); } else if (AuthenticationType.AWSSIGV4AUTH.equals(authenticationType)) { - validateMissingFields(dataSourceMetadataConfig, Set.of(URI, ACCESS_KEY, SECRET_KEY, - REGION)); + validateMissingFields( + dataSourceMetadataConfig, Set.of(URI, ACCESS_KEY, SECRET_KEY, REGION)); } } else { validateMissingFields(dataSourceMetadataConfig, Set.of(URI)); @@ -83,20 +80,21 @@ private void validateDataSourceConfigProperties(Map dataSourceMe StorageEngine getStorageEngine(Map requiredConfig) { PrometheusClient prometheusClient; prometheusClient = - AccessController.doPrivileged((PrivilegedAction) () -> { - try { - validateDataSourceConfigProperties(requiredConfig); - return new PrometheusClientImpl(getHttpClient(requiredConfig), - new URI(requiredConfig.get(URI))); - } catch (URISyntaxException e) { - throw new IllegalArgumentException( - String.format("Invalid URI in prometheus properties: %s", e.getMessage())); - } - }); + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + validateDataSourceConfigProperties(requiredConfig); + return new PrometheusClientImpl( + getHttpClient(requiredConfig), new URI(requiredConfig.get(URI))); + } catch (URISyntaxException e) { + throw new IllegalArgumentException( + String.format("Invalid URI in prometheus properties: %s", e.getMessage())); + } + }); return new PrometheusStorageEngine(prometheusClient); } - private OkHttpClient getHttpClient(Map config) { OkHttpClient.Builder okHttpClient = new 
OkHttpClient.Builder(); okHttpClient.callTimeout(1, TimeUnit.MINUTES); @@ -104,16 +102,19 @@ private OkHttpClient getHttpClient(Map config) { if (config.get(AUTH_TYPE) != null) { AuthenticationType authenticationType = AuthenticationType.get(config.get(AUTH_TYPE)); if (AuthenticationType.BASICAUTH.equals(authenticationType)) { - okHttpClient.addInterceptor(new BasicAuthenticationInterceptor(config.get(USERNAME), - config.get(PASSWORD))); + okHttpClient.addInterceptor( + new BasicAuthenticationInterceptor(config.get(USERNAME), config.get(PASSWORD))); } else if (AuthenticationType.AWSSIGV4AUTH.equals(authenticationType)) { - okHttpClient.addInterceptor(new AwsSigningInterceptor( - new AWSStaticCredentialsProvider( - new BasicAWSCredentials(config.get(ACCESS_KEY), config.get(SECRET_KEY))), - config.get(REGION), "aps")); + okHttpClient.addInterceptor( + new AwsSigningInterceptor( + new AWSStaticCredentialsProvider( + new BasicAWSCredentials(config.get(ACCESS_KEY), config.get(SECRET_KEY))), + config.get(REGION), + "aps")); } else { throw new IllegalArgumentException( - String.format("AUTH Type : %s is not supported with Prometheus Connector", + String.format( + "AUTH Type : %s is not supported with Prometheus Connector", config.get(AUTH_TYPE))); } } @@ -132,13 +133,14 @@ private void validateMissingFields(Map config, Set field } StringBuilder errorStringBuilder = new StringBuilder(); if (missingFields.size() > 0) { - errorStringBuilder.append(String.format( - "Missing %s fields in the Prometheus connector properties.", missingFields)); + errorStringBuilder.append( + String.format( + "Missing %s fields in the Prometheus connector properties.", missingFields)); } if (invalidLengthFields.size() > 0) { - errorStringBuilder.append(String.format( - "Fields %s exceeds more than 1000 characters.", invalidLengthFields)); + errorStringBuilder.append( + String.format("Fields %s exceeds more than 1000 characters.", invalidLengthFields)); } if (errorStringBuilder.length() > 0) { 
throw new IllegalArgumentException(errorStringBuilder.toString()); @@ -148,8 +150,9 @@ private void validateMissingFields(Map config, Set field private void validateURI(Map config) throws URISyntaxException { URI uri = new URI(config.get(URI)); String host = uri.getHost(); - if (host == null || (!(DomainValidator.getInstance().isValid(host) - || DomainValidator.getInstance().isValidLocalTld(host)))) { + if (host == null + || (!(DomainValidator.getInstance().isValid(host) + || DomainValidator.getInstance().isValidLocalTld(host)))) { throw new IllegalArgumentException( String.format("Invalid hostname in the uri: %s", config.get(URI))); } else { @@ -158,10 +161,10 @@ private void validateURI(Map config) throws URISyntaxException { Matcher matcher = allowHostsPattern.matcher(host); if (!matcher.matches()) { throw new IllegalArgumentException( - String.format("Disallowed hostname in the uri: %s. Validate with %s config", + String.format( + "Disallowed hostname in the uri: %s. Validate with %s config", config.get(URI), Settings.Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue())); } } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java index dcb87c2cce..9ce8ae85fb 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java @@ -20,20 +20,16 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * This is {@link Table} for querying exemplars in prometheus Table. - * Since {@link PrometheusMetricTable} is overloaded with query_range and normal - * PPL metric queries. Created a separate table for handling - * {@link PrometheusQueryExemplarsRequest} + * This is {@link Table} for querying exemplars in prometheus Table. 
Since {@link + * PrometheusMetricTable} is overloaded with query_range and normal PPL metric queries. Created a + * separate table for handling {@link PrometheusQueryExemplarsRequest} */ @RequiredArgsConstructor public class QueryExemplarsTable implements Table { - @Getter - private final PrometheusClient prometheusClient; - - @Getter - private final PrometheusQueryExemplarsRequest exemplarsRequest; + @Getter private final PrometheusClient prometheusClient; + @Getter private final PrometheusQueryExemplarsRequest exemplarsRequest; @Override public Map getFieldTypes() { @@ -49,5 +45,4 @@ public PhysicalPlan implement(LogicalPlan plan) { public TableScanBuilder createScanBuilder() { return new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java index 6d426d13c8..f83a97dc06 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java @@ -29,13 +29,9 @@ import org.opensearch.sql.prometheus.storage.querybuilder.StepParameterResolver; import org.opensearch.sql.prometheus.storage.querybuilder.TimeRangeParametersResolver; -/** - * Default Implementor of Logical plan for prometheus. - */ +/** Default Implementor of Logical plan for prometheus. 
*/ @RequiredArgsConstructor -public class PrometheusDefaultImplementor - extends DefaultImplementor { - +public class PrometheusDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitNode(LogicalPlan plan, PrometheusMetricScan context) { @@ -44,62 +40,64 @@ public PhysicalPlan visitNode(LogicalPlan plan, PrometheusMetricScan context) { } else if (plan instanceof PrometheusLogicalMetricAgg) { return visitIndexAggregation((PrometheusLogicalMetricAgg) plan, context); } else { - throw new IllegalStateException(StringUtils.format("unexpected plan node type %s", - plan.getClass())); + throw new IllegalStateException( + StringUtils.format("unexpected plan node type %s", plan.getClass())); } } - /** - * Implement PrometheusLogicalMetricScan. - */ - public PhysicalPlan visitIndexScan(PrometheusLogicalMetricScan node, - PrometheusMetricScan context) { + /** Implement PrometheusLogicalMetricScan. */ + public PhysicalPlan visitIndexScan( + PrometheusLogicalMetricScan node, PrometheusMetricScan context) { String query = SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); context.getRequest().setPromQl(query); setTimeRangeParameters(node.getFilter(), context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), null)); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), context.getRequest().getEndTime(), null)); return context; } - /** - * Implement PrometheusLogicalMetricAgg. - */ - public PhysicalPlan visitIndexAggregation(PrometheusLogicalMetricAgg node, - PrometheusMetricScan context) { + /** Implement PrometheusLogicalMetricAgg. 
*/ + public PhysicalPlan visitIndexAggregation( + PrometheusLogicalMetricAgg node, PrometheusMetricScan context) { setTimeRangeParameters(node.getFilter(), context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), node.getGroupByList())); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), + context.getRequest().getEndTime(), + node.getGroupByList())); String step = context.getRequest().getStep(); - String seriesSelectionQuery - = SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); + String seriesSelectionQuery = + SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); - String aggregateQuery - = AggregationQueryBuilder.build(node.getAggregatorList(), - node.getGroupByList()); + String aggregateQuery = + AggregationQueryBuilder.build(node.getAggregatorList(), node.getGroupByList()); String finalQuery = String.format(aggregateQuery, seriesSelectionQuery + "[" + step + "]"); context.getRequest().setPromQl(finalQuery); - //Since prometheus response doesn't have any fieldNames in its output. - //the field names are sent to PrometheusResponse constructor via context. + // Since prometheus response doesn't have any fieldNames in its output. + // the field names are sent to PrometheusResponse constructor via context. 
setPrometheusResponseFieldNames(node, context); return context; } @Override - public PhysicalPlan visitRelation(LogicalRelation node, - PrometheusMetricScan context) { + public PhysicalPlan visitRelation(LogicalRelation node, PrometheusMetricScan context) { PrometheusMetricTable prometheusMetricTable = (PrometheusMetricTable) node.getTable(); String query = SeriesSelectionQueryBuilder.build(node.getRelationName(), null); context.getRequest().setPromQl(query); setTimeRangeParameters(null, context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), null)); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), context.getRequest().getEndTime(), null)); return context; } @@ -110,8 +108,8 @@ private void setTimeRangeParameters(Expression filter, PrometheusMetricScan cont context.getRequest().setEndTime(timeRange.getSecond()); } - private void setPrometheusResponseFieldNames(PrometheusLogicalMetricAgg node, - PrometheusMetricScan context) { + private void setPrometheusResponseFieldNames( + PrometheusLogicalMetricAgg node, PrometheusMetricScan context) { Optional spanExpression = getSpanExpression(node.getGroupByList()); if (spanExpression.isEmpty()) { throw new RuntimeException( @@ -133,6 +131,4 @@ private Optional getSpanExpression(List namedE .filter(expression -> expression.getDelegated() instanceof SpanExpression) .findFirst(); } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java index d3a6ef184f..303ace7906 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java @@ -17,7 +17,6 @@ import 
org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.NamedExpression; - @Getter @Setter public class PrometheusResponseFieldNames { @@ -26,5 +25,4 @@ public class PrometheusResponseFieldNames { private ExprType valueType = DOUBLE; private String timestampFieldName = TIMESTAMP; private List groupByList; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java index 86ca99cea8..02187c5662 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java @@ -21,5 +21,4 @@ public class QueryRangeParameters { private Long start; private Long end; private String step; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java index a141707077..540e2d8cf4 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java @@ -18,49 +18,51 @@ import org.opensearch.sql.expression.span.SpanExpression; /** - * This class builds aggregation query for the given stats commands. - * In the generated query a placeholder(%s) is added in place of metric selection query - * and later replaced by metric selection query. + * This class builds aggregation query for the given stats commands. In the generated query a + * placeholder(%s) is added in place of metric selection query and later replaced by metric + * selection query. 
*/ @NoArgsConstructor public class AggregationQueryBuilder { - private static final Set allowedStatsFunctions = Set.of( - BuiltinFunctionName.MAX.getName().getFunctionName(), - BuiltinFunctionName.MIN.getName().getFunctionName(), - BuiltinFunctionName.COUNT.getName().getFunctionName(), - BuiltinFunctionName.SUM.getName().getFunctionName(), - BuiltinFunctionName.AVG.getName().getFunctionName() - ); - + private static final Set allowedStatsFunctions = + Set.of( + BuiltinFunctionName.MAX.getName().getFunctionName(), + BuiltinFunctionName.MIN.getName().getFunctionName(), + BuiltinFunctionName.COUNT.getName().getFunctionName(), + BuiltinFunctionName.SUM.getName().getFunctionName(), + BuiltinFunctionName.AVG.getName().getFunctionName()); /** * Build Aggregation query from series selector query from expression. * * @return query string. */ - public static String build(List namedAggregatorList, - List groupByList) { + public static String build( + List namedAggregatorList, List groupByList) { if (namedAggregatorList.size() > 1) { throw new RuntimeException( "Prometheus Catalog doesn't multiple aggregations in stats command"); } - if (!allowedStatsFunctions - .contains(namedAggregatorList.get(0).getFunctionName().getFunctionName())) { - throw new RuntimeException(String.format( - "Prometheus Catalog only supports %s aggregations.", allowedStatsFunctions)); + if (!allowedStatsFunctions.contains( + namedAggregatorList.get(0).getFunctionName().getFunctionName())) { + throw new RuntimeException( + String.format( + "Prometheus Catalog only supports %s aggregations.", allowedStatsFunctions)); } StringBuilder aggregateQuery = new StringBuilder(); - aggregateQuery.append(namedAggregatorList.get(0).getFunctionName().getFunctionName()) + aggregateQuery + .append(namedAggregatorList.get(0).getFunctionName().getFunctionName()) .append(" "); if (groupByList != null && !groupByList.isEmpty()) { - groupByList = groupByList.stream() - .filter(expression -> !(expression.getDelegated() 
instanceof SpanExpression)) - .collect(Collectors.toList()); + groupByList = + groupByList.stream() + .filter(expression -> !(expression.getDelegated() instanceof SpanExpression)) + .collect(Collectors.toList()); if (groupByList.size() > 0) { aggregateQuery.append("by("); aggregateQuery.append( @@ -78,5 +80,4 @@ public static String build(List namedAggregatorList, .append("(%s))"); return aggregateQuery.toString(); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java index 461b5341f8..d824fcb5b3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.storage.querybuilder; - import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TIMESTAMP; import java.util.stream.Collectors; @@ -19,14 +18,10 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * This class builds metric selection query from the filter condition - * and metric name. - */ +/** This class builds metric selection query from the filter condition and metric name. */ @NoArgsConstructor public class SeriesSelectionQueryBuilder { - /** * Build Prometheus series selector query from expression. 
* @@ -35,8 +30,8 @@ public class SeriesSelectionQueryBuilder { */ public static String build(String metricName, Expression filterCondition) { if (filterCondition != null) { - SeriesSelectionExpressionNodeVisitor seriesSelectionExpressionNodeVisitor - = new SeriesSelectionExpressionNodeVisitor(); + SeriesSelectionExpressionNodeVisitor seriesSelectionExpressionNodeVisitor = + new SeriesSelectionExpressionNodeVisitor(); String selectorQuery = filterCondition.accept(seriesSelectionExpressionNodeVisitor, null); if (selectorQuery != null) { return metricName + "{" + selectorQuery + "}"; @@ -54,9 +49,9 @@ public String visitFunction(FunctionExpression func, Object context) { .filter(StringUtils::isNotEmpty) .collect(Collectors.joining(" , ")); } else if ((BuiltinFunctionName.LTE.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.GTE.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.LESS.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.GREATER.getName().equals(func.getFunctionName())) + || BuiltinFunctionName.GTE.getName().equals(func.getFunctionName()) + || BuiltinFunctionName.LESS.getName().equals(func.getFunctionName()) + || BuiltinFunctionName.GREATER.getName().equals(func.getFunctionName())) && ((ReferenceExpression) func.getArguments().get(0)).getAttr().equals(TIMESTAMP)) { return null; } else if (BuiltinFunctionName.EQUAL.getName().equals(func.getFunctionName())) { @@ -65,11 +60,10 @@ public String visitFunction(FunctionExpression func, Object context) { + func.getArguments().get(1); } else { throw new RuntimeException( - String.format("Prometheus Datasource doesn't support %s " - + "in where command.", + String.format( + "Prometheus Datasource doesn't support %s " + "in where command.", func.getFunctionName().getFunctionName())); } } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java index 2078950a5d..4c23ea9086 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java @@ -15,25 +15,20 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.span.SpanExpression; -/** - * This class resolves step parameter required for - * query_range api of prometheus. - */ +/** This class resolves step parameter required for query_range api of prometheus. */ @NoArgsConstructor public class StepParameterResolver { /** - * Extract step from groupByList or apply heuristic arithmetic - * on endTime and startTime. - * + * Extract step from groupByList or apply heuristic arithmetic on endTime and startTime. * * @param startTime startTime. * @param endTime endTime. * @param groupByList groupByList. * @return Step String. 
*/ - public static String resolve(@NonNull Long startTime, @NonNull Long endTime, - List groupByList) { + public static String resolve( + @NonNull Long startTime, @NonNull Long endTime, List groupByList) { Optional spanExpression = getSpanExpression(groupByList); if (spanExpression.isPresent()) { if (StringUtils.isEmpty(spanExpression.get().getUnit().getName())) { @@ -48,7 +43,7 @@ public static String resolve(@NonNull Long startTime, @NonNull Long endTime, } private static Optional getSpanExpression( - List namedExpressionList) { + List namedExpressionList) { if (namedExpressionList == null) { return Optional.empty(); } @@ -57,7 +52,4 @@ private static Optional getSpanExpression( .map(expression -> (SpanExpression) expression.getDelegated()) .findFirst(); } - - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java index b462f6bafe..c7766f22d6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java @@ -22,16 +22,14 @@ @NoArgsConstructor public class TimeRangeParametersResolver extends ExpressionNodeVisitor { - private Long startTime; private Long endTime; /** - * Build Range Query Parameters from filter expression. - * If the filter condition consists of @timestamp, startTime and - * endTime are derived. or else it will be defaulted to now() and now()-1hr. - * If one of starttime and endtime are provided, the other will be derived from them - * by fixing the time range duration to 1hr. + * Build Range Query Parameters from filter expression. If the filter condition consists + * of @timestamp, startTime and endTime are derived. or else it will be defaulted to now() and + * now()-1hr. 
If one of starttime and endtime are provided, the other will be derived from them by + * fixing the time range duration to 1hr. * * @param filterCondition expression. * @return query string @@ -72,13 +70,10 @@ public Void visitFunction(FunctionExpression func, Object context) { } } } else { - func.getArguments() - .stream() + func.getArguments().stream() .filter(arg -> arg instanceof FunctionExpression) .forEach(arg -> visitFunction((FunctionExpression) arg, context)); } return null; } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java index dca946da57..b5557e7298 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java @@ -5,7 +5,6 @@ package org.opensearch.sql.prometheus.storage.system; - import static org.opensearch.sql.utils.SystemIndexUtils.systemTable; import com.google.common.annotations.VisibleForTesting; @@ -25,13 +24,9 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; -/** - * Prometheus System Table Implementation. - */ +/** Prometheus System Table Implementation. */ public class PrometheusSystemTable implements Table { - /** - * System Index Name. - */ + /** System Index Name. 
*/ private final Pair systemIndexBundle; private final DataSourceSchemaName dataSourceSchemaName; @@ -54,8 +49,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public class PrometheusSystemTableDefaultImplementor - extends DefaultImplementor { + public class PrometheusSystemTableDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitRelation(LogicalRelation node, Object context) { @@ -67,12 +61,14 @@ private Pair buildIndexBun PrometheusClient client, String indexName) { SystemIndexUtils.SystemTable systemTable = systemTable(indexName); if (systemTable.isSystemInfoTable()) { - return Pair.of(PrometheusSystemTableSchema.SYS_TABLE_TABLES, + return Pair.of( + PrometheusSystemTableSchema.SYS_TABLE_TABLES, new PrometheusListMetricsRequest(client, dataSourceSchemaName)); } else { - return Pair.of(PrometheusSystemTableSchema.SYS_TABLE_MAPPINGS, - new PrometheusDescribeMetricRequest(client, - dataSourceSchemaName, systemTable.getTableName())); + return Pair.of( + PrometheusSystemTableSchema.SYS_TABLE_MAPPINGS, + new PrometheusDescribeMetricRequest( + client, dataSourceSchemaName, systemTable.getTableName())); } } } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java index 5c0bc656fe..907e8a0c15 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java @@ -13,16 +13,13 @@ import org.opensearch.sql.prometheus.request.system.PrometheusSystemRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * Prometheus table scan operator. - */ +/** Prometheus table scan operator. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class PrometheusSystemTableScan extends TableScanOperator { - @EqualsAndHashCode.Include - private final PrometheusSystemRequest request; + @EqualsAndHashCode.Include private final PrometheusSystemRequest request; private Iterator iterator; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java index 668a208c79..9272731dce 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java @@ -18,22 +18,23 @@ @Getter @RequiredArgsConstructor public enum PrometheusSystemTableSchema { - - SYS_TABLE_TABLES(new ImmutableMap.Builder() - .put("TABLE_CATALOG", STRING) - .put("TABLE_SCHEMA", STRING) - .put("TABLE_NAME", STRING) - .put("TABLE_TYPE", STRING) - .put("UNIT", STRING) - .put("REMARKS", STRING) - .build()), - SYS_TABLE_MAPPINGS(new ImmutableMap.Builder() - .put("TABLE_CATALOG", STRING) - .put("TABLE_SCHEMA", STRING) - .put("TABLE_NAME", STRING) - .put("COLUMN_NAME", STRING) - .put("DATA_TYPE", STRING) - .build()); + SYS_TABLE_TABLES( + new ImmutableMap.Builder() + .put("TABLE_CATALOG", STRING) + .put("TABLE_SCHEMA", STRING) + .put("TABLE_NAME", STRING) + .put("TABLE_TYPE", STRING) + .put("UNIT", STRING) + .put("REMARKS", STRING) + .build()), + SYS_TABLE_MAPPINGS( + new ImmutableMap.Builder() + .put("TABLE_CATALOG", STRING) + .put("TABLE_SCHEMA", STRING) + .put("TABLE_NAME", STRING) + .put("COLUMN_NAME", STRING) + .put("DATA_TYPE", STRING) + .build()); private final Map mapping; } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java index 35edc83614..24bec1ede3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java @@ -16,52 +16,54 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.NamedArgumentExpression; -/** - * Utility class for common table function methods. - */ +/** Utility class for common table function methods. */ @UtilityClass public class TableFunctionUtils { /** - * Validates if function arguments are valid - * in both the cases when the arguments are passed by position or name. + * Validates if function arguments are valid in both the cases when the arguments are passed by + * position or name. * * @param arguments arguments of function provided in the input order. * @param argumentNames ordered argument names of the function. */ - public static void validatePrometheusTableFunctionArguments(List arguments, - List argumentNames) { - Boolean argumentsPassedByName = arguments.stream() - .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - Boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + public static void validatePrometheusTableFunctionArguments( + List arguments, List argumentNames) { + Boolean argumentsPassedByName = + arguments.stream() + .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + Boolean argumentsPassedByPosition = + arguments.stream() + .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); if (!(argumentsPassedByName || argumentsPassedByPosition)) { throw new SemanticCheckException("Arguments should be either passed by name or position"); } if (arguments.size() != argumentNames.size()) { throw new SemanticCheckException( 
- generateErrorMessageForMissingArguments(argumentsPassedByPosition, arguments, - argumentNames)); + generateErrorMessageForMissingArguments( + argumentsPassedByPosition, arguments, argumentNames)); } } /** - * Get Named Arguments of Table Function Arguments. - * If they are passed by position create new ones or else return the same arguments passed. + * Get Named Arguments of Table Function Arguments. If they are passed by position create new ones + * or else return the same arguments passed. * * @param arguments arguments of function provided in the input order. * @param argumentNames ordered argument names of the function. */ - public static List getNamedArgumentsOfTableFunction(List arguments, - List argumentNames) { - boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + public static List getNamedArgumentsOfTableFunction( + List arguments, List argumentNames) { + boolean argumentsPassedByPosition = + arguments.stream() + .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); if (argumentsPassedByPosition) { List namedArguments = new ArrayList<>(); for (int i = 0; i < arguments.size(); i++) { - namedArguments.add(new NamedArgumentExpression(argumentNames.get(i), - ((NamedArgumentExpression) arguments.get(i)).getValue())); + namedArguments.add( + new NamedArgumentExpression( + argumentNames.get(i), ((NamedArgumentExpression) arguments.get(i)).getValue())); } return namedArguments; } @@ -73,17 +75,17 @@ private static String generateErrorMessageForMissingArguments( List arguments, List argumentNames) { if (areArgumentsPassedByPosition) { - return String.format("Missing arguments:[%s]", + return String.format( + "Missing arguments:[%s]", String.join(",", argumentNames.subList(arguments.size(), argumentNames.size()))); } else { Set requiredArguments = new HashSet<>(argumentNames); Set providedArguments = - arguments.stream().map(expression -> 
((NamedArgumentExpression) expression).getArgName()) + arguments.stream() + .map(expression -> ((NamedArgumentExpression) expression).getArgName()) .collect(Collectors.toSet()); requiredArguments.removeAll(providedArguments); return String.format("Missing arguments:[%s]", String.join(",", requiredArguments)); } } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java index b26a45e301..735a1a1052 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java @@ -43,7 +43,6 @@ public class PrometheusClientImplTest { private MockWebServer mockWebServer; private PrometheusClient prometheusClient; - @BeforeEach void setUp() throws IOException { this.mockWebServer = new MockWebServer(); @@ -52,13 +51,13 @@ void setUp() throws IOException { new PrometheusClientImpl(new OkHttpClient(), mockWebServer.url("").uri().normalize()); } - @Test @SneakyThrows void testQueryRange() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("query_range_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("query_range_response.json")); mockWebServer.enqueue(mockResponse); JSONObject jsonObject = prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP); assertTrue(new JSONObject(getJson("query_range_result.json")).similar(jsonObject)); @@ -69,13 +68,15 @@ void testQueryRange() { @Test @SneakyThrows void testQueryRangeWith2xxStatusAndError() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("error_response.json")); + MockResponse mockResponse = 
+ new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("error_response.json")); mockWebServer.enqueue(mockResponse); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, - () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); + RuntimeException runtimeException = + assertThrows( + RuntimeException.class, + () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); assertEquals("Error", runtimeException.getMessage()); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyQueryRangeCall(recordedRequest); @@ -84,13 +85,15 @@ void testQueryRangeWith2xxStatusAndError() { @Test @SneakyThrows void testQueryRangeWithNon2xxError() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setResponseCode(400); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setResponseCode(400); mockWebServer.enqueue(mockResponse); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, - () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); + RuntimeException runtimeException = + assertThrows( + RuntimeException.class, + () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); assertTrue( runtimeException.getMessage().contains("Request to Prometheus is Unsuccessful with :")); RecordedRequest recordedRequest = mockWebServer.takeRequest(); @@ -100,16 +103,20 @@ void testQueryRangeWithNon2xxError() { @Test @SneakyThrows void testGetLabel() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("get_labels_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("get_labels_response.json")); mockWebServer.enqueue(mockResponse); List 
response = prometheusClient.getLabels(METRIC_NAME); - assertEquals(new ArrayList() {{ - add("call"); - add("code"); - } - }, response); + assertEquals( + new ArrayList() { + { + add("call"); + add("code"); + } + }, + response); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyGetLabelsCall(recordedRequest); } @@ -117,30 +124,34 @@ void testGetLabel() { @Test @SneakyThrows void testGetAllMetrics() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("all_metrics_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("all_metrics_response.json")); mockWebServer.enqueue(mockResponse); Map> response = prometheusClient.getAllMetrics(); Map> expected = new HashMap<>(); - expected.put("go_gc_duration_seconds", - Collections.singletonList(new MetricMetadata("summary", - "A summary of the pause duration of garbage collection cycles.", ""))); - expected.put("go_goroutines", - Collections.singletonList(new MetricMetadata("gauge", - "Number of goroutines that currently exist.", ""))); + expected.put( + "go_gc_duration_seconds", + Collections.singletonList( + new MetricMetadata( + "summary", "A summary of the pause duration of garbage collection cycles.", ""))); + expected.put( + "go_goroutines", + Collections.singletonList( + new MetricMetadata("gauge", "Number of goroutines that currently exist.", ""))); assertEquals(expected, response); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyGetAllMetricsCall(recordedRequest); } - @Test @SneakyThrows void testQueryExemplars() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("query_exemplars_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + 
.setBody(getJson("query_exemplars_response.json")); mockWebServer.enqueue(mockResponse); JSONArray jsonArray = prometheusClient.queryExemplars(QUERY, STARTTIME, ENDTIME); assertTrue(new JSONArray(getJson("query_exemplars_result.json")).similar(jsonArray)); diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java index d6e4a5cef4..6009d3229c 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java @@ -25,29 +25,31 @@ import org.opensearch.sql.prometheus.request.PrometheusQueryExemplarsRequest; import org.opensearch.sql.prometheus.storage.QueryExemplarsTable; - @ExtendWith(MockitoExtension.class) class QueryExemplarsFunctionImplementationTest { - @Mock - private PrometheusClient client; - + @Mock private PrometheusClient client; @Test void testValueOfAndTypeAndToString() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = 
assertThrows(UnsupportedOperationException.class, - () -> queryExemplarFunctionImplementation.valueOf()); - assertEquals("Prometheus defined function [query_exemplars] is only " - + "supported in SOURCE clause with prometheus connector catalog", exception.getMessage()); - assertEquals("query_exemplars(query=\"http_latency\", starttime=12345, endtime=12345)", + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, + () -> queryExemplarFunctionImplementation.valueOf()); + assertEquals( + "Prometheus defined function [query_exemplars] is only " + + "supported in SOURCE clause with prometheus connector catalog", + exception.getMessage()); + assertEquals( + "query_exemplars(query=\"http_latency\", starttime=12345, endtime=12345)", queryExemplarFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, queryExemplarFunctionImplementation.type()); } @@ -55,15 +57,15 @@ void testValueOfAndTypeAndToString() { @Test void testApplyArguments() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(1234))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(1234))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - QueryExemplarsTable queryExemplarsTable - = (QueryExemplarsTable) queryExemplarFunctionImplementation.applyArguments(); + QueryExemplarsTable queryExemplarsTable = + (QueryExemplarsTable) queryExemplarFunctionImplementation.applyArguments(); 
assertNotNull(queryExemplarsTable.getExemplarsRequest()); PrometheusQueryExemplarsRequest request = queryExemplarsTable.getExemplarsRequest(); assertEquals("http_latency", request.getQuery()); @@ -74,17 +76,17 @@ void testApplyArguments() { @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("end_time", DSL.literal(1234))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("end_time", DSL.literal(1234))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> queryExemplarFunctionImplementation.applyArguments()); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> queryExemplarFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:end_time", exception.getMessage()); } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java index 48337e3f02..288bc35b0f 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java @@ -26,29 +26,31 @@ 
import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; - @ExtendWith(MockitoExtension.class) class QueryRangeFunctionImplementationTest { - @Mock - private PrometheusClient client; - + @Mock private PrometheusClient client; @Test void testValueOfAndTypeAndToString() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = assertThrows(UnsupportedOperationException.class, - () -> queryRangeFunctionImplementation.valueOf()); - assertEquals("Prometheus defined function [query_range] is only " - + "supported in SOURCE clause with prometheus connector catalog", exception.getMessage()); - assertEquals("query_range(query=\"http_latency\", starttime=12345, endtime=12345, step=14)", + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, () -> queryRangeFunctionImplementation.valueOf()); + assertEquals( + "Prometheus defined function [query_range] is only " + + "supported in SOURCE clause with prometheus connector catalog", + exception.getMessage()); + assertEquals( + 
"query_range(query=\"http_latency\", starttime=12345, endtime=12345, step=14)", queryRangeFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, queryRangeFunctionImplementation.type()); } @@ -56,19 +58,20 @@ void testValueOfAndTypeAndToString() { @Test void testApplyArguments() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(1234)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) queryRangeFunctionImplementation.applyArguments(); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(1234)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) queryRangeFunctionImplementation.applyArguments(); assertNull(prometheusMetricTable.getMetricName()); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); - PrometheusQueryRequest prometheusQueryRequest - = prometheusMetricTable.getPrometheusQueryRequest(); + PrometheusQueryRequest prometheusQueryRequest = + prometheusMetricTable.getPrometheusQueryRequest(); assertEquals("http_latency", prometheusQueryRequest.getPromQl().toString()); assertEquals(12345, prometheusQueryRequest.getStartTime()); assertEquals(1234, prometheusQueryRequest.getEndTime()); @@ -78,17 +81,18 @@ void 
testApplyArguments() { @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("end_time", DSL.literal(1234)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> queryRangeFunctionImplementation.applyArguments()); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("end_time", DSL.literal(1234)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> queryRangeFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:end_time", exception.getMessage()); } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java index 3e26b46c8f..af8ebf48e2 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java @@ -35,34 +35,34 @@ @ExtendWith(MockitoExtension.class) class 
QueryExemplarsTableFunctionResolverTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - QueryExemplarsTableFunctionResolver queryExemplarsTableFunctionResolver - = new QueryExemplarsTableFunctionResolver(client); + QueryExemplarsTableFunctionResolver queryExemplarsTableFunctionResolver = + new QueryExemplarsTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_exemplars"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryExemplarsTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryExemplarsTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryExemplarsTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) 
functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryExemplarFunctionImplementation); - QueryExemplarsTable queryExemplarsTable - = (QueryExemplarsTable) functionImplementation.applyArguments(); + QueryExemplarsTable queryExemplarsTable = + (QueryExemplarsTable) functionImplementation.applyArguments(); assertNotNull(queryExemplarsTable.getExemplarsRequest()); PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = queryExemplarsTable.getExemplarsRequest(); @@ -70,5 +70,4 @@ void testResolve() { assertEquals(12345L, prometheusQueryExemplarsRequest.getStartTime()); assertEquals(12345L, prometheusQueryExemplarsRequest.getEndTime()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java index 2a36600379..48050bcb15 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java @@ -37,35 +37,35 @@ @ExtendWith(MockitoExtension.class) class QueryRangeTableFunctionResolverTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - 
DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -77,29 +77,31 @@ void testResolve() { @Test void testArgumentsPassedByPosition() { - QueryRangeTableFunctionResolver 
queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument(null, DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable 
prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -109,32 +111,33 @@ void testArgumentsPassedByPosition() { assertEquals("14", prometheusQueryRequest.getStep()); } - @Test void testArgumentsPassedByNameWithDifferentOrder() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14)), - DSL.namedArgument("starttime", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14)), + DSL.namedArgument("starttime", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - 
TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -146,70 +149,81 @@ void testArgumentsPassedByNameWithDifferentOrder() { @Test void testMixedArgumentTypes() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, 
expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Arguments should be either passed by name or position", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByName() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("step", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("step", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, 
queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[endtime,starttime]", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByPosition() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument(null, DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, 
expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[endtime,step]", exception.getMessage()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java index 6fd782b417..bb7806f824 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.functions.scan; - import static org.opensearch.sql.prometheus.constants.TestConstants.ENDTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; @@ -22,40 +21,35 @@ public class QueryExemplarsFunctionTableScanBuilderTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder - = new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); - TableScanOperator queryExemplarsFunctionTableScanOperator - = queryExemplarsFunctionTableScanBuilder.build(); + 
QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder = + new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); + TableScanOperator queryExemplarsFunctionTableScanOperator = + queryExemplarsFunctionTableScanBuilder.build(); Assertions.assertNotNull(queryExemplarsFunctionTableScanOperator); - Assertions.assertTrue(queryExemplarsFunctionTableScanOperator - instanceof QueryExemplarsFunctionTableScanOperator); + Assertions.assertTrue( + queryExemplarsFunctionTableScanOperator instanceof QueryExemplarsFunctionTableScanOperator); } @Test void testPushProject() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder - = new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); - Assertions.assertTrue(queryExemplarsFunctionTableScanBuilder - .pushDownProject(logicalProject)); + QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder = + new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); + Assertions.assertTrue(queryExemplarsFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java index d4e31d4d1e..5b8cf34fc2 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java @@ -41,22 +41,21 
@@ @ExtendWith(MockitoExtension.class) public class QueryExemplarsFunctionTableScanOperatorTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testQueryResponseIterator() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_result.json"))); @@ -68,24 +67,28 @@ void testQueryResponseIterator() { seriesLabelsHashMap.put("service", new ExprStringValue("bar")); seriesLabelsHashMap.put("job", new ExprStringValue("prometheus")); LinkedHashMap exemplarMap = new LinkedHashMap<>(); - exemplarMap.put("labels", new ExprTupleValue(new LinkedHashMap<>() { - { - put("traceID", new ExprStringValue("EpTxMJ40fUus7aGY")); - } - }) - ); + exemplarMap.put( + "labels", + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("traceID", new ExprStringValue("EpTxMJ40fUus7aGY")); + } + })); exemplarMap.put("timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1600096945479L))); exemplarMap.put("value", new ExprDoubleValue(6)); List exprValueList = new ArrayList<>(); exprValueList.add(new ExprTupleValue(exemplarMap)); ExprCollectionValue exemplars = new ExprCollectionValue(exprValueList); ExprTupleValue 
seriesLabels = new ExprTupleValue(seriesLabelsHashMap); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("seriesLabels", seriesLabels); - put("exemplars", exemplars); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("seriesLabels", seriesLabels); + put("exemplars", exemplars); + } + }); assertEquals(firstRow, queryExemplarsFunctionTableScanOperator.next()); } @@ -93,15 +96,15 @@ void testQueryResponseIterator() { @Test @SneakyThrows void testEmptyQueryWithNoMatrixKeyInResultJson() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_empty_result.json"))); @@ -113,15 +116,15 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { @SneakyThrows void testQuerySchema() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator 
queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_result.json"))); @@ -140,53 +143,53 @@ void testQuerySchema() { @SneakyThrows void testEmptyQueryWithException() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryExemplarsFunctionTableScanOperator::open); - assertEquals("Error fetching data from prometheus server: Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryExemplarsFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from prometheus server: Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new 
PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); - Assertions.assertEquals("query_exemplars(test_query, 1664767694133, 1664771294133)", + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); + Assertions.assertEquals( + "query_exemplars(test_query, 1664767694133, 1664771294133)", queryExemplarsFunctionTableScanOperator.explain()); } @Test @SneakyThrows void testClose() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); queryExemplarsFunctionTableScanOperator.close(); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java index 
8532a35395..dca79d6905 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.functions.scan; - import static org.opensearch.sql.prometheus.constants.TestConstants.ENDTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; @@ -23,11 +22,9 @@ public class QueryRangeFunctionTableScanBuilderTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { @@ -37,13 +34,13 @@ void testBuild() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder - = new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); - TableScanOperator queryRangeFunctionTableScanOperator - = queryRangeFunctionTableScanBuilder.build(); + QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder = + new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); + TableScanOperator queryRangeFunctionTableScanOperator = + queryRangeFunctionTableScanBuilder.build(); Assertions.assertNotNull(queryRangeFunctionTableScanOperator); - Assertions.assertTrue(queryRangeFunctionTableScanOperator - instanceof QueryRangeFunctionTableScanOperator); + Assertions.assertTrue( + queryRangeFunctionTableScanOperator instanceof QueryRangeFunctionTableScanOperator); } @Test @@ -54,8 +51,8 @@ void testPushProject() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanBuilder 
queryRangeFunctionTableScanBuilder - = new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder = + new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); Assertions.assertTrue(queryRangeFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java index b476471153..e59a2bf7c4 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java @@ -45,8 +45,7 @@ @ExtendWith(MockitoExtension.class) class QueryRangeFunctionTableScanOperatorTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows @@ -58,41 +57,63 @@ void testQueryResponseIterator() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("query_range_result.json"))); queryRangeFunctionTableScanOperator.open(); Assertions.assertTrue(queryRangeFunctionTableScanOperator.hasNext()); - LinkedHashMap labelsMap = new LinkedHashMap<>() {{ - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new 
ExprStringValue("prometheus")); - }}; - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(LABELS, new ExprTupleValue(labelsMap)); - put(TIMESTAMP, new ExprCollectionValue(Collections - .singletonList(new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); - put(VALUE, new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(1)))); - } - }); + LinkedHashMap labelsMap = + new LinkedHashMap<>() { + { + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }; + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(LABELS, new ExprTupleValue(labelsMap)); + put( + TIMESTAMP, + new ExprCollectionValue( + Collections.singletonList( + new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); + put( + VALUE, + new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(1)))); + } + }); assertEquals(firstRow, queryRangeFunctionTableScanOperator.next()); Assertions.assertTrue(queryRangeFunctionTableScanOperator.hasNext()); - LinkedHashMap labelsMap2 = new LinkedHashMap<>() {{ - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - }}; - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(LABELS, new ExprTupleValue(labelsMap2)); - put(TIMESTAMP, new ExprCollectionValue(Collections - .singletonList(new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); - put(VALUE, new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(0)))); - } - }); + LinkedHashMap labelsMap2 = + new LinkedHashMap<>() { + { + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }; + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(LABELS, new 
ExprTupleValue(labelsMap2)); + put( + TIMESTAMP, + new ExprCollectionValue( + Collections.singletonList( + new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); + put( + VALUE, + new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(0)))); + } + }); assertEquals(secondRow, queryRangeFunctionTableScanOperator.next()); Assertions.assertFalse(queryRangeFunctionTableScanOperator.hasNext()); } @@ -106,16 +127,17 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("no_matrix_query_range_result.json"))); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); assertEquals( "Unexpected Result Type: vector during Prometheus Response Parsing. 
" - + "'matrix' resultType is expected", runtimeException.getMessage()); + + "'matrix' resultType is expected", + runtimeException.getMessage()); } @Test @@ -127,8 +149,8 @@ void testQuerySchema() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("query_range_result.json"))); @@ -150,18 +172,17 @@ void testEmptyQueryWithException() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); - assertEquals("Error fetching data from prometheus server: Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from prometheus server: Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { @@ -171,10 +192,11 @@ void testExplain() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator 
queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); - Assertions.assertEquals("query_range(test_query, 1664767694133, 1664771294133, 14)", + Assertions.assertEquals( + "query_range(test_query, 1664767694133, 1664771294133, 14)", queryRangeFunctionTableScanOperator.explain()); } @@ -187,8 +209,8 @@ void testClose() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); queryRangeFunctionTableScanOperator.close(); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java index a1d1cef91d..33c48e2f2d 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java @@ -32,60 +32,50 @@ @ExtendWith(MockitoExtension.class) public class PrometheusLogicOptimizerTest { - @Mock - private Table table; + @Mock private Table table; @Test void project_filter_merge_with_relation() { assertEquals( project( - indexScan("prometheus_http_total_requests", - DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200")))) - ), + indexScan( + "prometheus_http_total_requests", + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))))), optimize( project( filter( 
relation("prometheus_http_total_requests", table), - DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))) - )) - ) - ); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))))))); } @Test void aggregation_merge_relation() { assertEquals( project( - indexScanAgg("prometheus_http_total_requests", ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + indexScanAgg( + "prometheus_http_total_requests", + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), ImmutableList.of(DSL.named("code", DSL.ref("code", STRING)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), optimize( project( aggregation( relation("prometheus_http_total_requests", table), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("code", - DSL.ref("code", STRING)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))) - ) - ); + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("code", DSL.ref("code", STRING)))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))))); } - @Test void aggregation_merge_filter_relation() { assertEquals( project( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)))), DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))), optimize( @@ -94,25 +84,16 @@ void aggregation_merge_filter_relation() { filter( 
relation("prometheus_http_total_requests", table), DSL.and( - DSL.equal(DSL.ref("code", STRING), - DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), - DSL.literal(stringValue("/ready/")))) - ), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("job", - DSL.ref("job", STRING)))), - DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))) - ) - ); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)))), + DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))))); } - private LogicalPlan optimize(LogicalPlan plan) { final LogicalPlanOptimizer optimizer = PrometheusLogicalPlanOptimizerFactory.create(); return optimizer.optimize(plan); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java index dfc9aee7dc..9add7896cf 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java @@ -37,54 +37,61 @@ @ExtendWith(MockitoExtension.class) public class PrometheusDescribeMetricRequestTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testGetFieldTypes() { - when(prometheusClient.getLabels(METRIC_NAME)).thenReturn(new ArrayList() {{ - add("call"); - add("code"); - } - }); - Map expected = new HashMap<>() {{ - put("call", ExprCoreType.STRING); - put("code", ExprCoreType.STRING); - put(VALUE, ExprCoreType.DOUBLE); - put(TIMESTAMP, 
ExprCoreType.TIMESTAMP); - }}; - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + when(prometheusClient.getLabels(METRIC_NAME)) + .thenReturn( + new ArrayList() { + { + add("call"); + add("code"); + } + }); + Map expected = + new HashMap<>() { + { + put("call", ExprCoreType.STRING); + put("code", ExprCoreType.STRING); + put(VALUE, ExprCoreType.DOUBLE); + put(TIMESTAMP, ExprCoreType.TIMESTAMP); + } + }; + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); assertEquals(expected, prometheusDescribeMetricRequest.getFieldTypes()); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); } - @Test @SneakyThrows void testGetFieldTypesWithEmptyMetricName() { - Map expected = new HashMap<>() {{ - put(VALUE, ExprCoreType.DOUBLE); - put(TIMESTAMP, ExprCoreType.TIMESTAMP); - }}; - assertThrows(NullPointerException.class, - () -> new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), - null)); + Map expected = + new HashMap<>() { + { + put(VALUE, ExprCoreType.DOUBLE); + put(TIMESTAMP, ExprCoreType.TIMESTAMP); + } + }; + assertThrows( + NullPointerException.class, + () -> + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), null)); } - @Test @SneakyThrows void testGetFieldTypesWhenException() { when(prometheusClient.getLabels(METRIC_NAME)).thenThrow(new RuntimeException("ERROR Message")); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusDescribeMetricRequest::getFieldTypes); + 
PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusDescribeMetricRequest::getFieldTypes); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); assertEquals("ERROR Message", exception.getMessage()); } @@ -93,27 +100,30 @@ void testGetFieldTypesWhenException() { @SneakyThrows void testGetFieldTypesWhenIOException() { when(prometheusClient.getLabels(METRIC_NAME)).thenThrow(new IOException("ERROR Message")); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusDescribeMetricRequest::getFieldTypes); - assertEquals("Error while fetching labels for http_requests_total" - + " from prometheus: ERROR Message", exception.getMessage()); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusDescribeMetricRequest::getFieldTypes); + assertEquals( + "Error while fetching labels for http_requests_total" + " from prometheus: ERROR Message", + exception.getMessage()); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); } @Test @SneakyThrows void testSearch() { - when(prometheusClient.getLabels(METRIC_NAME)).thenReturn(new ArrayList<>() { - { - add("call"); - } - }); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("test", "default"), METRIC_NAME); + when(prometheusClient.getLabels(METRIC_NAME)) + .thenReturn( + new ArrayList<>() 
{ + { + add("call"); + } + }); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("test", "default"), METRIC_NAME); List result = prometheusDescribeMetricRequest.search(); assertEquals(3, result.size()); assertEquals(expectedRow(), result.get(0)); @@ -129,5 +139,4 @@ private ExprValue expectedRow() { valueMap.put("DATA_TYPE", stringValue(ExprCoreType.STRING.legacyTypeName().toLowerCase())); return new ExprTupleValue(valueMap); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java index bf5bb22e96..09f63463b5 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java @@ -35,45 +35,46 @@ @ExtendWith(MockitoExtension.class) public class PrometheusListMetricsRequestTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testSearch() { Map> metricsResult = new HashMap<>(); - metricsResult.put("go_gc_duration_seconds", - Collections.singletonList(new MetricMetadata("summary", - "A summary of the pause duration of garbage collection cycles.", ""))); - metricsResult.put("go_goroutines", - Collections.singletonList(new MetricMetadata("gauge", - "Number of goroutines that currently exist.", ""))); + metricsResult.put( + "go_gc_duration_seconds", + Collections.singletonList( + new MetricMetadata( + "summary", "A summary of the pause duration of garbage collection cycles.", ""))); + metricsResult.put( + "go_goroutines", + Collections.singletonList( + new MetricMetadata("gauge", "Number of goroutines that currently exist.", ""))); 
when(prometheusClient.getAllMetrics()).thenReturn(metricsResult); - PrometheusListMetricsRequest prometheusListMetricsRequest - = new PrometheusListMetricsRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "information_schema")); + PrometheusListMetricsRequest prometheusListMetricsRequest = + new PrometheusListMetricsRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "information_schema")); List result = prometheusListMetricsRequest.search(); assertEquals(expectedRow(), result.get(0)); assertEquals(2, result.size()); verify(prometheusClient, times(1)).getAllMetrics(); } - @Test @SneakyThrows void testSearchWhenIOException() { when(prometheusClient.getAllMetrics()).thenThrow(new IOException("ERROR Message")); - PrometheusListMetricsRequest prometheusListMetricsRequest - = new PrometheusListMetricsRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "information_schema")); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusListMetricsRequest::search); - assertEquals("Error while fetching metric list for from prometheus: ERROR Message", + PrometheusListMetricsRequest prometheusListMetricsRequest = + new PrometheusListMetricsRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "information_schema")); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusListMetricsRequest::search); + assertEquals( + "Error while fetching metric list for from prometheus: ERROR Message", exception.getMessage()); verify(prometheusClient, times(1)).getAllMetrics(); } - private ExprTupleValue expectedRow() { LinkedHashMap valueMap = new LinkedHashMap<>(); valueMap.put("TABLE_CATALOG", stringValue("prometheus")); @@ -81,9 +82,8 @@ private ExprTupleValue expectedRow() { valueMap.put("TABLE_NAME", stringValue("go_gc_duration_seconds")); valueMap.put("TABLE_TYPE", stringValue("summary")); valueMap.put("UNIT", stringValue("")); - valueMap.put("REMARKS", - stringValue("A summary of 
the pause duration of garbage collection cycles.")); + valueMap.put( + "REMARKS", stringValue("A summary of the pause duration of garbage collection cycles.")); return new ExprTupleValue(valueMap); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java index 9c0207853c..00ddc973bc 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java @@ -44,8 +44,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusMetricScanTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows @@ -60,24 +59,30 @@ void testQueryResponseIterator() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put(VALUE, new ExprDoubleValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put(VALUE, new ExprDoubleValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put("@timestamp", new 
ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("@value", new ExprDoubleValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("@timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("@value", new ExprDoubleValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } @@ -85,8 +90,7 @@ void testQueryResponseIterator() { @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseFieldNames() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("count()"); prometheusResponseFieldNames.setValueType(INTEGER); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -101,34 +105,38 @@ void testQueryResponseIteratorWithGivenPrometheusResponseFieldNames() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("count()", new ExprIntegerValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new 
ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("count()", new ExprIntegerValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("count()", new ExprIntegerValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("count()", new ExprIntegerValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } - @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("testAgg"); prometheusResponseFieldNames.setValueType(LONG); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -143,24 +151,30 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - 
put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } @@ -168,8 +182,7 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseWithBackQuotedFieldNames() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); 
prometheusResponseFieldNames.setValueFieldName("testAgg"); prometheusResponseFieldNames.setValueType(LONG); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -186,29 +199,34 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithBackQuotedFieldName .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(1)); - put("`instance`", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(1)); + put("`instance`", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(0)); - put("`instance`", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(0)); + put("`instance`", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, 
prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } - @Test @SneakyThrows void testEmptyQueryResponseIterator() { @@ -235,11 +253,12 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("no_matrix_query_range_result.json"))); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, prometheusMetricScan::open); + RuntimeException runtimeException = + Assertions.assertThrows(RuntimeException.class, prometheusMetricScan::open); assertEquals( "Unexpected Result Type: vector during Prometheus Response Parsing. " - + "'matrix' resultType is expected", runtimeException.getMessage()); + + "'matrix' resultType is expected", + runtimeException.getMessage()); } @Test @@ -253,13 +272,12 @@ void testEmptyQueryWithException() { when(prometheusClient.queryRange(any(), any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, prometheusMetricScan::open); - assertEquals("Error fetching data from prometheus server. Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, prometheusMetricScan::open); + assertEquals( + "Error fetching data from prometheus server. 
Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { @@ -273,5 +291,4 @@ void testExplain() { + "endTime=1664771294133, step=14)", prometheusMetricScan.explain()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java index d43c38fc68..8bdab9244b 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java @@ -62,15 +62,14 @@ @ExtendWith(MockitoExtension.class) class PrometheusMetricTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test @SneakyThrows void testGetFieldTypesFromMetric() { when(client.getLabels(TestConstants.METRIC_NAME)).thenReturn(List.of("label1", "label2")); - PrometheusMetricTable prometheusMetricTable - = new PrometheusMetricTable(client, TestConstants.METRIC_NAME); + PrometheusMetricTable prometheusMetricTable = + new PrometheusMetricTable(client, TestConstants.METRIC_NAME); Map expectedFieldTypes = new HashMap<>(); expectedFieldTypes.put("label1", ExprCoreType.STRING); expectedFieldTypes.put("label2", ExprCoreType.STRING); @@ -84,7 +83,7 @@ void testGetFieldTypesFromMetric() { assertNull(prometheusMetricTable.getPrometheusQueryRequest()); assertNotNull(prometheusMetricTable.getMetricName()); - //testing Caching + // testing Caching fieldTypes = prometheusMetricTable.getFieldTypes(); assertEquals(expectedFieldTypes, fieldTypes); @@ -96,8 +95,8 @@ void testGetFieldTypesFromMetric() { @Test @SneakyThrows void testGetFieldTypesFromPrometheusQueryRequest() { - PrometheusMetricTable prometheusMetricTable - = new PrometheusMetricTable(client, new PrometheusQueryRequest()); + PrometheusMetricTable prometheusMetricTable = + new PrometheusMetricTable(client, new 
PrometheusQueryRequest()); Map expectedFieldTypes = new HashMap<>(); expectedFieldTypes.put(VALUE, ExprCoreType.DOUBLE); expectedFieldTypes.put(TIMESTAMP, ExprCoreType.TIMESTAMP); @@ -117,14 +116,17 @@ void testImplementWithBasicMetricQuery() { new PrometheusMetricTable(client, "prometheus_http_requests_total"); List finalProjectList = new ArrayList<>(); finalProjectList.add(named("@value", ref("@value", ExprCoreType.DOUBLE))); - PhysicalPlan plan = prometheusMetricTable.implement( - project(relation("prometheus_http_requests_total", prometheusMetricTable), - finalProjectList, null)); + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + relation("prometheus_http_requests_total", prometheusMetricTable), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE), outputFields); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); PrometheusMetricScan prometheusMetricScan = @@ -133,7 +135,6 @@ void testImplementWithBasicMetricQuery() { assertEquals(3600 / 250 + "s", prometheusMetricScan.getRequest().getStep()); } - @Test void testImplementPrometheusQueryWithStatsQueryAndNoFilter() { @@ -141,16 +142,23 @@ void testImplementPrometheusQueryWithStatsQueryAndNoFilter() { new PrometheusMetricTable(client, "prometheus_http_total_requests"); // IndexScanAgg without Filter - PhysicalPlan plan = prometheusMetricTable.implement( - filter( - indexScanAgg("prometheus_http_total_requests", ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("code", DSL.ref("code", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - 
DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + filter( + indexScanAgg( + "prometheus_http_total_requests", + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("code", DSL.ref("code", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); assertTrue(plan.getChild().get(0) instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = @@ -166,28 +174,31 @@ void testImplementPrometheusQueryWithStatsQueryAndFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - // IndexScanAgg with Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s"))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + 
DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s"))))); assertTrue(plan instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", prometheusQueryRequest.getPromQl()); - } - @Test void testImplementPrometheusQueryWithStatsQueryAndFilterAndProject() { @@ -198,77 +209,99 @@ void testImplementPrometheusQueryWithStatsQueryAndFilterAndProject() { List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + DSL.named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - 
PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals(request.getStep(), "40s"); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testTimeRangeResolver() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Both endTime and startTime are set. + // Both endTime and startTime are set. 
List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + 
ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -278,40 +311,51 @@ void testTimeRangeResolverWithOutEndTimeInFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Only endTime is set. + // Only endTime is set. 
List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) 
plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -321,78 +365,95 @@ void testTimeRangeResolverWithOutStartTimeInFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Both endTime and startTime are set. + // Both endTime and startTime are set. List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", 
DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testSpanResolverWithoutSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); Long endTime = new 
Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.and( DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), + fromObjectValue( + dateFormat.format(new Date(startTime)), ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), null), - finalProjectList, null); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - Assertions.assertEquals("Prometheus Catalog doesn't support " - + "aggregations without span expression", + finalProjectList, + null); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + Assertions.assertEquals( + "Prometheus Catalog doesn't support " + "aggregations without span expression", runtimeException.getMessage()); } @@ -402,34 +463,41 @@ void testSpanResolverWithEmptyGroupByList() { 
PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of()), - finalProjectList, null); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - Assertions.assertEquals("Prometheus Catalog doesn't support " - + "aggregations without span expression", + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + 
ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of()), + finalProjectList, + null); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + Assertions.assertEquals( + "Prometheus Catalog doesn't support " + "aggregations without span expression", runtimeException.getMessage()); } @@ -439,44 +507,58 @@ void testSpanResolverWithSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + 
DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -486,35 +568,45 @@ void testExpressionWithMissingTimeUnitInSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); 
finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan logicalPlan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan logicalPlan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.and( DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), + fromObjectValue( + dateFormat.format(new Date(startTime)), ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "")))), - finalProjectList, null); + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal(40), "")))), + finalProjectList, + null); RuntimeException exception = - Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(logicalPlan)); + Assertions.assertThrows( + 
RuntimeException.class, () -> prometheusMetricTable.implement(logicalPlan)); assertEquals("Missing TimeUnit in the span expression", exception.getMessage()); } @@ -524,44 +616,57 @@ void testPrometheusQueryWithOnlySpanExpressionInGroupByList() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of( - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + 
dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg (avg_over_time" + assertEquals( + "avg (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -571,44 +676,57 @@ void testStatsWithNoGroupByList() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = 
prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("span", - DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) 
plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg (avg_over_time" + assertEquals( + "avg (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -617,9 +735,11 @@ void testImplementWithUnexpectedLogicalNode() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); LogicalPlan plan = project(testLogicalPlanNode()); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - assertEquals("unexpected plan node type class" + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "unexpected plan node type class" + " org.opensearch.sql.prometheus.utils.LogicalPlanUtils$TestLogicalPlan", runtimeException.getMessage()); } @@ -629,37 +749,44 @@ void testMultipleAggregationsThrowsRuntimeException() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER))), - named("SUM(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING))))); - - RuntimeException runtimeException = 
Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - assertEquals("Prometheus Catalog doesn't multiple aggregations in stats command", + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of( + named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER))), + named("SUM(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("job", DSL.ref("job", STRING))))); + + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "Prometheus Catalog doesn't multiple aggregations in stats command", runtimeException.getMessage()); } - @Test void testUnSupportedAggregation() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("VAR_SAMP(@value)", - DSL.varSamp(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING))))); - - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of( + named("VAR_SAMP(@value)", DSL.varSamp(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("job", DSL.ref("job", STRING))))); + + RuntimeException 
runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); assertTrue(runtimeException.getMessage().contains("Prometheus Catalog only supports")); } @@ -667,13 +794,16 @@ void testUnSupportedAggregation() { void testImplementWithORConditionInWhereClause() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = indexScan("prometheus_http_total_requests", - DSL.or(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); - RuntimeException exception - = assertThrows(RuntimeException.class, () -> prometheusMetricTable.implement(plan)); - assertEquals("Prometheus Datasource doesn't support or in where command.", - exception.getMessage()); + LogicalPlan plan = + indexScan( + "prometheus_http_total_requests", + DSL.or( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "Prometheus Datasource doesn't support or in where command.", exception.getMessage()); } @Test @@ -683,21 +813,26 @@ void testImplementWithRelationAndFilter() { finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), - finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", 
STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", - request.getPromQl()); + assertEquals( + "prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -710,27 +845,30 @@ void testImplementWithRelationAndTimestampFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + 
finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testImplementWithRelationAndTimestampLTFilter() { List finalProjectList = new ArrayList<>(); @@ -740,27 +878,30 @@ void testImplementWithRelationAndTimestampLTFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.less(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.less( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); 
assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testImplementWithRelationAndTimestampGTFilter() { List finalProjectList = new ArrayList<>(); @@ -770,23 +911,27 @@ void testImplementWithRelationAndTimestampGTFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.greater(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.greater( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) 
physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -796,10 +941,9 @@ void testOptimize() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, prometheusQueryRequest); List finalProjectList = new ArrayList<>(); - LogicalPlan inputPlan = project(relation("query_range", prometheusMetricTable), - finalProjectList, null); - LogicalPlan optimizedPlan = prometheusMetricTable.optimize( - inputPlan); + LogicalPlan inputPlan = + project(relation("query_range", prometheusMetricTable), finalProjectList, null); + LogicalPlan optimizedPlan = prometheusMetricTable.optimize(inputPlan); assertEquals(inputPlan, optimizedPlan); } @@ -810,7 +954,8 @@ void testUnsupportedOperation() { new PrometheusMetricTable(client, prometheusQueryRequest); assertThrows(UnsupportedOperationException.class, prometheusMetricTable::exists); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> prometheusMetricTable.create(Collections.emptyMap())); } @@ -820,25 +965,29 @@ void testImplementPrometheusQueryWithBackQuotedFieldNamesInStatsQuery() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, 
"prometheus_http_total_requests"); - // IndexScanAgg with Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("`job`", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s"))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("`job`", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s"))))); assertTrue(plan instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", prometheusQueryRequest.getPromQl()); - } @Test @@ -848,14 +997,16 @@ void testImplementPrometheusQueryWithFilterQuery() { new PrometheusMetricTable(client, "prometheus_http_total_requests"); // IndexScanAgg without Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScan("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), 
DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); assertTrue(plan instanceof PrometheusMetricScan); - PrometheusQueryRequest prometheusQueryRequest = - ((PrometheusMetricScan) plan).getRequest(); + PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", prometheusQueryRequest.getPromQl()); @@ -867,15 +1018,22 @@ void testImplementPrometheusQueryWithUnsupportedFilterQuery() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(indexScan("prometheus_http_total_requests", - DSL.and(DSL.lte(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))))); - assertEquals("Prometheus Datasource doesn't support <= in where command.", - exception.getMessage()); + RuntimeException exception = + assertThrows( + RuntimeException.class, + () -> + prometheusMetricTable.implement( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.lte(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal( + DSL.ref("handler", STRING), + DSL.literal(stringValue("/ready/"))))))); + assertEquals( + "Prometheus Datasource doesn't support <= in where command.", exception.getMessage()); } - @Test void testCreateScanBuilderWithQueryRangeTableFunction() { PrometheusQueryRequest prometheusQueryRequest = new PrometheusQueryRequest(); @@ -895,5 +1053,4 @@ void testCreateScanBuilderWithPPLQuery() { TableScanBuilder tableScanBuilder = prometheusMetricTable.createScanBuilder(); Assertions.assertNull(tableScanBuilder); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java 
b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java index 4e8d470373..b925fe6538 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java @@ -29,8 +29,7 @@ @ExtendWith(MockitoExtension.class) class PrometheusStorageEngineTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test public void getTable() { @@ -43,15 +42,12 @@ public void getTable() { @Test public void getFunctions() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - Collection functionResolverCollection - = engine.getFunctions(); + Collection functionResolverCollection = engine.getFunctions(); assertNotNull(functionResolverCollection); assertEquals(2, functionResolverCollection.size()); Iterator iterator = functionResolverCollection.iterator(); - assertTrue( - iterator.next() instanceof QueryRangeTableFunctionResolver); - assertTrue( - iterator.next() instanceof QueryExemplarsTableFunctionResolver); + assertTrue(iterator.next() instanceof QueryRangeTableFunctionResolver); + assertTrue(iterator.next() instanceof QueryExemplarsTableFunctionResolver); } @Test @@ -65,8 +61,8 @@ public void getSystemTable() { @Test public void getSystemTableForAllTablesInfo() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - Table table - = engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), "tables"); + Table table = + engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), "tables"); assertNotNull(table); assertTrue(table instanceof PrometheusSystemTable); } @@ -74,10 +70,12 @@ public void getSystemTableForAllTablesInfo() { @Test public void getSystemTableWithWrongInformationSchemaTable() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - SemanticCheckException exception 
= assertThrows(SemanticCheckException.class, - () -> engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), - "test")); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> + engine.getTable( + new DataSourceSchemaName("prometheus", "information_schema"), "test")); assertEquals("Information Schema doesn't contain test table", exception.getMessage()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java index c566ccdeb4..c2e8e5325a 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java @@ -26,8 +26,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusStorageFactoryTest { - @Mock - private Settings settings; + @Mock private Settings settings; @Test void testGetConnectorType() { @@ -46,8 +45,7 @@ void testGetStorageEngineWithBasicAuth() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "admin"); properties.put("prometheus.auth.password", "admin"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } @@ -62,12 +60,10 @@ void testGetStorageEngineWithAWSSigV4Auth() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); 
Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } - @Test @SneakyThrows void testGetStorageEngineWithMissingURI() { @@ -77,10 +73,12 @@ void testGetStorageEngineWithMissingURI() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.uri] fields " - + "in the Prometheus connector properties.", + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.uri] fields " + "in the Prometheus connector properties.", exception.getMessage()); } @@ -93,14 +91,15 @@ void testGetStorageEngineWithMissingRegionInAWS() { properties.put("prometheus.auth.type", "awssigv4"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.auth.region] fields in the " - + "Prometheus connector properties.", + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.auth.region] fields in the " + "Prometheus connector properties.", exception.getMessage()); } - @Test @SneakyThrows void testGetStorageEngineWithLongConfigProperties() { @@ -110,9 +109,12 @@ void testGetStorageEngineWithLongConfigProperties() { properties.put("prometheus.auth.type", "awssigv4"); 
properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.auth.region] fields in the " + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.auth.region] fields in the " + "Prometheus connector properties." + "Fields [prometheus.uri] exceeds more than 1000 characters.", exception.getMessage()); @@ -129,13 +131,14 @@ void testGetStorageEngineWithWrongAuthType() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("AUTH Type : random is not supported with Prometheus Connector", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "AUTH Type : random is not supported with Prometheus Connector", exception.getMessage()); } - @Test @SneakyThrows void testGetStorageEngineWithNONEAuthType() { @@ -143,8 +146,7 @@ void testGetStorageEngineWithNONEAuthType() { PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); HashMap properties = new HashMap<>(); properties.put("prometheus.uri", "https://test.com"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = 
prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } @@ -157,8 +159,9 @@ void testGetStorageEngineWithInvalidURISyntax() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "admin"); properties.put("prometheus.auth.password", "admin"); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.getStorageEngine(properties)); Assertions.assertTrue( exception.getMessage().contains("Invalid URI in prometheus properties: ")); } @@ -213,10 +216,13 @@ void createDataSourceWithInvalidHostname() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( - exception.getMessage().contains("Invalid hostname in the uri: http://dummyprometheus:9090")); + exception + .getMessage() + .contains("Invalid hostname in the uri: http://dummyprometheus:9090")); } @Test @@ -233,8 +239,9 @@ void createDataSourceWithInvalidIp() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( exception.getMessage().contains("Invalid 
hostname in the uri: http://231.54.11.987:9090")); } @@ -255,11 +262,15 @@ void createDataSourceWithHostnameNotMatchingWithAllowHostsConfig() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( - exception.getMessage().contains("Disallowed hostname in the uri: http://localhost.com:9090. " - + "Validate with plugins.query.datasources.uri.allowhosts config")); + exception + .getMessage() + .contains( + "Disallowed hostname in the uri: http://localhost.com:9090. " + + "Validate with plugins.query.datasources.uri.allowhosts config")); } @Test @@ -279,5 +290,4 @@ void createDataSourceSuccessWithHostnameRestrictions() { DataSource dataSource = new PrometheusStorageFactory(settings).createDataSource(metadata); Assertions.assertTrue(dataSource.getStorageEngine() instanceof PrometheusStorageEngine); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java index 19876d398d..7f49de981a 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java @@ -30,14 +30,12 @@ @ExtendWith(MockitoExtension.class) class QueryExemplarsTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test @SneakyThrows void testGetFieldTypes() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new 
PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -50,8 +48,7 @@ void testGetFieldTypes() { @Test void testImplementWithBasicMetricQuery() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -67,8 +64,7 @@ void testImplementWithBasicMetricQuery() { @Test void testCreateScanBuilderWithQueryRangeTableFunction() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -77,5 +73,4 @@ void testCreateScanBuilderWithQueryRangeTableFunction() { Assertions.assertNotNull(tableScanBuilder); Assertions.assertTrue(tableScanBuilder instanceof QueryExemplarsFunctionTableScanBuilder); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java index 37e24a56b5..397b7146f7 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java @@ -18,9 +18,11 @@ public class StepParameterResolverTest { @Test void testNullChecks() { StepParameterResolver stepParameterResolver = new StepParameterResolver(); - Assertions.assertThrows(NullPointerException.class, + Assertions.assertThrows( + NullPointerException.class, () -> stepParameterResolver.resolve(null, 
new Date().getTime(), Collections.emptyList())); - Assertions.assertThrows(NullPointerException.class, + Assertions.assertThrows( + NullPointerException.class, () -> stepParameterResolver.resolve(new Date().getTime(), null, Collections.emptyList())); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java index 73839e2152..6a280b7d98 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java @@ -21,9 +21,11 @@ public class TimeRangeParametersResolverTest { @Test void testTimeRangeParametersWithoutTimestampFilter() { TimeRangeParametersResolver timeRangeParametersResolver = new TimeRangeParametersResolver(); - Pair result = timeRangeParametersResolver.resolve( - DSL.and(DSL.less(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); + Pair result = + timeRangeParametersResolver.resolve( + DSL.and( + DSL.less(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); Assertions.assertNotNull(result); Assertions.assertEquals(3600, result.getSecond() - result.getFirst()); } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java index 0d7ec4e2cc..ea299b87de 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java +++ 
b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java @@ -22,8 +22,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusSystemTableScanTest { - @Mock - private PrometheusSystemRequest request; + @Mock private PrometheusSystemRequest request; @Test public void queryData() { diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java index 0721f82c07..7022ca9657 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java @@ -35,51 +35,41 @@ @ExtendWith(MockitoExtension.class) public class PrometheusSystemTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private Table table; + @Mock private Table table; @Test void testGetFieldTypesOfMetaTable() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("TABLE_CATALOG", STRING) - )); - assertThat(fieldTypes, anyOf( - hasEntry("UNIT", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("TABLE_CATALOG", STRING))); + assertThat(fieldTypes, anyOf(hasEntry("UNIT", STRING))); } @Test void testGetFieldTypesOfMappingTable() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), mappingTable( - "test_metric")); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + 
client, + new DataSourceSchemaName("prometheus", "information_schema"), + mappingTable("test_metric")); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("COLUMN_NAME", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("COLUMN_NAME", STRING))); } - - @Test void implement() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); NamedExpression projectExpr = named("TABLE_NAME", ref("TABLE_NAME", STRING)); - final PhysicalPlan plan = systemIndex.implement( - project( - relation(TABLE_INFO, table), - projectExpr - )); + final PhysicalPlan plan = + systemIndex.implement(project(relation(TABLE_INFO, table), projectExpr)); assertTrue(plan instanceof ProjectOperator); assertTrue(plan.getChild().get(0) instanceof PrometheusSystemTableScan); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java index 5fcebf52e6..570a987889 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java @@ -19,43 +19,36 @@ public class LogicalPlanUtils { - /** - * Build PrometheusLogicalMetricScan. - */ + /** Build PrometheusLogicalMetricScan. */ public static LogicalPlan indexScan(String metricName, Expression filter) { - return PrometheusLogicalMetricScan.builder().metricName(metricName) - .filter(filter) - .build(); + return PrometheusLogicalMetricScan.builder().metricName(metricName).filter(filter).build(); } - /** - * Build PrometheusLogicalMetricAgg. 
- */ - public static LogicalPlan indexScanAgg(String metricName, Expression filter, - List aggregators, - List groupByList) { - return PrometheusLogicalMetricAgg.builder().metricName(metricName) + /** Build PrometheusLogicalMetricAgg. */ + public static LogicalPlan indexScanAgg( + String metricName, + Expression filter, + List aggregators, + List groupByList) { + return PrometheusLogicalMetricAgg.builder() + .metricName(metricName) .filter(filter) .aggregatorList(aggregators) .groupByList(groupByList) .build(); } - /** - * Build PrometheusLogicalMetricAgg. - */ - public static LogicalPlan indexScanAgg(String metricName, - List aggregators, - List groupByList) { - return PrometheusLogicalMetricAgg.builder().metricName(metricName) + /** Build PrometheusLogicalMetricAgg. */ + public static LogicalPlan indexScanAgg( + String metricName, List aggregators, List groupByList) { + return PrometheusLogicalMetricAgg.builder() + .metricName(metricName) .aggregatorList(aggregators) .groupByList(groupByList) .build(); } - /** - * Build PrometheusLogicalMetricAgg. - */ + /** Build PrometheusLogicalMetricAgg. */ public static LogicalPlan testLogicalPlanNode() { return new TestLogicalPlan(); } @@ -71,7 +64,4 @@ public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitNode(this, null); } } - - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java index 1683858c49..a9fcc26101 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java @@ -12,6 +12,7 @@ public class TestUtils { /** * Get Json document from the files in resources folder. + * * @param filename filename. * @return String. * @throws IOException IOException. 
@@ -21,5 +22,4 @@ public static String getJson(String filename) throws IOException { return new String( Objects.requireNonNull(classLoader.getResourceAsStream(filename)).readAllBytes()); } - } From 549d0a028ebc0db387bd3e8a218a7cd671e12a91 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Thu, 17 Aug 2023 09:57:10 -0700 Subject: [PATCH 26/42] [Spotless] Applying Google Code Format for integ-test directory (pt 4/4) #18 (#1979) * Integration test spotless apply directory Signed-off-by: Mitchell Gale * Address PR comments Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale --- build.gradle | 3 +- .../sql/bwc/SQLBackwardsCompatibilityIT.java | 348 +++++++++--------- .../sql/correctness/TestConfig.java | 2 + .../tests/OpenSearchConnectionTest.java | 3 +- .../sql/correctness/testset/TestDataSet.java | 4 +- .../org/opensearch/sql/jdbc/CursorIT.java | 3 +- .../opensearch/sql/legacy/AggregationIT.java | 3 +- .../opensearch/sql/legacy/DateFormatIT.java | 6 +- .../sql/legacy/MetaDataQueriesIT.java | 2 + .../opensearch/sql/legacy/MethodQueryIT.java | 15 + .../sql/legacy/NestedFieldQueryIT.java | 2 + .../org/opensearch/sql/legacy/OrderIT.java | 3 +- .../sql/legacy/PrettyFormatResponseIT.java | 2 + .../sql/legacy/QueryFunctionsIT.java | 5 + .../org/opensearch/sql/legacy/QueryIT.java | 26 +- .../sql/legacy/RestIntegTestCase.java | 139 +++---- .../sql/legacy/SQLIntegTestCase.java | 15 +- .../sql/legacy/TermQueryExplainIT.java | 6 +- .../org/opensearch/sql/legacy/TestUtils.java | 4 +- .../sql/ppl/DateTimeImplementationIT.java | 3 - .../sql/ppl/MatchPhrasePrefixIT.java | 6 +- .../sql/ppl/ObjectFieldOperateIT.java | 3 +- .../sql/sql/ConvertTZFunctionIT.java | 144 ++++---- .../sql/sql/DateTimeImplementationIT.java | 17 +- .../org/opensearch/sql/sql/ScoreQueryIT.java | 3 + .../opensearch/sql/sql/StringLiteralIT.java | 30 +- .../ExecuteOnCallerThreadQueryManager.java | 2 +- .../org/opensearch/sql/util/TestUtils.java | 4 +- 28 files changed, 401 insertions(+), 402 
deletions(-) diff --git a/build.gradle b/build.gradle index 290a1e1786..ffd0153d04 100644 --- a/build.gradle +++ b/build.gradle @@ -89,7 +89,8 @@ spotless { 'prometheus/**/*.java', 'sql/**/*.java', 'common/**/*.java', - 'ppl/**/*.java' + 'ppl/**/*.java', + 'integ-test/**/*java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java index f5c903d434..8c6c5d6710 100644 --- a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java @@ -35,177 +35,189 @@ public class SQLBackwardsCompatibilityIT extends SQLIntegTestCase { - private static final ClusterType CLUSTER_TYPE = ClusterType.parse(System.getProperty("tests.rest.bwcsuite")); - private static final String CLUSTER_NAME = System.getProperty("tests.clustername"); - - @Override - protected final boolean preserveIndicesUponCompletion() { - return true; + private static final ClusterType CLUSTER_TYPE = + ClusterType.parse(System.getProperty("tests.rest.bwcsuite")); + private static final String CLUSTER_NAME = System.getProperty("tests.clustername"); + + @Override + protected final boolean preserveIndicesUponCompletion() { + return true; + } + + @Override + protected final boolean preserveReposUponCompletion() { + return true; + } + + @Override + protected boolean preserveTemplatesUponCompletion() { + return true; + } + + @Override + protected final Settings restClientSettings() { + return Settings.builder() + .put(super.restClientSettings()) + // increase the timeout here to 90 seconds to handle long waits for a green + // cluster health. 
the waits for green need to be longer than a minute to + // account for delayed shards + .put(OpenSearchRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s") + .build(); + } + + private enum ClusterType { + OLD, + MIXED, + UPGRADED; + + public static ClusterType parse(String value) { + switch (value) { + case "old_cluster": + return OLD; + case "mixed_cluster": + return MIXED; + case "upgraded_cluster": + return UPGRADED; + default: + throw new AssertionError("unknown cluster type: " + value); + } } - - @Override - protected final boolean preserveReposUponCompletion() { - return true; + } + + @SuppressWarnings("unchecked") + public void testBackwardsCompatibility() throws Exception { + String uri = getUri(); + Map> responseMap = + (Map>) getAsMap(uri).get("nodes"); + for (Map response : responseMap.values()) { + List> plugins = (List>) response.get("plugins"); + Set pluginNames = + plugins.stream().map(map -> map.get("name")).collect(Collectors.toSet()); + switch (CLUSTER_TYPE) { + case OLD: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + updateLegacySQLSettings(); + loadIndex(Index.ACCOUNT); + verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); + break; + case MIXED: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + verifySQLSettings(); + verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); + break; + case UPGRADED: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + verifySQLSettings(); + verifySQLQueries(QUERY_API_ENDPOINT); + break; + } + break; } - - @Override - protected boolean preserveTemplatesUponCompletion() { - return true; - } - - @Override - protected final Settings restClientSettings() { - return Settings - .builder() - .put(super.restClientSettings()) - // increase the timeout here to 90 seconds to handle long waits for a green - // cluster health. 
the waits for green need to be longer than a minute to - // account for delayed shards - .put(OpenSearchRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s") - .build(); - } - - private enum ClusterType { - OLD, - MIXED, - UPGRADED; - - public static ClusterType parse(String value) { - switch (value) { - case "old_cluster": - return OLD; - case "mixed_cluster": - return MIXED; - case "upgraded_cluster": - return UPGRADED; - default: - throw new AssertionError("unknown cluster type: " + value); - } + } + + private String getUri() { + switch (CLUSTER_TYPE) { + case OLD: + return "_nodes/" + CLUSTER_NAME + "-0/plugins"; + case MIXED: + String round = System.getProperty("tests.rest.bwcsuite_round"); + if (round.equals("second")) { + return "_nodes/" + CLUSTER_NAME + "-1/plugins"; + } else if (round.equals("third")) { + return "_nodes/" + CLUSTER_NAME + "-2/plugins"; + } else { + return "_nodes/" + CLUSTER_NAME + "-0/plugins"; } + case UPGRADED: + return "_nodes/plugins"; + default: + throw new AssertionError("unknown cluster type: " + CLUSTER_TYPE); } - - @SuppressWarnings("unchecked") - public void testBackwardsCompatibility() throws Exception { - String uri = getUri(); - Map> responseMap = (Map>) getAsMap(uri).get("nodes"); - for (Map response : responseMap.values()) { - List> plugins = (List>) response.get("plugins"); - Set pluginNames = plugins.stream().map(map -> map.get("name")).collect(Collectors.toSet()); - switch (CLUSTER_TYPE) { - case OLD: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - updateLegacySQLSettings(); - loadIndex(Index.ACCOUNT); - verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); - break; - case MIXED: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - verifySQLSettings(); - verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); - break; - case UPGRADED: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - verifySQLSettings(); - verifySQLQueries(QUERY_API_ENDPOINT); - break; - } - break; - } + } + + private void 
updateLegacySQLSettings() throws IOException { + Request request = new Request("PUT", LEGACY_SQL_SETTINGS_API_ENDPOINT); + request.setJsonEntity( + String.format( + Locale.ROOT, + "{\n" + " \"persistent\" : {\n \"%s\" : \"%s\"\n }\n}", + "opendistro.sql.cursor.keep_alive", + "7m")); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + JSONObject jsonObject = new JSONObject(getResponseBody(response)); + Assert.assertTrue((boolean) jsonObject.get("acknowledged")); + } + + private void verifySQLSettings() throws IOException { + Request request = new Request("GET", "_cluster/settings?flat_settings"); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + JSONObject jsonObject = new JSONObject(getResponseBody(response)); + Assert.assertEquals( + "{\"transient\":{},\"persistent\":{\"opendistro.sql.cursor.keep_alive\":\"7m\"}}", + jsonObject.toString()); + } + + private void verifySQLQueries(String endpoint) throws IOException { + JSONObject filterResponse = + executeSQLQuery( + endpoint, + "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); + verifySchema(filterResponse, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); + verifyDataRows(filterResponse, rows(238)); + + JSONObject aggResponse = + executeSQLQuery( + endpoint, "SELECT COUNT(DISTINCT age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); + verifySchema(aggResponse, schema("COUNT(DISTINCT age)", null, "integer")); + verifyDataRows(aggResponse, rows(21)); + + JSONObject groupByResponse = + executeSQLQuery( + endpoint, + "select a.gender from " + + TestsConstants.TEST_INDEX_ACCOUNT 
+ + " a group by a.gender having count(*) > 0"); + verifySchema(groupByResponse, schema("gender", null, "text")); + Assert.assertEquals("[[\"F\"],[\"M\"]]", groupByResponse.getJSONArray("datarows").toString()); + } + + private JSONObject executeSQLQuery(String endpoint, String query) throws IOException { + Request request = new Request("POST", endpoint); + request.setJsonEntity(String.format(Locale.ROOT, "{ \"query\" : \"%s\"}", query)); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + return new JSONObject(getResponseBody(response)); + } + + @Override + public boolean shouldResetQuerySizeLimit() { + return false; + } + + @Override + protected synchronized void loadIndex(Index index) throws IOException { + String indexName = index.getName(); + String mapping = index.getMapping(); + // current directory becomes 'integ-test/build/testrun/sqlBwcCluster#' during bwc + String dataSet = "../../../" + index.getDataSet(); + + if (!isIndexExist(client(), indexName)) { + createIndexByRestClient(client(), indexName, mapping); + loadDataByRestClient(client(), indexName, dataSet); } - - private String getUri() { - switch (CLUSTER_TYPE) { - case OLD: - return "_nodes/" + CLUSTER_NAME + "-0/plugins"; - case MIXED: - String round = System.getProperty("tests.rest.bwcsuite_round"); - if (round.equals("second")) { - return "_nodes/" + CLUSTER_NAME + "-1/plugins"; - } else if (round.equals("third")) { - return "_nodes/" + CLUSTER_NAME + "-2/plugins"; - } else { - return "_nodes/" + CLUSTER_NAME + "-0/plugins"; - } - case UPGRADED: - return "_nodes/plugins"; - default: - throw new AssertionError("unknown cluster type: " + CLUSTER_TYPE); - } - } - - private void updateLegacySQLSettings() throws IOException { - Request request = new Request("PUT", LEGACY_SQL_SETTINGS_API_ENDPOINT); - 
request.setJsonEntity(String.format(Locale.ROOT, "{\n" + - " \"persistent\" : {\n" + - " \"%s\" : \"%s\"\n" + - " }\n" + - "}", "opendistro.sql.cursor.keep_alive", "7m")); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - JSONObject jsonObject = new JSONObject(getResponseBody(response)); - Assert.assertTrue((boolean) jsonObject.get("acknowledged")); - } - - private void verifySQLSettings() throws IOException { - Request request = new Request("GET", "_cluster/settings?flat_settings"); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - JSONObject jsonObject = new JSONObject(getResponseBody(response)); - Assert.assertEquals("{\"transient\":{},\"persistent\":{\"opendistro.sql.cursor.keep_alive\":\"7m\"}}", jsonObject.toString()); - } - - private void verifySQLQueries(String endpoint) throws IOException { - JSONObject filterResponse = executeSQLQuery(endpoint, "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); - verifySchema(filterResponse, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); - verifyDataRows(filterResponse, rows(238)); - - JSONObject aggResponse = executeSQLQuery(endpoint, "SELECT COUNT(DISTINCT age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); - verifySchema(aggResponse, schema("COUNT(DISTINCT age)", null, "integer")); - verifyDataRows(aggResponse, rows(21)); - - JSONObject groupByResponse = executeSQLQuery(endpoint, "select a.gender from " + TestsConstants.TEST_INDEX_ACCOUNT + " a group by a.gender having count(*) > 0"); - verifySchema(groupByResponse, schema("gender", null, "text")); - 
Assert.assertEquals("[[\"F\"],[\"M\"]]", groupByResponse.getJSONArray("datarows").toString()); - } - - private JSONObject executeSQLQuery(String endpoint, String query) throws IOException { - Request request = new Request("POST", endpoint); - request.setJsonEntity(String.format(Locale.ROOT, "{" + - " \"query\" : \"%s\"" + - "}", query)); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - return new JSONObject(getResponseBody(response)); - } - - @Override - public boolean shouldResetQuerySizeLimit() { - return false; - } - - @Override - protected synchronized void loadIndex(Index index) throws IOException { - String indexName = index.getName(); - String mapping = index.getMapping(); - // current directory becomes 'integ-test/build/testrun/sqlBwcCluster#' during bwc - String dataSet = "../../../" + index.getDataSet(); - - if (!isIndexExist(client(), indexName)) { - createIndexByRestClient(client(), indexName, mapping); - loadDataByRestClient(client(), indexName, dataSet); - } - } - + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java index a498f15d63..16a172e384 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java @@ -18,6 +18,8 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** + * + * *
  * Test configuration parse the following information from command line arguments:
  * 1) Test schema and data
diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java
index e5130d8fc1..49d8f01651 100644
--- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java
+++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java
@@ -81,8 +81,7 @@ public void testInsertNullData() throws IOException {
     assertEquals("POST", actual.getMethod());
     assertEquals("/test/_bulk?refresh=true", actual.getEndpoint());
     assertEquals(
-        "{\"index\":{}}\n{\"age\":30}\n{\"index\":{}}\n{\"name\":\"Hank\"}\n",
-        getBody(actual));
+        "{\"index\":{}}\n{\"age\":30}\n{\"index\":{}}\n{\"name\":\"Hank\"}\n", getBody(actual));
   }
 
   @Test
diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java
index 25a3f907cf..3872834b26 100644
--- a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java
+++ b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java
@@ -130,7 +130,9 @@ private Object convertStringToObject(String type, String str) {
   @Override
   public String toString() {
     int total = dataRows.size();
-    return String.format("Test data set:\n Table name: %s\n Schema: %s\n Data rows (first 5 in %d):", tableName, schema, total)
+    return String.format(
+            "Test data set:\n Table name: %s\n Schema: %s\n Data rows (first 5 in %d):",
+            tableName, schema, total)
         + dataRows.stream().limit(5).map(Arrays::toString).collect(joining("\n ", "\n ", "\n"));
   }
 }
diff --git a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java
index 81b2aad785..325c81107f 100644
--- a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java
+++ b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java
@@ -111,8 +111,7 @@ public void select_all_no_cursor() {
       var query = String.format("SELECT * FROM %s", table);
       ResultSet rs = stmt.executeQuery(query);
       int rows = 0;
-      while(rs.next())
-        rows++;
+      while (rs.next()) rows++;
 
       var restResponse = executeRestQuery(query, null);
       assertEquals(rows, restResponse.getInt("total"));
diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java
index e053d3d7cf..490e9eb510 100644
--- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java
+++ b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java
@@ -251,8 +251,7 @@ public void groupByHavingTest() throws Exception {
     JSONObject result =
         executeQuery(
             String.format(
-                "SELECT gender FROM %s GROUP BY gender HAVING COUNT(*) > 0",
-                TEST_INDEX_ACCOUNT));
+                "SELECT gender FROM %s GROUP BY gender HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT));
     assertResultForGroupByHavingTest(result);
   }
 
diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java
index 63d37dbad1..388d900924 100644
--- a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java
+++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java
@@ -40,9 +40,9 @@ protected void init() throws Exception {
   }
 
   /**
-   * All the following tests use UTC as their date_format timezone as this is the same timezone
-   * of the data being queried. This is to prevent discrepancies in the OpenSearch query and the
-   * actual field data that is being checked for the integration tests.
+   * All the following tests use UTC as their date_format timezone as this is the same timezone of
+   * the data being queried. This is to prevent discrepancies in the OpenSearch query and the actual
+   * field data that is being checked for the integration tests.
    *
    * 

Large LIMIT values were given for some of these queries since the default result size of the * query is 200 and this ends up excluding some of the expected values causing the assertion to diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java index ba4519f607..3accb2bb17 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java @@ -26,6 +26,8 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** + * + * *

  * The following are tests for SHOW/DESCRIBE query support under Pretty Format Response protocol using JDBC format.
  * 

diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java index 28c5886d68..7589304af0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java @@ -27,6 +27,8 @@ protected void init() throws Exception { } /** + * + * *

    * query
    * "query" : {
@@ -35,6 +37,7 @@ protected void init() throws Exception {
    *   }
    * }
    * 
+ * * @throws IOException */ @Test @@ -50,6 +53,8 @@ public void queryTest() throws IOException { } /** + * + * *
    * matchQuery
    * "query" : {
@@ -61,6 +66,7 @@ public void queryTest() throws IOException {
    *   }
    * }
    * 
+ * * @throws IOException */ @Test @@ -77,6 +83,8 @@ public void matchQueryTest() throws IOException { } /** + * + * *
    * matchQuery
    * {
@@ -118,6 +126,7 @@ public void matchQueryTest() throws IOException {
    *   }
    * }
    * 
+ * * @throws IOException */ @Test @@ -174,6 +183,8 @@ public void negativeRegexpQueryTest() throws IOException { } /** + * + * *
    * wildcardQuery
    * l*e means leae ltae ...
@@ -183,6 +194,7 @@ public void negativeRegexpQueryTest() throws IOException {
    *   }
    * }
    * 
+ * * @throws IOException */ @Test @@ -198,6 +210,8 @@ public void wildcardQueryTest() throws IOException { } /** + * + * *
    * matchPhraseQuery
    * "address" : {
@@ -205,6 +219,7 @@ public void wildcardQueryTest() throws IOException {
    *   "type" : "phrase"
    * }
    * 
+ * * @throws IOException */ @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java index 48d053c6e1..2108bf6867 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java @@ -39,6 +39,8 @@ import org.opensearch.search.SearchHit; /** + * + * *
  * Integration test cases for both rewriting and projection logic.
  * 

diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java index 01e989e9f0..20bed5d2ed 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java @@ -76,8 +76,7 @@ public void orderByIsNull() throws IOException { // Another equivalent syntax assertThat( - explainQuery( - "SELECT * FROM opensearch-sql_test_index_order ORDER BY id IS NULL, id DESC"), + explainQuery("SELECT * FROM opensearch-sql_test_index_order ORDER BY id IS NULL, id DESC"), equalTo( explainQuery( "SELECT * FROM opensearch-sql_test_index_order ORDER BY id IS NULL DESC"))); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java index 70f8a3c433..07883d92f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java @@ -29,6 +29,8 @@ import org.opensearch.client.Request; /** + * + * *

  * PrettyFormatResponseIT will likely be excluding some of the tests written in PrettyFormatResponseTest since
  * those tests were asserting on class objects directly. These updated tests will only be making assertions based
diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java
index b36144ce5f..3cf45f7419 100644
--- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java
+++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java
@@ -43,6 +43,8 @@ public class QueryFunctionsIT extends SQLIntegTestCase {
   private static final String FROM_PHRASE = "FROM " + TEST_INDEX_PHRASE;
 
   /**
+   *
+   *
    * 
    * TODO Looks like Math/Date Functions test all use the same query() and execute() functions
    * TODO execute/featureValueOf/hits functions are the same as used in NestedFieldQueryIT, should refactor into util
@@ -168,6 +170,8 @@ protected U featureValueOf(T actual) {
   }
 
   /**
+   *
+   *
    * 
    * Create Matchers for each field and its value
    * Only one of the Matchers need to match (per hit)
@@ -178,6 +182,7 @@ protected U featureValueOf(T actual) {
    * Then the value "Ayers" can be found in either the firstname or lastname field. Only one of these fields
    * need to satisfy the query value to be evaluated as correct expected output.
    * 
+ * * @param value The value to match for a field in the sourceMap * @param fields A list of fields to match */ diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java index 3f684deaa9..71795b1fb7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java @@ -43,6 +43,8 @@ public class QueryIT extends SQLIntegTestCase { /** + * + * *
    * Currently commenting out tests related to JoinType index since there is an issue with mapping.
    * 

@@ -113,8 +115,7 @@ public void multipleFromTest() throws IOException { public void selectAllWithFieldReturnsAll() throws IOException { JSONObject response = executeQuery( - StringUtils.format( - "SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); + StringUtils.format("SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @@ -123,8 +124,7 @@ public void selectAllWithFieldReturnsAll() throws IOException { public void selectAllWithFieldReverseOrder() throws IOException { JSONObject response = executeQuery( - StringUtils.format( - "SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); + StringUtils.format("SELECT *, age FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @@ -134,8 +134,7 @@ public void selectAllWithMultipleFields() throws IOException { JSONObject response = executeQuery( StringUtils.format( - "SELECT *, age, address FROM %s LIMIT 5", - TestsConstants.TEST_INDEX_BANK)); + "SELECT *, age, address FROM %s LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @@ -145,8 +144,7 @@ public void selectAllWithFieldAndOrderBy() throws IOException { JSONObject response = executeQuery( StringUtils.format( - "SELECT *, age FROM %s ORDER BY age LIMIT 5", - TestsConstants.TEST_INDEX_BANK)); + "SELECT *, age FROM %s ORDER BY age LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @@ -156,8 +154,7 @@ public void selectAllWithFieldAndGroupBy() throws IOException { JSONObject response = executeQuery( StringUtils.format( - "SELECT *, age FROM %s GROUP BY age LIMIT 10", - TestsConstants.TEST_INDEX_BANK)); + "SELECT *, age FROM %s GROUP BY age LIMIT 10", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @@ -167,8 +164,7 @@ public void selectAllWithFieldAndGroupByReverseOrder() throws IOException { JSONObject 
response = executeQuery( StringUtils.format( - "SELECT *, age FROM %s GROUP BY age LIMIT 10", - TestsConstants.TEST_INDEX_BANK)); + "SELECT *, age FROM %s GROUP BY age LIMIT 10", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @@ -1105,8 +1101,7 @@ public void testWhereWithBoolEqualsTrue() throws IOException { JSONObject response = executeQuery( StringUtils.format( - "SELECT * FROM %s WHERE male = true LIMIT 5", - TestsConstants.TEST_INDEX_BANK)); + "SELECT * FROM %s WHERE male = true LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_TRUE); } @@ -1164,8 +1159,7 @@ public void testWhereWithBoolEqualsFalse() throws IOException { JSONObject response = executeQuery( StringUtils.format( - "SELECT * FROM %s WHERE male = false LIMIT 5", - TestsConstants.TEST_INDEX_BANK)); + "SELECT * FROM %s WHERE male = false LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_FALSE); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java index e4f1cc552d..a94047c1e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.core.common.Strings.isNullOrEmpty; @@ -44,12 +43,13 @@ import org.junit.Before; import org.opensearch.client.Request; import org.opensearch.client.Response; -import org.opensearch.core.common.Strings; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.XContentBuilder; /** + * + * *

  * SQL plugin integration test base class (migrated from SQLIntegTestCase)
  * 

@@ -81,9 +81,9 @@ protected boolean preserveClusterUponCompletion() { } /** - * We need to be able to dump the jacoco coverage before cluster is shut down. - * The new internal testing framework removed some of the gradle tasks we were listening to - * to choose a good time to do it. This will dump the executionData to file after each test. + * We need to be able to dump the jacoco coverage before cluster is shut down. The new internal + * testing framework removed some of the gradle tasks we were listening to to choose a good time + * to do it. This will dump the executionData to file after each test.
* TODO: This is also currently just overwriting integTest.exec with the updated execData without * resetting after writing each time. This can be improved to either write an exec file per test * or by letting jacoco append to the file @@ -107,10 +107,12 @@ public static void dumpCoverage() { String serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi"; try (JMXConnector connector = JMXConnectorFactory.connect(new JMXServiceURL(serverUrl))) { - IProxy proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.getMBeanServerConnection(), new ObjectName("org.jacoco:type=Runtime"), - IProxy.class, - false); + IProxy proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.getMBeanServerConnection(), + new ObjectName("org.jacoco:type=Runtime"), + IProxy.class, + false); Path path = Paths.get(jacocoBuildPath + "/integTest.exec"); Files.write(path, proxy.getExecutionData(false)); @@ -120,9 +122,9 @@ public static void dumpCoverage() { } /** - * As JUnit JavaDoc says: - * "The @AfterClass methods declared in superclasses will be run after those of the current class." - * So this method is supposed to run before closeClients() in parent class. + * As JUnit JavaDoc says:
+ * "The @AfterClass methods declared in superclasses will be run after those of the current + * class." So this method is supposed to run before closeClients() in parent class. */ @AfterClass public static void cleanUpIndices() throws IOException { @@ -131,8 +133,8 @@ public static void cleanUpIndices() throws IOException { } /** - * Make it thread-safe in case tests are running in parallel but does not guarantee - * if test like DeleteIT that mutates cluster running in parallel. + * Make it thread-safe in case tests are running in parallel but does not guarantee if test like + * DeleteIT that mutates cluster running in parallel. */ protected synchronized void loadIndex(Index index) throws IOException { String indexName = index.getName(); @@ -145,11 +147,8 @@ protected synchronized void loadIndex(Index index) throws IOException { } } - /** - * Provide for each test to load test index, data and other setup work - */ - protected void init() throws Exception { - } + /** Provide for each test to load test index, data and other setup work */ + protected void init() throws Exception {} protected static void updateClusterSetting(String settingKey, Object value) throws IOException { updateClusterSetting(settingKey, value, true); @@ -158,18 +157,18 @@ protected static void updateClusterSetting(String settingKey, Object value) thro protected static void updateClusterSetting(String settingKey, Object value, boolean persistent) throws IOException { String property = persistent ? 
PERSISTENT : TRANSIENT; - XContentBuilder builder = XContentFactory - .jsonBuilder() - .startObject() - .startObject(property) - .field(settingKey, value) - .endObject() - .endObject(); + XContentBuilder builder = + XContentFactory.jsonBuilder() + .startObject() + .startObject(property) + .field(settingKey, value) + .endObject() + .endObject(); Request request = new Request("PUT", "_cluster/settings"); request.setJsonEntity(builder.toString()); Response response = client().performRequest(request); - Assert - .assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + Assert.assertEquals( + RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); } protected static void wipeAllClusterSettings() throws IOException { @@ -177,103 +176,109 @@ protected static void wipeAllClusterSettings() throws IOException { updateClusterSetting("*", null, false); } - /** - * Enum for associating test index with relevant mapping and data. - */ + /** Enum for associating test index with relevant mapping and data. 
*/ public enum Index { - ONLINE(TestsConstants.TEST_INDEX_ONLINE, - "online", - null, - "src/test/resources/online.json"), - ACCOUNT(TestsConstants.TEST_INDEX_ACCOUNT, + ONLINE(TestsConstants.TEST_INDEX_ONLINE, "online", null, "src/test/resources/online.json"), + ACCOUNT( + TestsConstants.TEST_INDEX_ACCOUNT, "account", getAccountIndexMapping(), "src/test/resources/accounts.json"), - PHRASE(TestsConstants.TEST_INDEX_PHRASE, + PHRASE( + TestsConstants.TEST_INDEX_PHRASE, "phrase", getPhraseIndexMapping(), "src/test/resources/phrases.json"), - DOG(TestsConstants.TEST_INDEX_DOG, - "dog", - getDogIndexMapping(), - "src/test/resources/dogs.json"), - DOGS2(TestsConstants.TEST_INDEX_DOG2, + DOG(TestsConstants.TEST_INDEX_DOG, "dog", getDogIndexMapping(), "src/test/resources/dogs.json"), + DOGS2( + TestsConstants.TEST_INDEX_DOG2, "dog", getDogs2IndexMapping(), "src/test/resources/dogs2.json"), - DOGS3(TestsConstants.TEST_INDEX_DOG3, + DOGS3( + TestsConstants.TEST_INDEX_DOG3, "dog", getDogs3IndexMapping(), "src/test/resources/dogs3.json"), - DOGSSUBQUERY(TestsConstants.TEST_INDEX_DOGSUBQUERY, + DOGSSUBQUERY( + TestsConstants.TEST_INDEX_DOGSUBQUERY, "dog", getDogIndexMapping(), "src/test/resources/dogsubquery.json"), - PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, - "people", - null, - "src/test/resources/peoples.json"), - PEOPLE2(TestsConstants.TEST_INDEX_PEOPLE2, + PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, "people", null, "src/test/resources/peoples.json"), + PEOPLE2( + TestsConstants.TEST_INDEX_PEOPLE2, "people", getPeople2IndexMapping(), "src/test/resources/people2.json"), - GAME_OF_THRONES(TestsConstants.TEST_INDEX_GAME_OF_THRONES, + GAME_OF_THRONES( + TestsConstants.TEST_INDEX_GAME_OF_THRONES, "gotCharacters", getGameOfThronesIndexMapping(), "src/test/resources/game_of_thrones_complex.json"), - SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, - "systems", - null, - "src/test/resources/systems.json"), - ODBC(TestsConstants.TEST_INDEX_ODBC, + SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, 
"systems", null, "src/test/resources/systems.json"), + ODBC( + TestsConstants.TEST_INDEX_ODBC, "odbc", getOdbcIndexMapping(), "src/test/resources/odbc-date-formats.json"), - LOCATION(TestsConstants.TEST_INDEX_LOCATION, + LOCATION( + TestsConstants.TEST_INDEX_LOCATION, "location", getLocationIndexMapping(), "src/test/resources/locations.json"), - LOCATION_TWO(TestsConstants.TEST_INDEX_LOCATION2, + LOCATION_TWO( + TestsConstants.TEST_INDEX_LOCATION2, "location2", getLocationIndexMapping(), "src/test/resources/locations2.json"), - NESTED(TestsConstants.TEST_INDEX_NESTED_TYPE, + NESTED( + TestsConstants.TEST_INDEX_NESTED_TYPE, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects.json"), - NESTED_WITH_QUOTES(TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, + NESTED_WITH_QUOTES( + TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_quotes_in_values.json"), - EMPLOYEE_NESTED(TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, + EMPLOYEE_NESTED( + TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, "_doc", getEmployeeNestedTypeIndexMapping(), "src/test/resources/employee_nested.json"), - JOIN(TestsConstants.TEST_INDEX_JOIN_TYPE, + JOIN( + TestsConstants.TEST_INDEX_JOIN_TYPE, "joinType", getJoinTypeIndexMapping(), "src/test/resources/join_objects.json"), - BANK(TestsConstants.TEST_INDEX_BANK, + BANK( + TestsConstants.TEST_INDEX_BANK, "account", getBankIndexMapping(), "src/test/resources/bank.json"), - BANK_TWO(TestsConstants.TEST_INDEX_BANK_TWO, + BANK_TWO( + TestsConstants.TEST_INDEX_BANK_TWO, "account_two", getBankIndexMapping(), "src/test/resources/bank_two.json"), - BANK_WITH_NULL_VALUES(TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + BANK_WITH_NULL_VALUES( + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "account_null", getBankWithNullValuesIndexMapping(), "src/test/resources/bank_with_null_values.json"), - ORDER(TestsConstants.TEST_INDEX_ORDER, + ORDER( + TestsConstants.TEST_INDEX_ORDER, 
"_doc", getOrderIndexMapping(), "src/test/resources/order.json"), - WEBLOG(TestsConstants.TEST_INDEX_WEBLOG, + WEBLOG( + TestsConstants.TEST_INDEX_WEBLOG, "weblog", getWeblogsIndexMapping(), "src/test/resources/weblogs.json"), - DATE(TestsConstants.TEST_INDEX_DATE, + DATE( + TestsConstants.TEST_INDEX_DATE, "dates", getDateIndexMapping(), "src/test/resources/dates.json"); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java index 4479abdcc6..8335ada5a7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java @@ -99,6 +99,8 @@ protected boolean preserveClusterUponCompletion() { } /** + * + * *

    * We need to be able to dump the jacoco coverage before cluster is shut down.
    * The new internal testing framework removed some of the gradle tasks we were listening to
@@ -143,9 +145,9 @@ public static void dumpCoverage() {
 
   /**
    * As JUnit JavaDoc says:
- "The @AfterClass methods declared in superclasses will be run after those of the current class."
- So this method is supposed to run before closeClients() in parent class. - * class. + * "The @AfterClass methods declared in superclasses will be run after those of the current + * class."
+ * So this method is supposed to run before closeClients() in parent class. class. */ @AfterClass public static void cleanUpIndices() throws IOException { @@ -440,14 +442,11 @@ protected String makeRequest(String query) { } protected String makeRequest(String query, int fetch_size) { - return String.format( - "{ \"fetch_size\": \"%s\", \"query\": \"%s\" }", fetch_size, query); + return String.format("{ \"fetch_size\": \"%s\", \"query\": \"%s\" }", fetch_size, query); } protected String makeFetchLessRequest(String query) { - return String.format("{\n" + - " \"query\": \"%s\"\n" + - "}", query); + return String.format("{\n" + " \"query\": \"%s\"\n" + "}", query); } protected String makeCursorRequest(String cursor) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java index 6c33b2b242..ab2808ee3f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java @@ -52,8 +52,7 @@ public void testNonExistingIndex() throws IOException { @Test public void testNonResolvingIndexPattern() throws IOException { try { - explainQuery( - "SELECT * FROM opensearch_sql_test_blah_blah* WHERE firstname = 'Leo'"); + explainQuery("SELECT * FROM opensearch_sql_test_blah_blah* WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { @@ -265,8 +264,7 @@ public void testKeywordAliasGroupByUsingTableAlias() throws IOException { @Test public void testKeywordAliasOrderBy() throws IOException { String result = - explainQuery( - "SELECT * FROM opensearch-sql_test_index_bank ORDER BY state, lastname "); + explainQuery("SELECT * FROM opensearch-sql_test_index_bank ORDER BY state, lastname "); assertThat(result, containsString("\"state.keyword\":{\"order\":\"asc\"")); assertThat(result, 
containsString("\"lastname\":{\"order\":\"asc\"}")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java index 1abc1d6183..65cacf16d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java @@ -54,8 +54,8 @@ public static void createIndexByRestClient(RestClient client, String indexName, /** * https://github.com/elastic/elasticsearch/pull/49959
- * Deprecate creation of dot-prefixed index - * names except for hidden and system indices. Create hidden index by REST client. + * Deprecate creation of dot-prefixed index names except for hidden and system indices. Create + * hidden index by REST client. * * @param client client connection * @param indexName test index name diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index fb97da32ab..dd86470a39 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -17,13 +17,11 @@ public class DateTimeImplementationIT extends PPLIntegTestCase { - @Override public void init() throws IOException { loadIndex(Index.DATE); } - @Test public void inRangeZeroToStringTZ() throws IOException { JSONObject result = @@ -69,7 +67,6 @@ public void inRangeTwentyHourOffset() throws IOException { verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); } - @Test public void inRangeYearChange() throws IOException { JSONObject result = diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java index d6277252a5..91ce1bbd10 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java @@ -97,8 +97,7 @@ public void zero_term_query_all() throws IOException { public void slop_is_2() throws IOException { // When slop is 2, the terms are matched exactly in the order specified. // 'open' is used to match prefix of the next term. 
- String query = - "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=2) | fields Tags"; + String query = "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=2) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("taste gas")); } @@ -106,8 +105,7 @@ public void slop_is_2() throws IOException { @Test public void slop_is_3() throws IOException { // When slop is 3, results will include phrases where the query terms are transposed. - String query = - "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=3) | fields Tags"; + String query = "source = %s | where match_phrase_prefix(Tags, 'gas ta', slop=3) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("taste draught gas"), rows("taste gas")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java index 501d4bcb5e..d4d09c9af1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java @@ -28,8 +28,7 @@ public void select_object_field() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); + "source=%s | fields city.name, city.location.latitude", TEST_INDEX_DEEP_NESTED)); verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); verifyDataRows(result, rows("Seattle", 10.5)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java index b719edd5b0..76600b6561 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java @@ -1,7 +1,7 @@ - /* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.sql.sql; @@ -16,110 +16,97 @@ public class ConvertTZFunctionIT extends SQLIntegTestCase { - @Override public void init() throws Exception { super.init(); loadIndex(Index.BANK); } - @Test public void inRangeZeroToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); - verifySchema(result, - schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); + verifySchema( + result, schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); verifyDataRows(result, rows("2008-05-15 22:00:00")); } @Test public void inRangeNegativeZeroToPositiveZero() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 00:00:00")); } @Test public void inRangePositiveToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 
00:00:00','+10:00','+11:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 01:00:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-13 04:34:50")); } @Test public void inRangeSameTimeZone() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 11:34:50")); } @Test public void inRangeTwentyFourHourTimeOffset() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-13 11:34:50")); } @Test public void inRangeFifteenMinuteTimeZones() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); - verifySchema(result, - schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); + var result 
= executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); + verifySchema( + result, schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 09:15:00")); } @Test public void inRangeRandomTimes() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); - verifySchema(result, - schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); + verifySchema( + result, schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 08:40:00")); } @Test public void nullField2Under() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); - verifySchema(result, - schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); + verifySchema( + result, schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField3Over() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void inRangeMinOnPoint() throws IOException { - var result = 
executeJdbcRequest( - "SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); - verifySchema(result, - schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); + verifySchema( + result, schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 15:00:00")); } @@ -129,57 +116,50 @@ public void inRangeMinOnPoint() throws IOException { // Invalid input returns null. @Test public void nullField3InvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField2InvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } // Invalid input in the datetime field of CONVERT_TZ results in a null field. 
It is any input // which is not of the format `yyyy-MM-dd HH:mm:ss` @Test public void nullDateTimeInvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021----','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021----','+00:00','+00:00')"); + verifySchema(result, schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); - verifySchema(result, - 
schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java index 750be47de3..8ffa1df8f3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java @@ -5,17 +5,16 @@ package org.opensearch.sql.sql; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import java.io.IOException; - import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; -public class DateTimeImplementationIT extends SQLIntegTestCase { +import java.io.IOException; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; +public class DateTimeImplementationIT extends SQLIntegTestCase { @Override public void init() throws Exception { @@ -135,10 +134,8 @@ public void nullDateTimeInvalidDateValueApril() throws IOException { @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-13-03 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-13-03 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-13-03 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-13-03 10:00:00')", null, 
"datetime")); + verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java index fdd35c47eb..6616746d99 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java @@ -26,6 +26,8 @@ protected void init() throws Exception { } /** + * + * *
    * "query" : {
    *   "from": 0,
@@ -85,6 +87,7 @@ protected void init() throws Exception {
    *   "track_scores": true
    * }
    * 
+ * * @throws IOException */ @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java index d98016d62b..2b6f9476c7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StringLiteralIT.java @@ -18,46 +18,36 @@ public class StringLiteralIT extends SQLIntegTestCase { @Test public void testStringHelloSingleQuote() throws IOException { - JSONObject result = - executeJdbcRequest("select 'Hello'"); - verifySchema(result, - schema("'Hello'", null, "keyword")); + JSONObject result = executeJdbcRequest("select 'Hello'"); + verifySchema(result, schema("'Hello'", null, "keyword")); verifyDataRows(result, rows("Hello")); } @Test public void testStringHelloDoubleQuote() throws IOException { - JSONObject result = - executeJdbcRequest("select \\\"Hello\\\""); - verifySchema(result, - schema("\"Hello\"", null, "keyword")); + JSONObject result = executeJdbcRequest("select \\\"Hello\\\""); + verifySchema(result, schema("\"Hello\"", null, "keyword")); verifyDataRows(result, rows("Hello")); } @Test public void testImStringDoubleDoubleQuoteEscape() throws IOException { - JSONObject result = - executeJdbcRequest("select \\\"I\\\"\\\"m\\\""); - verifySchema(result, - schema("\"I\"\"m\"", null, "keyword")); + JSONObject result = executeJdbcRequest("select \\\"I\\\"\\\"m\\\""); + verifySchema(result, schema("\"I\"\"m\"", null, "keyword")); verifyDataRows(result, rows("I\"m")); } @Test public void testImStringDoubleSingleQuoteEscape() throws IOException { - JSONObject result = - executeJdbcRequest("select 'I''m'"); - verifySchema(result, - schema("'I''m'", null, "keyword")); + JSONObject result = executeJdbcRequest("select 'I''m'"); + verifySchema(result, schema("'I''m'", null, "keyword")); verifyDataRows(result, rows("I'm")); } @Test public void testImStringEscapedSingleQuote() throws IOException { - 
JSONObject result = - executeJdbcRequest("select 'I\\\\'m'"); - verifySchema(result, - schema("'I\\'m'", null, "keyword")); + JSONObject result = executeJdbcRequest("select 'I\\\\'m'"); + verifySchema(result, schema("'I\\'m'", null, "keyword")); verifyDataRows(result, rows("I'm")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java b/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java index 0a42dc83e3..39437ffc5f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java @@ -12,7 +12,7 @@ /** * ONLY USED FOR TEST PURPOSE. * - * Execute {@link AbstractPlan} on caller thread. + *

Execute {@link AbstractPlan} on caller thread. */ public class ExecuteOnCallerThreadQueryManager implements QueryManager { @Override diff --git a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java index ac5cee118c..589fb1f9ae 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java @@ -57,8 +57,8 @@ public static void createIndexByRestClient(RestClient client, String indexName, /** * https://github.com/elastic/elasticsearch/pull/49959
- * Deprecate creation of dot-prefixed index - * names except for hidden and system indices. Create hidden index by REST client. + * Deprecate creation of dot-prefixed index names except for hidden and system indices. Create + * hidden index by REST client. * * @param client client connection * @param indexName test index name From 1744d8ad23a7978faf496a4f85f720851f08ec2e Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Thu, 17 Aug 2023 11:44:18 -0700 Subject: [PATCH 27/42] [Spotless] Applying Google Code Format for protocol files #14 (#1971) * Spotless apply on protocol Signed-off-by: Mitchell Gale * added ignorefailures Signed-off-by: Mitchell Gale * Update protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand Co-authored-by: Guian Gumpac --- build.gradle | 6 +- protocol/build.gradle | 3 + .../sql/protocol/response/QueryResult.java | 35 ++-- .../format/CommandResponseFormatter.java | 4 +- .../response/format/CsvResponseFormatter.java | 2 - .../response/format/ErrorFormatter.java | 36 ++-- .../format/FlatResponseFormatter.java | 45 ++--- .../sql/protocol/response/format/Format.java | 4 +- .../format/JdbcResponseFormatter.java | 23 +-- .../format/JsonResponseFormatter.java | 21 +- .../response/format/RawResponseFormatter.java | 7 +- .../response/format/ResponseFormatter.java | 6 +- .../format/SimpleJsonResponseFormatter.java | 9 +- .../VisualizationResponseFormatter.java | 26 +-- .../protocol/response/QueryResultTest.java | 103 +++++----- .../format/CommandResponseFormatterTest.java | 40 ++-- .../format/CsvResponseFormatterTest.java | 155 ++++++++------ .../protocol/response/format/FormatTest.java | 6 +- .../format/JdbcResponseFormatterTest.java | 106 +++++----- 
.../format/RawResponseFormatterTest.java | 191 ++++++++++-------- .../SimpleJsonResponseFormatterTest.java | 40 ++-- .../VisualizationResponseFormatterTest.java | 77 +++---- 22 files changed, 480 insertions(+), 465 deletions(-) diff --git a/build.gradle b/build.gradle index ffd0153d04..6602bf6471 100644 --- a/build.gradle +++ b/build.gradle @@ -86,6 +86,7 @@ spotless { target fileTree('.') { include 'datasources/**/*.java', 'core/**/*.java', + 'protocol/**/*.java', 'prometheus/**/*.java', 'sql/**/*.java', 'common/**/*.java', @@ -119,9 +120,8 @@ allprojects { sourceCompatibility = targetCompatibility = "11" } configurations.all { - resolutionStrategy.force "com.squareup.okio:okio:3.5.0" - resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib:1.9.0" - resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.9.0" + resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib:1.6.0" + resolutionStrategy.force "org.jetbrains.kotlin:kotlin-stdlib-common:1.6.0" } } diff --git a/protocol/build.gradle b/protocol/build.gradle index 92a1aa0917..dcec1c675b 100644 --- a/protocol/build.gradle +++ b/protocol/build.gradle @@ -43,6 +43,9 @@ dependencies { testImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: '3.12.4' } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + configurations.all { resolutionStrategy.force "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java index 3ce1dd8875..03be0875cf 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response; import java.util.Collection; @@ -20,22 +19,18 @@ import 
org.opensearch.sql.executor.pagination.Cursor; /** - * Query response that encapsulates query results and isolate {@link ExprValue} - * related from formatter implementation. + * Query response that encapsulates query results and isolate {@link ExprValue} related from + * formatter implementation. */ @RequiredArgsConstructor public class QueryResult implements Iterable { - @Getter - private final ExecutionEngine.Schema schema; + @Getter private final ExecutionEngine.Schema schema; - /** - * Results which are collection of expression. - */ + /** Results which are collection of expression. */ private final Collection exprValues; - @Getter - private final Cursor cursor; + @Getter private final Cursor cursor; public QueryResult(ExecutionEngine.Schema schema, Collection exprValues) { this(schema, exprValues, Cursor.None); @@ -43,6 +38,7 @@ public QueryResult(ExecutionEngine.Schema schema, Collection exprValu /** * size of results. + * * @return size of results */ public int size() { @@ -52,14 +48,18 @@ public int size() { /** * Parse column name from results. * - * @return mapping from column names to its expression type. - * note that column name could be original name or its alias if any. + * @return mapping from column names to its expression type. note that column name could be + * original name or its alias if any. 
*/ public Map columnNameTypes() { Map colNameTypes = new LinkedHashMap<>(); - schema.getColumns().forEach(column -> colNameTypes.put( - getColumnName(column), - column.getExprType().typeName().toLowerCase(Locale.ROOT))); + schema + .getColumns() + .forEach( + column -> + colNameTypes.put( + getColumnName(column), + column.getExprType().typeName().toLowerCase(Locale.ROOT))); return colNameTypes; } @@ -78,9 +78,6 @@ private String getColumnName(Column column) { } private Object[] convertExprValuesToValues(Collection exprValues) { - return exprValues - .stream() - .map(ExprValue::value) - .toArray(Object[]::new); + return exprValues.stream().map(ExprValue::value).toArray(Object[]::new); } } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java index dfd0f91931..b781e1dbba 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java @@ -10,8 +10,8 @@ import org.opensearch.sql.protocol.response.QueryResult; /** - * A simple response formatter which contains no data. - * Supposed to use with {@link CommandPlan} only. + * A simple response formatter which contains no data. Supposed to use with {@link CommandPlan} + * only. 
*/ public class CommandResponseFormatter extends JsonResponseFormatter { diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java index 5c5b4be048..a61b54b258 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; public class CsvResponseFormatter extends FlatResponseFormatter { @@ -14,5 +13,4 @@ public CsvResponseFormatter() { public CsvResponseFormatter(boolean sanitize) { super(",", sanitize); } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java index 40848e959b..5c85e5d65b 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.gson.Gson; @@ -17,35 +16,28 @@ @UtilityClass public class ErrorFormatter { - private static final Gson PRETTY_PRINT_GSON = AccessController.doPrivileged( - (PrivilegedAction) () -> new GsonBuilder() - .setPrettyPrinting() - .disableHtmlEscaping() - .create()); - private static final Gson GSON = AccessController.doPrivileged( - (PrivilegedAction) () -> new GsonBuilder().disableHtmlEscaping().create()); - - /** - * Util method to format {@link Throwable} response to JSON string in compact printing. 
- */ + private static final Gson PRETTY_PRINT_GSON = + AccessController.doPrivileged( + (PrivilegedAction) + () -> new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create()); + private static final Gson GSON = + AccessController.doPrivileged( + (PrivilegedAction) () -> new GsonBuilder().disableHtmlEscaping().create()); + + /** Util method to format {@link Throwable} response to JSON string in compact printing. */ public static String compactFormat(Throwable t) { - JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), - t.getMessage()); + JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), t.getMessage()); return compactJsonify(error); } - /** - * Util method to format {@link Throwable} response to JSON string in pretty printing. - */ - public static String prettyFormat(Throwable t) { - JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), - t.getMessage()); + /** Util method to format {@link Throwable} response to JSON string in pretty printing. 
*/ + public static String prettyFormat(Throwable t) { + JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), t.getMessage()); return prettyJsonify(error); } public static String compactJsonify(Object jsonObject) { - return AccessController.doPrivileged( - (PrivilegedAction) () -> GSON.toJson(jsonObject)); + return AccessController.doPrivileged((PrivilegedAction) () -> GSON.toJson(jsonObject)); } public static String prettyJsonify(Object jsonObject) { diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java index 0575647dad..8c67d524b8 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.common.collect.ImmutableList; @@ -48,9 +47,8 @@ public String format(Throwable t) { } /** - * Sanitize methods are migrated from legacy CSV result. - * Sanitize both headers and data lines by: - * 1) Second double quote entire cell if any comma is found. + * Sanitize methods are migrated from legacy CSV result. Sanitize both headers and data lines by: + * 1) Second double quote entire cell if any comma is found. */ @Getter @RequiredArgsConstructor @@ -84,29 +82,30 @@ private List getHeaders(QueryResult response, boolean sanitize) { private List> getData(QueryResult response, boolean sanitize) { ImmutableList.Builder> dataLines = new ImmutableList.Builder<>(); - response.iterator().forEachRemaining(row -> { - ImmutableList.Builder line = new ImmutableList.Builder<>(); - // replace null values with empty string - Arrays.asList(row).forEach(val -> line.add(val == null ? 
"" : val.toString())); - dataLines.add(line.build()); - }); + response + .iterator() + .forEachRemaining( + row -> { + ImmutableList.Builder line = new ImmutableList.Builder<>(); + // replace null values with empty string + Arrays.asList(row).forEach(val -> line.add(val == null ? "" : val.toString())); + dataLines.add(line.build()); + }); List> result = dataLines.build(); return sanitizeData(result); } - /** - * Sanitize headers because OpenSearch allows special character present in field names. - */ + /** Sanitize headers because OpenSearch allows special character present in field names. */ private List sanitizeHeaders(List headers) { if (sanitize) { return headers.stream() - .map(this::sanitizeCell) - .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) - .collect(Collectors.toList()); + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) + .collect(Collectors.toList()); } else { return headers.stream() - .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) - .collect(Collectors.toList()); + .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) + .collect(Collectors.toList()); } } @@ -114,14 +113,16 @@ private List> sanitizeData(List> lines) { List> result = new ArrayList<>(); if (sanitize) { for (List line : lines) { - result.add(line.stream() + result.add( + line.stream() .map(this::sanitizeCell) .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) .collect(Collectors.toList())); } } else { for (List line : lines) { - result.add(line.stream() + result.add( + line.stream() .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) .collect(Collectors.toList())); } @@ -138,13 +139,11 @@ private String sanitizeCell(String cell) { private String quoteIfRequired(String separator, String cell) { final String quote = "\""; - return cell.contains(separator) - ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; + return cell.contains(separator) ? 
quote + cell.replaceAll("\"", "\"\"") + quote : cell; } private boolean isStartWithSensitiveChar(String cell) { return SENSITIVE_CHAR.stream().anyMatch(cell::startsWith); } } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java index 4291c09df0..8f22a5380e 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.common.base.Strings; @@ -20,8 +19,7 @@ public enum Format { RAW("raw"), VIZ("viz"); - @Getter - private final String formatName; + @Getter private final String formatName; private static final Map ALL_FORMATS; diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java index 1ad3ffde34..8be22af532 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import java.util.List; @@ -40,9 +39,7 @@ protected Object buildJsonObject(QueryResult response) { json.datarows(fetchDataRows(response)); // Populate other fields - json.total(response.size()) - .size(response.size()) - .status(200); + json.total(response.size()).size(response.size()).status(200); if (!response.getCursor().equals(Cursor.None)) { json.cursor(response.getCursor().toString()); } @@ -54,10 +51,7 @@ protected Object buildJsonObject(QueryResult response) { public String format(Throwable t) { int status = getStatus(t); ErrorMessage 
message = ErrorMessageFactory.createErrorMessage(t, status); - Error error = new Error( - message.getType(), - message.getReason(), - message.getDetails()); + Error error = new Error(message.getType(), message.getReason(), message.getDetails()); return jsonify(new JdbcErrorResponse(error, status)); } @@ -66,8 +60,8 @@ private Column fetchColumn(Schema.Column col) { } /** - * Convert type that exists in both legacy and new engine but has different name. - * Return old type name to avoid breaking impact on client-side. + * Convert type that exists in both legacy and new engine but has different name. Return old type + * name to avoid breaking impact on client-side. */ private String convertToLegacyType(ExprType type) { return type.legacyTypeName().toLowerCase(); @@ -83,18 +77,16 @@ private Object[][] fetchDataRows(QueryResult response) { } private int getStatus(Throwable t) { - return (t instanceof SyntaxCheckException - || t instanceof QueryEngineException) ? 400 : 503; + return (t instanceof SyntaxCheckException || t instanceof QueryEngineException) ? 
400 : 503; } - /** - * org.json requires these inner data classes be public (and static) - */ + /** org.json requires these inner data classes be public (and static) */ @Builder @Getter public static class JdbcResponse { @Singular("column") private final List schema; + private final Object[][] datarows; private final long total; private final long size; @@ -125,5 +117,4 @@ public static class Error { private final String reason; private final String details; } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java index 810a7d0c2d..115ee77b2b 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.opensearch.sql.protocol.response.format.ErrorFormatter.compactFormat; @@ -24,16 +23,13 @@ @RequiredArgsConstructor public abstract class JsonResponseFormatter implements ResponseFormatter { - /** - * JSON format styles: pretty format or compact format without indent and space. - */ + /** JSON format styles: pretty format or compact format without indent and space. */ public enum Style { - PRETTY, COMPACT + PRETTY, + COMPACT } - /** - * JSON format style. - */ + /** JSON format style. */ private final Style style; public static final String CONTENT_TYPE = "application/json; charset=UTF-8"; @@ -45,8 +41,8 @@ public String format(R response) { @Override public String format(Throwable t) { - return AccessController.doPrivileged((PrivilegedAction) () -> - (style == PRETTY) ? prettyFormat(t) : compactFormat(t)); + return AccessController.doPrivileged( + (PrivilegedAction) () -> (style == PRETTY) ? 
prettyFormat(t) : compactFormat(t)); } public String contentType() { @@ -62,7 +58,8 @@ public String contentType() { protected abstract Object buildJsonObject(R response); protected String jsonify(Object jsonObject) { - return AccessController.doPrivileged((PrivilegedAction) () -> - (style == PRETTY) ? prettyJsonify(jsonObject) : compactJsonify(jsonObject)); + return AccessController.doPrivileged( + (PrivilegedAction) + () -> (style == PRETTY) ? prettyJsonify(jsonObject) : compactJsonify(jsonObject)); } } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java index 8fe88b2f95..3b64be7062 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java @@ -3,16 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; -/** - * Response formatter to format response to csv or raw format. - */ -//@RequiredArgsConstructor +/** Response formatter to format response to csv or raw format. */ public class RawResponseFormatter extends FlatResponseFormatter { public RawResponseFormatter() { super("|", false); } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java index 6d9cc093c5..6738cfbc9c 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; -/** - * Response formatter to format response to different formats. 
- */ +/** Response formatter to format response to different formats. */ public interface ResponseFormatter { /** @@ -33,5 +30,4 @@ public interface ResponseFormatter { * @return string */ String contentType(); - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java index ad705ccafa..c00174dc9f 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import java.util.List; @@ -43,8 +42,7 @@ public SimpleJsonResponseFormatter(Style style) { public Object buildJsonObject(QueryResult response) { JsonResponse.JsonResponseBuilder json = JsonResponse.builder(); - json.total(response.size()) - .size(response.size()); + json.total(response.size()).size(response.size()); response.columnNameTypes().forEach((name, type) -> json.column(new Column(name, type))); @@ -61,9 +59,7 @@ private Object[][] fetchDataRows(QueryResult response) { return rows; } - /** - * org.json requires these inner data classes be public (and static) - */ + /** org.json requires these inner data classes be public (and static) */ @Builder @Getter public static class JsonResponse { @@ -82,5 +78,4 @@ public static class Column { private final String name; private final String type; } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java index 7e971c9099..d5d220dd8d 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java +++ 
b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java @@ -72,21 +72,20 @@ protected Object buildJsonObject(QueryResult response) { public String format(Throwable t) { int status = getStatus(t); ErrorMessage message = ErrorMessageFactory.createErrorMessage(t, status); - VisualizationResponseFormatter.Error error = new Error( - message.getType(), - message.getReason(), - message.getDetails()); + VisualizationResponseFormatter.Error error = + new Error(message.getType(), message.getReason(), message.getDetails()); return jsonify(new VisualizationErrorResponse(error, status)); } private int getStatus(Throwable t) { - return (t instanceof SyntaxCheckException - || t instanceof QueryEngineException) ? 400 : 503; + return (t instanceof SyntaxCheckException || t instanceof QueryEngineException) ? 400 : 503; } private Map> fetchData(QueryResult response) { Map> columnMap = new LinkedHashMap<>(); - response.getSchema().getColumns() + response + .getSchema() + .getColumns() .forEach(column -> columnMap.put(column.getName(), new LinkedList<>())); for (Object[] dataRow : response) { @@ -107,16 +106,17 @@ private Metadata constructMetadata(QueryResult response) { private List fetchFields(QueryResult response) { List columns = response.getSchema().getColumns(); ImmutableList.Builder fields = ImmutableList.builder(); - columns.forEach(column -> { - Field field = new Field(column.getName(), convertToLegacyType(column.getExprType())); - fields.add(field); - }); + columns.forEach( + column -> { + Field field = new Field(column.getName(), convertToLegacyType(column.getExprType())); + fields.add(field); + }); return fields.build(); } /** - * Convert type that exists in both legacy and new engine but has different name. - * Return old type name to avoid breaking impact on client-side. + * Convert type that exists in both legacy and new engine but has different name. Return old type + * name to avoid breaking impact on client-side. 
*/ private String convertToLegacyType(ExprType type) { return type.legacyTypeName().toLowerCase(); diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java index 4c58e189b8..e03169e9f8 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response; import static org.junit.jupiter.api.Assertions.assertArrayEquals; @@ -23,86 +22,77 @@ class QueryResultTest { - private ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))); - + private ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))); @Test void size() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 40)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 40))), + Cursor.None); assertEquals(3, response.size()); } @Test void columnNameTypes() { - QueryResult response = new QueryResult( - schema, - Collections.singletonList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + 
Collections.singletonList(tupleValue(ImmutableMap.of("name", "John", "age", 20))), + Cursor.None); - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void columnNameTypesWithAlias() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "n", STRING))); - QueryResult response = new QueryResult( - schema, - Collections.singletonList(tupleValue(ImmutableMap.of("n", "John"))), - Cursor.None); - - assertEquals( - ImmutableMap.of("n", "string"), - response.columnNameTypes() - ); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("name", "n", STRING))); + QueryResult response = + new QueryResult( + schema, + Collections.singletonList(tupleValue(ImmutableMap.of("n", "John"))), + Cursor.None); + + assertEquals(ImmutableMap.of("n", "string"), response.columnNameTypes()); } @Test void columnNameTypesFromEmptyExprValues() { - QueryResult response = new QueryResult( - schema, - Collections.emptyList(), Cursor.None); - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + QueryResult response = new QueryResult(schema, Collections.emptyList(), Cursor.None); + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void columnNameTypesFromExprValuesWithMissing() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John")), - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - )); - - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John")), + 
tupleValue(ImmutableMap.of("name", "John", "age", 20)))); + + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void iterate() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30))), + Cursor.None); int i = 0; for (Object[] objects : response) { @@ -116,5 +106,4 @@ void iterate() { i++; } } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java index 85efbab369..8e86e47754 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java @@ -29,32 +29,34 @@ public class CommandResponseFormatterTest { @Test public void produces_always_same_output_for_any_query_response() { var formatter = new CommandResponseFormatter(); - assertEquals(formatter.format(mock(QueryResult.class)), - formatter.format(mock(QueryResult.class))); + assertEquals( + formatter.format(mock(QueryResult.class)), formatter.format(mock(QueryResult.class))); - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("address", "address", OpenSearchTextType.of()), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address", "Seattle") - .put("age", 20) - .build())), - new 
Cursor("test_cursor")); + QueryResult response = + new QueryResult( + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column( + "address", "address", OpenSearchTextType.of()), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address", "Seattle") + .put("age", 20) + .build())), + new Cursor("test_cursor")); - assertEquals("{\n" - + " \"succeeded\": true\n" - + "}", - formatter.format(response)); + assertEquals("{\n \"succeeded\": true\n}", formatter.format(response)); } @Test public void formats_error_as_default_formatter() { var exception = new Exception("pewpew", new RuntimeException("meow meow")); - assertEquals(new JdbcResponseFormatter(PRETTY).format(exception), + assertEquals( + new JdbcResponseFormatter(PRETTY).format(exception), new CommandResponseFormatter().format(exception)); } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java index 82b4f372b3..d27ac72373 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,20 +23,23 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.protocol.response.QueryResult; -/** - * Unit test for {@link CsvResponseFormatter}. - */ +/** Unit test for {@link CsvResponseFormatter}. 
*/ public class CsvResponseFormatterTest { private static final CsvResponseFormatter formatter = new CsvResponseFormatter(); @Test void formatResponse() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); CsvResponseFormatter formatter = new CsvResponseFormatter(); String expected = "name,age%nJohn,20%nSmith,30"; assertEquals(format(expected), formatter.format(response)); @@ -45,49 +47,69 @@ void formatResponse() { @Test void sanitizeHeaders() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("=firstname", null, STRING), - new ExecutionEngine.Schema.Column("+lastname", null, STRING), - new ExecutionEngine.Schema.Column("-city", null, STRING), - new ExecutionEngine.Schema.Column("@age", null, INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of( - "=firstname", "John", "+lastname", "Smith", "-city", "Seattle", "@age", 20)))); - String expected = "'=firstname,'+lastname,'-city,'@age%n" - + "John,Smith,Seattle,20"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("=firstname", null, STRING), + new ExecutionEngine.Schema.Column("+lastname", null, STRING), + new 
ExecutionEngine.Schema.Column("-city", null, STRING), + new ExecutionEngine.Schema.Column("@age", null, INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue( + ImmutableMap.of( + "=firstname", + "John", + "+lastname", + "Smith", + "-city", + "Seattle", + "@age", + 20)))); + String expected = "'=firstname,'+lastname,'-city,'@age%nJohn,Smith,Seattle,20"; assertEquals(format(expected), formatter.format(response)); } @Test void sanitizeData() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "Seattle")), - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "+Seattle")), - tupleValue(ImmutableMap.of("city", "-Seattle")), - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "Seattle=")))); - String expected = "city%n" - + "Seattle%n" - + "'=Seattle%n" - + "'+Seattle%n" - + "'-Seattle%n" - + "'@Seattle%n" - + "Seattle="; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "Seattle")), + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "+Seattle")), + tupleValue(ImmutableMap.of("city", "-Seattle")), + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "Seattle=")))); + String expected = + "city%n" + + "Seattle%n" + + "'=Seattle%n" + + "'+Seattle%n" + + "'-Seattle%n" + + "'@Seattle%n" + + "Seattle="; assertEquals(format(expected), formatter.format(response)); } @Test void quoteIfRequired() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new 
ExecutionEngine.Schema.Column("na,me", "na,me", STRING), - new ExecutionEngine.Schema.Column(",,age", ",,age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("na,me", "John,Smith", ",,age", "30,,,")))); - String expected = "\"na,me\",\",,age\"%n" - + "\"John,Smith\",\"30,,,\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("na,me", "na,me", STRING), + new ExecutionEngine.Schema.Column(",,age", ",,age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList(tupleValue(ImmutableMap.of("na,me", "John,Smith", ",,age", "30,,,")))); + String expected = "\"na,me\",\",,age\"%n\"John,Smith\",\"30,,,\""; assertEquals(format(expected), formatter.format(response)); } @@ -102,32 +124,36 @@ void formatError() { @Test void escapeSanitize() { CsvResponseFormatter escapeFormatter = new CsvResponseFormatter(false); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", ",,Seattle")))); - String expected = "city%n" - + "=Seattle%n" - + "\",,Seattle\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", ",,Seattle")))); + String expected = "city%n=Seattle%n\",,Seattle\""; assertEquals(format(expected), escapeFormatter.format(response)); } @Test void replaceNullValues() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new 
ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John","city", "Seattle")), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); - String expected = "name,city%n" - + "John,Seattle%n" - + ",Seattle%n" - + "John,"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "city", "Seattle")), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); + String expected = "name,city%nJohn,Seattle%n,Seattle%nJohn,"; assertEquals(format(expected), formatter.format(response)); } @@ -135,5 +161,4 @@ void replaceNullValues() { void testContentType() { assertEquals(formatter.contentType(), CONTENT_TYPE); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java index e0e4355a24..7293048916 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -13,9 +12,7 @@ import java.util.Optional; import org.junit.jupiter.api.Test; -/** - * Unit test 
for {@link Format}. - */ +/** Unit test for {@link Format}. */ public class FormatTest { @Test @@ -58,5 +55,4 @@ void unsupportedFormat() { Optional format = Format.of("notsupport"); assertFalse(format.isPresent()); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java index 9c79b1bf89..16dd1590ee 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -43,26 +42,35 @@ class JdbcResponseFormatterTest { @Test void format_response() { - QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", "name", STRING), - new Column("address1", "address1", OpenSearchTextType.of()), - new Column("address2", "address2", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), - new Column("location", "location", STRUCT), - new Column("employer", "employer", ARRAY), - new Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address1", "Seattle") - .put("address2", "WA") - .put("location", ImmutableMap.of("x", "1", "y", "2")) - .put("employments", ImmutableList.of( - ImmutableMap.of("name", "Amazon"), - ImmutableMap.of("name", "AWS"))) - .put("age", 20) - .build()))); + QueryResult response = + new QueryResult( + new Schema( + ImmutableList.of( + new Column("name", "name", STRING), + new Column("address1", "address1", OpenSearchTextType.of()), + new Column( + "address2", + "address2", + OpenSearchTextType.of( + Map.of( + "words", + 
OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + new Column("location", "location", STRUCT), + new Column("employer", "employer", ARRAY), + new Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address1", "Seattle") + .put("address2", "WA") + .put("location", ImmutableMap.of("x", "1", "y", "2")) + .put( + "employments", + ImmutableList.of( + ImmutableMap.of("name", "Amazon"), ImmutableMap.of("name", "AWS"))) + .put("age", 20) + .build()))); assertJsonEquals( "{" @@ -76,7 +84,8 @@ void format_response() { + "]," + "\"datarows\":[" + "[\"John\",\"Seattle\",\"WA\",{\"x\":\"1\",\"y\":\"2\"}," - + "[{\"name\":\"Amazon\"}," + "{\"name\":\"AWS\"}]," + + "[{\"name\":\"Amazon\"}," + + "{\"name\":\"AWS\"}]," + "20]]," + "\"total\":1," + "\"size\":1," @@ -86,18 +95,21 @@ void format_response() { @Test void format_response_with_cursor() { - QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", "name", STRING), - new Column("address", "address", OpenSearchTextType.of()), - new Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address", "Seattle") - .put("age", 20) - .build())), - new Cursor("test_cursor")); + QueryResult response = + new QueryResult( + new Schema( + ImmutableList.of( + new Column("name", "name", STRING), + new Column("address", "address", OpenSearchTextType.of()), + new Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address", "Seattle") + .put("age", 20) + .build())), + new Cursor("test_cursor")); assertJsonEquals( "{" @@ -119,9 +131,9 @@ void format_response_with_cursor() { void format_response_with_missing_and_null_value() { QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", null, STRING), - new Column("age", null, INTEGER))), + new Schema( + 
ImmutableList.of( + new Column("name", null, STRING), new Column("age", null, INTEGER))), Arrays.asList( ExprTupleValue.fromExprValueMap( ImmutableMap.of("name", stringValue("John"), "age", LITERAL_MISSING)), @@ -147,8 +159,7 @@ void format_client_error_response_due_to_syntax_exception() { + "\"details\":\"Invalid query syntax\"" + "}," + "\"status\":400}", - formatter.format(new SyntaxCheckException("Invalid query syntax")) - ); + formatter.format(new SyntaxCheckException("Invalid query syntax"))); } @Test @@ -161,8 +172,7 @@ void format_client_error_response_due_to_semantic_exception() { + "\"details\":\"Invalid query semantics\"" + "}," + "\"status\":400}", - formatter.format(new SemanticCheckException("Invalid query semantics")) - ); + formatter.format(new SemanticCheckException("Invalid query semantics"))); } @Test @@ -175,8 +185,7 @@ void format_server_error_response() { + "\"details\":\"Execution error\"" + "}," + "\"status\":503}", - formatter.format(new IllegalStateException("Execution error")) - ); + formatter.format(new IllegalStateException("Execution error"))); } @Test @@ -193,15 +202,12 @@ void format_server_error_response_due_to_opensearch() { + "from OpenSearch engine.\"" + "}," + "\"status\":503}", - formatter.format(new OpenSearchException("all shards failed", - new IllegalStateException("Execution error"))) - ); + formatter.format( + new OpenSearchException( + "all shards failed", new IllegalStateException("Execution error")))); } private static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual)); + assertEquals(JsonParser.parseString(expected), JsonParser.parseString(actual)); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java index b33a4f216a..65111bd3b9 100644 --- 
a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,69 +23,92 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.protocol.response.QueryResult; -/** - * Unit test for {@link FlatResponseFormatter}. - */ +/** Unit test for {@link FlatResponseFormatter}. */ public class RawResponseFormatterTest { private FlatResponseFormatter rawFormatter = new RawResponseFormatter(); @Test void formatResponse() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); String expected = "name|age%nJohn|20%nSmith|30"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void sanitizeHeaders() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("=firstname", null, STRING), - new ExecutionEngine.Schema.Column("+lastname", null, STRING), - new ExecutionEngine.Schema.Column("-city", null, STRING), - new ExecutionEngine.Schema.Column("@age", 
null, INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of( - "=firstname", "John", "+lastname", "Smith", "-city", "Seattle", "@age", 20)))); - String expected = "=firstname|+lastname|-city|@age%n" - + "John|Smith|Seattle|20"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("=firstname", null, STRING), + new ExecutionEngine.Schema.Column("+lastname", null, STRING), + new ExecutionEngine.Schema.Column("-city", null, STRING), + new ExecutionEngine.Schema.Column("@age", null, INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue( + ImmutableMap.of( + "=firstname", + "John", + "+lastname", + "Smith", + "-city", + "Seattle", + "@age", + 20)))); + String expected = "=firstname|+lastname|-city|@age%nJohn|Smith|Seattle|20"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void sanitizeData() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "Seattle")), - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "+Seattle")), - tupleValue(ImmutableMap.of("city", "-Seattle")), - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "Seattle=")))); - String expected = "city%n" - + "Seattle%n" - + "=Seattle%n" - + "+Seattle%n" - + "-Seattle%n" - + "@Seattle%n" - + "Seattle="; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "Seattle")), + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "+Seattle")), + 
tupleValue(ImmutableMap.of("city", "-Seattle")), + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "Seattle=")))); + String expected = + "city%n" + + "Seattle%n" + + "=Seattle%n" + + "+Seattle%n" + + "-Seattle%n" + + "@Seattle%n" + + "Seattle="; assertEquals(format(expected), rawFormatter.format(response)); } @Test void quoteIfRequired() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("na|me", "na|me", STRING), - new ExecutionEngine.Schema.Column("||age", "||age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("na|me", "John|Smith", "||age", "30|||")))); - String expected = "\"na|me\"|\"||age\"%n" - + "\"John|Smith\"|\"30|||\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("na|me", "na|me", STRING), + new ExecutionEngine.Schema.Column("||age", "||age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList(tupleValue(ImmutableMap.of("na|me", "John|Smith", "||age", "30|||")))); + String expected = "\"na|me\"|\"||age\"%n\"John|Smith\"|\"30|||\""; assertEquals(format(expected), rawFormatter.format(response)); } @@ -101,59 +123,67 @@ void formatError() { @Test void escapeSanitize() { FlatResponseFormatter escapeFormatter = new RawResponseFormatter(); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "||Seattle")))); - String expected = "city%n" - + "=Seattle%n" - + "\"||Seattle\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + 
schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "||Seattle")))); + String expected = "city%n=Seattle%n\"||Seattle\""; assertEquals(format(expected), escapeFormatter.format(response)); } @Test void senstiveCharater() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "++Seattle")))); - String expected = "city%n" - + "@Seattle%n" - + "++Seattle"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "++Seattle")))); + String expected = "city%n@Seattle%n++Seattle"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void senstiveCharaterWithSanitize() { FlatResponseFormatter testFormater = new RawResponseFormatter(); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "++Seattle|||")))); - String expected = "city%n" - + "@Seattle%n" - + "\"++Seattle|||\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "++Seattle|||")))); + String expected = "city%n@Seattle%n\"++Seattle|||\""; assertEquals(format(expected), 
testFormater.format(response)); } @Test void replaceNullValues() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John","city", "Seattle")), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); - String expected = "name|city%n" - + "John|Seattle%n" - + "|Seattle%n" - + "John|"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "city", "Seattle")), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); + String expected = "name|city%nJohn|Seattle%n|Seattle%nJohn|"; assertEquals(format(expected), rawFormatter.format(response)); } @@ -161,5 +191,4 @@ void replaceNullValues() { void testContentType() { assertEquals(rawFormatter.contentType(), CONTENT_TYPE); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java index 8b4438cf91..e5eb0f1ac7 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java +++ 
b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -25,9 +24,11 @@ class SimpleJsonResponseFormatterTest { - private final ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("firstname", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))); + private final ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("firstname", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))); @Test void formatResponse() { @@ -84,12 +85,12 @@ void formatResponsePretty() { @Test void formatResponseSchemaWithAlias() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("firstname", "name", STRING))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("firstname", "name", STRING))); QueryResult response = new QueryResult( - schema, - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)))); + schema, ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( "{\"schema\":[{\"name\":\"name\",\"type\":\"string\"}]," @@ -120,10 +121,13 @@ void formatResponseWithTupleValue() { new QueryResult( schema, Arrays.asList( - tupleValue(ImmutableMap - .of("name", "Smith", - "address", ImmutableMap.of("state", "WA", "street", - ImmutableMap.of("city", "seattle")))))); + tupleValue( + ImmutableMap.of( + "name", + "Smith", + "address", + ImmutableMap.of( + "state", "WA", "street", ImmutableMap.of("city", "seattle")))))); SimpleJsonResponseFormatter 
formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( @@ -140,11 +144,13 @@ void formatResponseWithArrayValue() { new QueryResult( schema, Arrays.asList( - tupleValue(ImmutableMap - .of("name", "Smith", - "address", Arrays.asList( - ImmutableMap.of("state", "WA"), ImmutableMap.of("state", "NYC") - ))))); + tupleValue( + ImmutableMap.of( + "name", + "Smith", + "address", + Arrays.asList( + ImmutableMap.of("state", "WA"), ImmutableMap.of("state", "NYC")))))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( "{\"schema\":[{\"name\":\"firstname\",\"type\":\"string\"}," diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java index f501a53d64..a6fdd1e03e 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java @@ -24,18 +24,21 @@ import org.opensearch.sql.protocol.response.QueryResult; public class VisualizationResponseFormatterTest { - private final VisualizationResponseFormatter formatter = new VisualizationResponseFormatter( - JsonResponseFormatter.Style.COMPACT); + private final VisualizationResponseFormatter formatter = + new VisualizationResponseFormatter(JsonResponseFormatter.Style.COMPACT); @Test void formatResponse() { - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), - tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); + QueryResult response = + new QueryResult( + 
new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), + tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); assertJsonEquals( "{\"data\":{" @@ -55,10 +58,12 @@ void formatResponse() { void formatResponseWithNull() { QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", LITERAL_MISSING)), + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", LITERAL_MISSING)), tupleValue(ImmutableMap.of("name", "Allen", "age", LITERAL_NULL)), tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); @@ -73,8 +78,7 @@ void formatResponseWithNull() { + "\"size\":3," + "\"status\":200" + "}", - formatter.format(response) - ); + formatter.format(response)); } @Test @@ -87,8 +91,7 @@ void clientErrorSyntaxException() { + "\"details\":\"Invalid query syntax\"" + "}," + "\"status\":400}", - formatter.format(new SyntaxCheckException("Invalid query syntax")) - ); + formatter.format(new SyntaxCheckException("Invalid query syntax"))); } @Test @@ -101,8 +104,7 @@ void clientErrorSemanticException() { + "\"details\":\"Invalid query semantics\"" + "}," + "\"status\":400}", - formatter.format(new SemanticCheckException("Invalid query semantics")) - ); + formatter.format(new SemanticCheckException("Invalid query semantics"))); } @Test @@ -115,8 +117,7 @@ void serverError() { + "\"details\":\"Execution error\"" + "}," + "\"status\":503}", - 
formatter.format(new IllegalStateException("Execution error")) - ); + formatter.format(new IllegalStateException("Execution error"))); } @Test @@ -133,22 +134,25 @@ void opensearchServerError() { + "from OpenSearch engine.\"" + "}," + "\"status\":503}", - formatter.format(new OpenSearchException("all shards failed", - new IllegalStateException("Execution error"))) - ); + formatter.format( + new OpenSearchException( + "all shards failed", new IllegalStateException("Execution error")))); } @Test void prettyStyle() { - VisualizationResponseFormatter prettyFormatter = new VisualizationResponseFormatter( - JsonResponseFormatter.Style.PRETTY); - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), - tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); + VisualizationResponseFormatter prettyFormatter = + new VisualizationResponseFormatter(JsonResponseFormatter.Style.PRETTY); + QueryResult response = + new QueryResult( + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), + tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); assertJsonEquals( "{\n" @@ -179,14 +183,11 @@ void prettyStyle() { + " \"size\": 3,\n" + " \"status\": 200\n" + "}", - prettyFormatter.format(response) - ); + prettyFormatter.format(response)); } private static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual)); + assertEquals(JsonParser.parseString(expected), 
JsonParser.parseString(actual)); } @Test From bb3c340ad7ca41f7247e55c14c3a42bd251eb672 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Thu, 17 Aug 2023 12:33:42 -0700 Subject: [PATCH 28/42] [Spotless] Applying Google Code Format for several directories #15 (#1975) * Dev/sl google java format15 (#341) * Spotless apply on 'datasources/**/*.java', 'core/**/*.java', 'docs/**/*.java', 'doctest/**/*.java', 'relase-notes/**/*.java', 'spark/**/*.java' Signed-off-by: Mitchell Gale * Added more directories to build.gradle Signed-off-by: Mitchell Gale * spotless apply for plugin. Signed-off-by: Mitchell Gale * Adding checkstyle ignorefailures. Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale * added missing semi colon. Signed-off-by: Mitchell Gale * Update build.gradle Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Update build.gradle Signed-off-by: Mitchell Gale * Update build.gradle Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand --- build.gradle | 2 + plugin/build.gradle | 3 + .../org/opensearch/sql/plugin/SQLPlugin.java | 77 ++++++------ .../plugin/config/OpenSearchPluginModule.java | 18 ++- .../request/PPLQueryRequestFactory.java | 14 ++- .../sql/plugin/rest/RestPPLQueryAction.java | 15 ++- .../sql/plugin/rest/RestPPLStatsAction.java | 23 ++-- .../plugin/rest/RestQuerySettingsAction.java | 66 +++++++---- .../transport/TransportPPLQueryAction.java | 4 +- .../TransportPPLQueryRequestTest.java | 4 +- spark/build.gradle | 3 + .../sql/spark/client/EmrClientImpl.java | 64 +++++----- .../sql/spark/client/SparkClient.java | 6 +- .../SparkSqlFunctionImplementation.java | 56 +++++---- .../SparkSqlTableFunctionResolver.java | 63 +++++----- ...DefaultSparkSqlFunctionResponseHandle.java | 21 ++-- .../SparkSqlFunctionResponseHandle.java | 16 +-- .../SparkSqlFunctionTableScanBuilder.java | 4 +- .../SparkSqlFunctionTableScanOperator.java | 28 ++--- 
.../sql/spark/helper/FlintHelper.java | 27 ++--- .../sql/spark/request/SparkQueryRequest.java | 9 +- .../sql/spark/response/SparkResponse.java | 18 +-- .../sql/spark/storage/SparkScan.java | 12 +- .../sql/spark/storage/SparkStorageEngine.java | 7 +- .../spark/storage/SparkStorageFactory.java | 60 +++++----- .../sql/spark/storage/SparkTable.java | 15 +-- .../sql/spark/client/EmrClientImplTest.java | 52 ++++---- .../SparkSqlFunctionImplementationTest.java | 57 ++++----- .../SparkSqlFunctionTableScanBuilderTest.java | 21 ++-- ...SparkSqlFunctionTableScanOperatorTest.java | 111 +++++++++--------- .../SparkSqlTableFunctionResolverTest.java | 107 +++++++++-------- .../sql/spark/response/SparkResponseTest.java | 45 +++---- .../sql/spark/storage/SparkScanTest.java | 7 +- .../spark/storage/SparkStorageEngineTest.java | 12 +- .../storage/SparkStorageFactoryTest.java | 62 +++++----- .../sql/spark/storage/SparkTableTest.java | 23 ++-- .../opensearch/sql/spark/utils/TestUtils.java | 2 +- 37 files changed, 557 insertions(+), 577 deletions(-) diff --git a/build.gradle b/build.gradle index 6602bf6471..2bdc4865bb 100644 --- a/build.gradle +++ b/build.gradle @@ -90,6 +90,8 @@ spotless { 'prometheus/**/*.java', 'sql/**/*.java', 'common/**/*.java', + 'spark/**/*.java', + 'plugin/**/*.java', 'ppl/**/*.java', 'integ-test/**/*java' exclude '**/build/**', '**/build-*/**' diff --git a/plugin/build.gradle b/plugin/build.gradle index 11f97ea857..8ec6844bfd 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -85,6 +85,9 @@ publishing { } } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + javadoc.enabled = false loggerUsageCheck.enabled = false dependencyLicenses.enabled = false diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java index 5e156c2f5d..f20de87d61 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java +++ 
b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java @@ -94,10 +94,10 @@ public class SQLPlugin extends Plugin implements ActionPlugin, ScriptPlugin { private static final Logger LOGGER = LogManager.getLogger(SQLPlugin.class); private ClusterService clusterService; - /** - * Settings should be inited when bootstrap the plugin. - */ + + /** Settings should be inited when bootstrap the plugin. */ private org.opensearch.sql.common.setting.Settings pluginSettings; + private NodeClient client; private DataSourceServiceImpl dataSourceService; private Injector injector; @@ -134,23 +134,28 @@ public List getRestHandlers( new RestDataSourceQueryAction()); } - /** - * Register action and handler so that transportClient can find proxy for action. - */ + /** Register action and handler so that transportClient can find proxy for action. */ @Override public List> getActions() { return Arrays.asList( new ActionHandler<>( new ActionType<>(PPLQueryAction.NAME, TransportPPLQueryResponse::new), TransportPPLQueryAction.class), - new ActionHandler<>(new ActionType<>(TransportCreateDataSourceAction.NAME, - CreateDataSourceActionResponse::new), TransportCreateDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportGetDataSourceAction.NAME, - GetDataSourceActionResponse::new), TransportGetDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportUpdateDataSourceAction.NAME, - UpdateDataSourceActionResponse::new), TransportUpdateDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportDeleteDataSourceAction.NAME, - DeleteDataSourceActionResponse::new), TransportDeleteDataSourceAction.class)); + new ActionHandler<>( + new ActionType<>( + TransportCreateDataSourceAction.NAME, CreateDataSourceActionResponse::new), + TransportCreateDataSourceAction.class), + new ActionHandler<>( + new ActionType<>(TransportGetDataSourceAction.NAME, GetDataSourceActionResponse::new), + TransportGetDataSourceAction.class), + new ActionHandler<>( + new 
ActionType<>( + TransportUpdateDataSourceAction.NAME, UpdateDataSourceActionResponse::new), + TransportUpdateDataSourceAction.class), + new ActionHandler<>( + new ActionType<>( + TransportDeleteDataSourceAction.NAME, DeleteDataSourceActionResponse::new), + TransportDeleteDataSourceAction.class)); } @Override @@ -176,11 +181,12 @@ public Collection createComponents( ModulesBuilder modules = new ModulesBuilder(); modules.add(new OpenSearchPluginModule()); - modules.add(b -> { - b.bind(NodeClient.class).toInstance((NodeClient) client); - b.bind(org.opensearch.sql.common.setting.Settings.class).toInstance(pluginSettings); - b.bind(DataSourceService.class).toInstance(dataSourceService); - }); + modules.add( + b -> { + b.bind(NodeClient.class).toInstance((NodeClient) client); + b.bind(org.opensearch.sql.common.setting.Settings.class).toInstance(pluginSettings); + b.bind(DataSourceService.class).toInstance(dataSourceService); + }); injector = modules.createInjector(); return ImmutableList.of(dataSourceService); @@ -212,30 +218,31 @@ public ScriptEngine getScriptEngine(Settings settings, Collection() - .add(new OpenSearchDataSourceFactory( - new OpenSearchNodeClient(this.client), pluginSettings)) + .add( + new OpenSearchDataSourceFactory( + new OpenSearchNodeClient(this.client), pluginSettings)) .add(new PrometheusStorageFactory(pluginSettings)) .add(new SparkStorageFactory(this.client, pluginSettings)) .build(), dataSourceMetadataStorage, dataSourceUserAuthorizationHelper); } - } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java index f301a242fb..33a785c498 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java @@ -45,8 +45,7 @@ public class OpenSearchPluginModule extends AbstractModule { 
BuiltinFunctionRepository.getInstance(); @Override - protected void configure() { - } + protected void configure() {} @Provides public OpenSearchClient openSearchClient(NodeClient nodeClient) { @@ -59,8 +58,8 @@ public StorageEngine storageEngine(OpenSearchClient client, Settings settings) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } @@ -95,18 +94,15 @@ public SQLService sqlService(QueryManager queryManager, QueryPlanFactory queryPl return new SQLService(new SQLSyntaxParser(), queryManager, queryPlanFactory); } - /** - * {@link QueryPlanFactory}. - */ + /** {@link QueryPlanFactory}. */ @Provides - public QueryPlanFactory queryPlanFactory(DataSourceService dataSourceService, - ExecutionEngine executionEngine) { + public QueryPlanFactory queryPlanFactory( + DataSourceService dataSourceService, ExecutionEngine executionEngine) { Analyzer analyzer = new Analyzer( new ExpressionAnalyzer(functionRepository), dataSourceService, functionRepository); Planner planner = new Planner(LogicalPlanOptimizer.create()); - QueryService queryService = new QueryService( - analyzer, executionEngine, planner); + QueryService queryService = new QueryService(analyzer, executionEngine, planner); return new QueryPlanFactory(queryService); } } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java b/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java index 730da0e923..ad734bf150 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - 
package org.opensearch.sql.plugin.request; import java.util.Map; @@ -15,9 +14,7 @@ import org.opensearch.sql.protocol.response.format.Format; import org.opensearch.sql.protocol.response.format.JsonResponseFormatter; -/** - * Factory of {@link PPLQueryRequest}. - */ +/** Factory of {@link PPLQueryRequest}. */ public class PPLQueryRequestFactory { private static final String PPL_URL_PARAM_KEY = "ppl"; private static final String PPL_FIELD_NAME = "query"; @@ -28,6 +25,7 @@ public class PPLQueryRequestFactory { /** * Build {@link PPLQueryRequest} from {@link RestRequest}. + * * @param request {@link PPLQueryRequest} * @return {@link RestRequest} */ @@ -63,8 +61,12 @@ private static PPLQueryRequest parsePPLRequestFromPayload(RestRequest restReques } catch (JSONException e) { throw new IllegalArgumentException("Failed to parse request payload", e); } - PPLQueryRequest pplRequest = new PPLQueryRequest(jsonContent.getString(PPL_FIELD_NAME), - jsonContent, restRequest.path(), format.getFormatName()); + PPLQueryRequest pplRequest = + new PPLQueryRequest( + jsonContent.getString(PPL_FIELD_NAME), + jsonContent, + restRequest.path(), + format.getFormatName()); // set sanitize option if csv format if (format.equals(Format.CSV)) { pplRequest.sanitize(getSanitizeOption(restRequest.params())); diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java index 55f8dfdfef..996ae8c700 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java @@ -102,14 +102,17 @@ protected Set responseParams() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient nodeClient) { // TODO: need move to transport Action if (!pplEnabled.get()) { - return channel -> reportError(channel, new IllegalAccessException( - "Either plugins.ppl.enabled or 
rest.action.multi.allow_explicit_index setting is false"), - BAD_REQUEST); + return channel -> + reportError( + channel, + new IllegalAccessException( + "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is" + + " false"), + BAD_REQUEST); } - TransportPPLQueryRequest transportPPLQueryRequest = new TransportPPLQueryRequest( - PPLQueryRequestFactory.getPPLRequest(request) - ); + TransportPPLQueryRequest transportPPLQueryRequest = + new TransportPPLQueryRequest(PPLQueryRequestFactory.getPPLRequest(request)); return channel -> nodeClient.execute( diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java index ef9f68a2a7..7a51fc282b 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.plugin.rest; import static org.opensearch.core.rest.RestStatus.SERVICE_UNAVAILABLE; @@ -26,17 +25,14 @@ import org.opensearch.sql.legacy.executor.format.ErrorMessageFactory; import org.opensearch.sql.legacy.metrics.Metrics; -/** - * PPL Node level status. - */ +/** PPL Node level status. */ public class RestPPLStatsAction extends BaseRestHandler { private static final Logger LOG = LogManager.getLogger(RestPPLStatsAction.class); - /** - * API endpoint path. - */ + /** API endpoint path. 
*/ public static final String PPL_STATS_API_ENDPOINT = "/_plugins/_ppl/stats"; + public static final String PPL_LEGACY_STATS_API_ENDPOINT = "/_opendistro/_ppl/stats"; public RestPPLStatsAction(Settings settings, RestController restController) { @@ -70,13 +66,18 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli QueryContext.addRequestId(); try { - return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, - Metrics.getInstance().collectToJSON())); + return channel -> + channel.sendResponse( + new BytesRestResponse(RestStatus.OK, Metrics.getInstance().collectToJSON())); } catch (Exception e) { LOG.error("Failed during Query PPL STATS Action.", e); - return channel -> channel.sendResponse(new BytesRestResponse(SERVICE_UNAVAILABLE, - ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()).toString())); + return channel -> + channel.sendResponse( + new BytesRestResponse( + SERVICE_UNAVAILABLE, + ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()) + .toString())); } } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java index b15b4dddd6..885c953c17 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java @@ -39,9 +39,14 @@ public class RestQuerySettingsAction extends BaseRestHandler { private static final String LEGACY_SQL_SETTINGS_PREFIX = "opendistro.sql."; private static final String LEGACY_PPL_SETTINGS_PREFIX = "opendistro.ppl."; private static final String LEGACY_COMMON_SETTINGS_PREFIX = "opendistro.query."; - private static final List SETTINGS_PREFIX = ImmutableList.of( - SQL_SETTINGS_PREFIX, PPL_SETTINGS_PREFIX, COMMON_SETTINGS_PREFIX, - LEGACY_SQL_SETTINGS_PREFIX, LEGACY_PPL_SETTINGS_PREFIX, LEGACY_COMMON_SETTINGS_PREFIX); + 
private static final List SETTINGS_PREFIX = + ImmutableList.of( + SQL_SETTINGS_PREFIX, + PPL_SETTINGS_PREFIX, + COMMON_SETTINGS_PREFIX, + LEGACY_SQL_SETTINGS_PREFIX, + LEGACY_PPL_SETTINGS_PREFIX, + LEGACY_COMMON_SETTINGS_PREFIX); public static final String SETTINGS_API_ENDPOINT = "/_plugins/_query/settings"; public static final String LEGACY_SQL_SETTINGS_API_ENDPOINT = "/_opendistro/_sql/settings"; @@ -75,10 +80,11 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli QueryContext.addRequestId(); final ClusterUpdateSettingsRequest clusterUpdateSettingsRequest = Requests.clusterUpdateSettingsRequest(); - clusterUpdateSettingsRequest.timeout(request.paramAsTime( - "timeout", clusterUpdateSettingsRequest.timeout())); - clusterUpdateSettingsRequest.clusterManagerNodeTimeout(request.paramAsTime( - "cluster_manager_timeout", clusterUpdateSettingsRequest.clusterManagerNodeTimeout())); + clusterUpdateSettingsRequest.timeout( + request.paramAsTime("timeout", clusterUpdateSettingsRequest.timeout())); + clusterUpdateSettingsRequest.clusterManagerNodeTimeout( + request.paramAsTime( + "cluster_manager_timeout", clusterUpdateSettingsRequest.clusterManagerNodeTimeout())); Map source; try (XContentParser parser = request.contentParser()) { source = parser.map(); @@ -86,20 +92,27 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli try { if (source.containsKey(TRANSIENT)) { - clusterUpdateSettingsRequest.transientSettings(getAndFilterSettings( - (Map) source.get(TRANSIENT))); + clusterUpdateSettingsRequest.transientSettings( + getAndFilterSettings((Map) source.get(TRANSIENT))); } if (source.containsKey(PERSISTENT)) { - clusterUpdateSettingsRequest.persistentSettings(getAndFilterSettings( - (Map) source.get(PERSISTENT))); + clusterUpdateSettingsRequest.persistentSettings( + getAndFilterSettings((Map) source.get(PERSISTENT))); } - return channel -> client.admin().cluster().updateSettings( - clusterUpdateSettingsRequest, new 
RestToXContentListener<>(channel)); + return channel -> + client + .admin() + .cluster() + .updateSettings(clusterUpdateSettingsRequest, new RestToXContentListener<>(channel)); } catch (Exception e) { LOG.error("Error changing OpenSearch SQL plugin cluster settings", e); - return channel -> channel.sendResponse(new BytesRestResponse(INTERNAL_SERVER_ERROR, - ErrorMessageFactory.createErrorMessage(e, INTERNAL_SERVER_ERROR.getStatus()).toString())); + return channel -> + channel.sendResponse( + new BytesRestResponse( + INTERNAL_SERVER_ERROR, + ErrorMessageFactory.createErrorMessage(e, INTERNAL_SERVER_ERROR.getStatus()) + .toString())); } } @@ -107,16 +120,19 @@ private Settings getAndFilterSettings(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - Settings.Builder settingsBuilder = Settings.builder() - .loadFromSource(builder.toString(), builder.contentType()); - settingsBuilder.keys().removeIf(key -> { - for (String prefix : SETTINGS_PREFIX) { - if (key.startsWith(prefix)) { - return false; - } - } - return true; - }); + Settings.Builder settingsBuilder = + Settings.builder().loadFromSource(builder.toString(), builder.contentType()); + settingsBuilder + .keys() + .removeIf( + key -> { + for (String prefix : SETTINGS_PREFIX) { + if (key.startsWith(prefix)) { + return false; + } + } + return true; + }); return settingsBuilder.build(); } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e); diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java index 8a9d276673..fde9e24f75 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java @@ -139,8 +139,8 @@ private ResponseListener createListener( @Override public 
void onResponse(ExecutionEngine.QueryResponse response) { String responseContent = - formatter.format(new QueryResult(response.getSchema(), response.getResults(), - response.getCursor())); + formatter.format( + new QueryResult(response.getSchema(), response.getResults(), response.getCursor())); listener.onResponse(new TransportPPLQueryResponse(responseContent)); } diff --git a/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java b/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java index 0e5d99ae35..286ac20fed 100644 --- a/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java +++ b/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java @@ -59,9 +59,7 @@ public void writeTo(StreamOutput out) throws IOException { @Test public void testCustomizedNullJSONContentActionRequestFromActionRequest() { - TransportPPLQueryRequest request = new TransportPPLQueryRequest( - "source=t a=1", null, null - ); + TransportPPLQueryRequest request = new TransportPPLQueryRequest("source=t a=1", null, null); ActionRequest actionRequest = new ActionRequest() { @Override diff --git a/spark/build.gradle b/spark/build.gradle index 89842e5ea8..2608b88ced 100644 --- a/spark/build.gradle +++ b/spark/build.gradle @@ -13,6 +13,9 @@ repositories { mavenCentral() } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + dependencies { api project(':core') implementation project(':datasources') diff --git a/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java b/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java index 1e2475c196..1a3304994b 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java +++ b/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java @@ -36,12 +36,16 @@ public class EmrClientImpl implements SparkClient { /** * Constructor for EMR 
Client Implementation. * - * @param emr EMR helper - * @param flint Opensearch args for flint integration jar + * @param emr EMR helper + * @param flint Opensearch args for flint integration jar * @param sparkResponse Response object to help with retrieving results from Opensearch index */ - public EmrClientImpl(AmazonElasticMapReduce emr, String emrCluster, FlintHelper flint, - SparkResponse sparkResponse, String sparkApplicationJar) { + public EmrClientImpl( + AmazonElasticMapReduce emr, + String emrCluster, + FlintHelper flint, + SparkResponse sparkResponse, + String sparkApplicationJar) { this.emr = emr; this.emrCluster = emrCluster; this.flint = flint; @@ -59,38 +63,39 @@ public JSONObject sql(String query) throws IOException { @VisibleForTesting void runEmrApplication(String query) { - HadoopJarStepConfig stepConfig = new HadoopJarStepConfig() - .withJar("command-runner.jar") - .withArgs("spark-submit", - "--class","org.opensearch.sql.SQLJob", - "--jars", - flint.getFlintIntegrationJar(), - sparkApplicationJar, - query, - SPARK_INDEX_NAME, - flint.getFlintHost(), - flint.getFlintPort(), - flint.getFlintScheme(), - flint.getFlintAuth(), - flint.getFlintRegion() - ); + HadoopJarStepConfig stepConfig = + new HadoopJarStepConfig() + .withJar("command-runner.jar") + .withArgs( + "spark-submit", + "--class", + "org.opensearch.sql.SQLJob", + "--jars", + flint.getFlintIntegrationJar(), + sparkApplicationJar, + query, + SPARK_INDEX_NAME, + flint.getFlintHost(), + flint.getFlintPort(), + flint.getFlintScheme(), + flint.getFlintAuth(), + flint.getFlintRegion()); - StepConfig emrstep = new StepConfig() - .withName("Spark Application") - .withActionOnFailure(ActionOnFailure.CONTINUE) - .withHadoopJarStep(stepConfig); + StepConfig emrstep = + new StepConfig() + .withName("Spark Application") + .withActionOnFailure(ActionOnFailure.CONTINUE) + .withHadoopJarStep(stepConfig); - AddJobFlowStepsRequest request = new AddJobFlowStepsRequest() - .withJobFlowId(emrCluster) - 
.withSteps(emrstep); + AddJobFlowStepsRequest request = + new AddJobFlowStepsRequest().withJobFlowId(emrCluster).withSteps(emrstep); AddJobFlowStepsResult result = emr.addJobFlowSteps(request); logger.info("EMR step ID: " + result.getStepIds()); String stepId = result.getStepIds().get(0); - DescribeStepRequest stepRequest = new DescribeStepRequest() - .withClusterId(emrCluster) - .withStepId(stepId); + DescribeStepRequest stepRequest = + new DescribeStepRequest().withClusterId(emrCluster).withStepId(stepId); waitForStepExecution(stepRequest); sparkResponse.setValue(stepId); @@ -117,5 +122,4 @@ private void waitForStepExecution(DescribeStepRequest stepRequest) { } } } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java b/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java index 99d8600dd0..b38f04680b 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java +++ b/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java @@ -8,15 +8,13 @@ import java.io.IOException; import org.json.JSONObject; -/** - * Interface class for Spark Client. - */ +/** Interface class for Spark Client. */ public interface SparkClient { /** * This method executes spark sql query. 
* * @param query spark sql query - * @return spark query response + * @return spark query response */ JSONObject sql(String query) throws IOException; } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java b/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java index 1936c266de..914aa80085 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java @@ -24,9 +24,7 @@ import org.opensearch.sql.spark.storage.SparkTable; import org.opensearch.sql.storage.Table; -/** - * Spark SQL function implementation. - */ +/** Spark SQL function implementation. */ public class SparkSqlFunctionImplementation extends FunctionExpression implements TableFunctionImplementation { @@ -38,8 +36,8 @@ public class SparkSqlFunctionImplementation extends FunctionExpression * Constructor for spark sql function. 
* * @param functionName name of the function - * @param arguments a list of expressions - * @param sparkClient spark client + * @param arguments a list of expressions + * @param sparkClient spark client */ public SparkSqlFunctionImplementation( FunctionName functionName, List arguments, SparkClient sparkClient) { @@ -51,9 +49,11 @@ public SparkSqlFunctionImplementation( @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException(String.format( - "Spark defined function [%s] is only " - + "supported in SOURCE clause with spark connector catalog", functionName)); + throw new UnsupportedOperationException( + String.format( + "Spark defined function [%s] is only " + + "supported in SOURCE clause with spark connector catalog", + functionName)); } @Override @@ -63,11 +63,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", - ((NamedArgumentExpression) arg).getArgName(), - ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -80,23 +84,23 @@ public Table applyArguments() { * This method builds a spark query request. 
* * @param arguments spark sql function arguments - * @return spark query request + * @return spark query request */ private SparkQueryRequest buildQueryFromSqlFunction(List arguments) { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); - arguments.forEach(arg -> { - String argName = ((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = argValue.valueOf(); - if (argName.equals(QUERY)) { - sparkQueryRequest.setSql((String) literalValue.value()); - } else { - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + if (argName.equals(QUERY)) { + sparkQueryRequest.setSql((String) literalValue.value()); + } else { + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return sparkQueryRequest; } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java b/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java index 624600e1a8..a4f2a6c0fe 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java @@ -22,9 +22,7 @@ import org.opensearch.sql.spark.client.SparkClient; import org.opensearch.sql.spark.functions.implementation.SparkSqlFunctionImplementation; -/** - * Function resolver for sql function of spark connector. - */ +/** Function resolver for sql function of spark connector. 
*/ @RequiredArgsConstructor public class SparkSqlTableFunctionResolver implements FunctionResolver { private final SparkClient sparkClient; @@ -35,35 +33,44 @@ public class SparkSqlTableFunctionResolver implements FunctionResolver { @Override public Pair resolve(FunctionSignature unresolvedSignature) { FunctionName functionName = FunctionName.of(SQL); - FunctionSignature functionSignature = - new FunctionSignature(functionName, List.of(STRING)); + FunctionSignature functionSignature = new FunctionSignature(functionName, List.of(STRING)); final List argumentNames = List.of(QUERY); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - Boolean argumentsPassedByName = arguments.stream() - .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - Boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - if (!(argumentsPassedByName || argumentsPassedByPosition)) { - throw new SemanticCheckException("Arguments should be either passed by name or position"); - } + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + Boolean argumentsPassedByName = + arguments.stream() + .noneMatch( + arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + Boolean argumentsPassedByPosition = + arguments.stream() + .allMatch( + arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + if (!(argumentsPassedByName || argumentsPassedByPosition)) { + throw new SemanticCheckException( + "Arguments should be either passed by name or position"); + } - if (arguments.size() != argumentNames.size()) { - throw new SemanticCheckException( - String.format("Missing arguments:[%s]", - String.join(",", argumentNames.subList(arguments.size(), argumentNames.size())))); - } + if (arguments.size() != argumentNames.size()) { + throw new SemanticCheckException( + String.format( + "Missing arguments:[%s]", + 
String.join( + ",", argumentNames.subList(arguments.size(), argumentNames.size())))); + } - if (argumentsPassedByPosition) { - List namedArguments = new ArrayList<>(); - for (int i = 0; i < arguments.size(); i++) { - namedArguments.add(new NamedArgumentExpression(argumentNames.get(i), - ((NamedArgumentExpression) arguments.get(i)).getValue())); - } - return new SparkSqlFunctionImplementation(functionName, namedArguments, sparkClient); - } - return new SparkSqlFunctionImplementation(functionName, arguments, sparkClient); - }; + if (argumentsPassedByPosition) { + List namedArguments = new ArrayList<>(); + for (int i = 0; i < arguments.size(); i++) { + namedArguments.add( + new NamedArgumentExpression( + argumentNames.get(i), + ((NamedArgumentExpression) arguments.get(i)).getValue())); + } + return new SparkSqlFunctionImplementation(functionName, namedArguments, sparkClient); + } + return new SparkSqlFunctionImplementation(functionName, arguments, sparkClient); + }; return Pair.of(functionSignature, functionBuilder); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java b/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java index cb2b31ddc1..823ad2da29 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java @@ -29,9 +29,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Default implementation of SparkSqlFunctionResponseHandle. - */ +/** Default implementation of SparkSqlFunctionResponseHandle. 
*/ public class DefaultSparkSqlFunctionResponseHandle implements SparkSqlFunctionResponseHandle { private Iterator responseIterator; private ExecutionEngine.Schema schema; @@ -54,8 +52,8 @@ private void constructIteratorAndSchema(JSONObject responseObject) { logger.info("Spark Application ID: " + items.getString("applicationId")); columnList = getColumnList(items.getJSONArray("schema")); for (int i = 0; i < items.getJSONArray("result").length(); i++) { - JSONObject row = new JSONObject( - items.getJSONArray("result").get(i).toString().replace("'", "\"")); + JSONObject row = + new JSONObject(items.getJSONArray("result").get(i).toString().replace("'", "\"")); LinkedHashMap linkedHashMap = extractRow(row, columnList); result.add(new ExprTupleValue(linkedHashMap)); } @@ -85,8 +83,8 @@ private static LinkedHashMap extractRow( } else if (type == ExprCoreType.DATE) { linkedHashMap.put(column.getName(), new ExprDateValue(row.getString(column.getName()))); } else if (type == ExprCoreType.TIMESTAMP) { - linkedHashMap.put(column.getName(), - new ExprTimestampValue(row.getString(column.getName()))); + linkedHashMap.put( + column.getName(), new ExprTimestampValue(row.getString(column.getName()))); } else if (type == ExprCoreType.STRING) { linkedHashMap.put(column.getName(), new ExprStringValue(row.getString(column.getName()))); } else { @@ -101,10 +99,11 @@ private List getColumnList(JSONArray schema) { List columnList = new ArrayList<>(); for (int i = 0; i < schema.length(); i++) { JSONObject column = new JSONObject(schema.get(i).toString().replace("'", "\"")); - columnList.add(new ExecutionEngine.Schema.Column( - column.get("column_name").toString(), - column.get("column_name").toString(), - getDataType(column.get("data_type").toString()))); + columnList.add( + new ExecutionEngine.Schema.Column( + column.get("column_name").toString(), + column.get("column_name").toString(), + getDataType(column.get("data_type").toString()))); } return columnList; } diff --git 
a/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java b/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java index da68b591eb..a9be484712 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java @@ -8,24 +8,18 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Handle Spark response. - */ +/** Handle Spark response. */ public interface SparkSqlFunctionResponseHandle { - /** - * Return true if Spark response has more result. - */ + /** Return true if Spark response has more result. */ boolean hasNext(); /** - * Return Spark response as {@link ExprValue}. Attention, the method must been called when - * hasNext return true. + * Return Spark response as {@link ExprValue}. Attention, the method must been called when hasNext + * return true. */ ExprValue next(); - /** - * Return ExecutionEngine.Schema of the Spark response. - */ + /** Return ExecutionEngine.Schema of the Spark response. */ ExecutionEngine.Schema schema(); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java index 28ce7dd19a..aea8f72f36 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java @@ -12,9 +12,7 @@ import org.opensearch.sql.storage.TableScanOperator; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * TableScanBuilder for sql function of spark connector. - */ +/** TableScanBuilder for sql function of spark connector. 
*/ @AllArgsConstructor public class SparkSqlFunctionTableScanBuilder extends TableScanBuilder { diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java index 85e854e422..a2e44affd5 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java @@ -21,9 +21,7 @@ import org.opensearch.sql.spark.request.SparkQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * This a table scan operator to handle sql table function. - */ +/** This a table scan operator to handle sql table function. */ @RequiredArgsConstructor public class SparkSqlFunctionTableScanOperator extends TableScanOperator { private final SparkClient sparkClient; @@ -34,17 +32,19 @@ public class SparkSqlFunctionTableScanOperator extends TableScanOperator { @Override public void open() { super.open(); - this.sparkResponseHandle = AccessController.doPrivileged( - (PrivilegedAction) () -> { - try { - JSONObject responseObject = sparkClient.sql(request.getSql()); - return new DefaultSparkSqlFunctionResponseHandle(responseObject); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from spark server: %s", e.getMessage())); - } - }); + this.sparkResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONObject responseObject = sparkClient.sql(request.getSql()); + return new DefaultSparkSqlFunctionResponseHandle(responseObject); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format("Error fetching data from spark server: %s", e.getMessage())); + } + }); } @Override diff --git 
a/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java b/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java index b3c3c0871a..10d880187f 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java +++ b/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java @@ -15,25 +15,20 @@ import lombok.Getter; public class FlintHelper { - @Getter - private final String flintIntegrationJar; - @Getter - private final String flintHost; - @Getter - private final String flintPort; - @Getter - private final String flintScheme; - @Getter - private final String flintAuth; - @Getter - private final String flintRegion; + @Getter private final String flintIntegrationJar; + @Getter private final String flintHost; + @Getter private final String flintPort; + @Getter private final String flintScheme; + @Getter private final String flintAuth; + @Getter private final String flintRegion; - /** Arguments required to write data to opensearch index using flint integration. + /** + * Arguments required to write data to opensearch index using flint integration. 
* - * @param flintHost Opensearch host for flint - * @param flintPort Opensearch port for flint integration + * @param flintHost Opensearch host for flint + * @param flintPort Opensearch port for flint integration * @param flintScheme Opensearch scheme for flint integration - * @param flintAuth Opensearch auth for flint integration + * @param flintAuth Opensearch auth for flint integration * @param flintRegion Opensearch region for flint integration */ public FlintHelper( diff --git a/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java b/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java index bc0944a784..94c9795161 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java +++ b/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java @@ -7,15 +7,10 @@ import lombok.Data; -/** - * Spark query request. - */ +/** Spark query request. */ @Data public class SparkQueryRequest { - /** - * SQL. - */ + /** SQL. */ private String sql; - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java index f30072eb3f..3edb541384 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java +++ b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java @@ -36,8 +36,8 @@ public class SparkResponse { * Response for spark sql query. 
* * @param client Opensearch client - * @param value Identifier field value - * @param field Identifier field name + * @param value Identifier field value + * @param field Identifier field name */ public SparkResponse(Client client, String value, String field) { this.client = client; @@ -64,8 +64,10 @@ private JSONObject searchInSparkIndex(QueryBuilder query) { SearchResponse searchResponse = searchResponseActionFuture.actionGet(); if (searchResponse.status().getStatus() != 200) { throw new RuntimeException( - "Fetching result from " + SPARK_INDEX_NAME + " index failed with status : " - + searchResponse.status()); + "Fetching result from " + + SPARK_INDEX_NAME + + " index failed with status : " + + searchResponse.status()); } else { JSONObject data = new JSONObject(); for (SearchHit searchHit : searchResponse.getHits().getHits()) { @@ -90,11 +92,11 @@ void deleteInSparkIndex(String id) { if (deleteResponse.getResult().equals(DocWriteResponse.Result.DELETED)) { LOG.debug("Spark result successfully deleted ", id); } else if (deleteResponse.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) { - throw new ResourceNotFoundException("Spark result with id " - + id + " doesn't exist"); + throw new ResourceNotFoundException("Spark result with id " + id + " doesn't exist"); } else { - throw new RuntimeException("Deleting spark result information failed with : " - + deleteResponse.getResult().getLowercase()); + throw new RuntimeException( + "Deleting spark result information failed with : " + + deleteResponse.getResult().getLowercase()); } } } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java index 3897e8690e..395e1685a6 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java @@ -14,21 +14,14 @@ import org.opensearch.sql.spark.request.SparkQueryRequest; import 
org.opensearch.sql.storage.TableScanOperator; -/** - * Spark scan operator. - */ +/** Spark scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class SparkScan extends TableScanOperator { private final SparkClient sparkClient; - @EqualsAndHashCode.Include - @Getter - @Setter - @ToString.Include - private SparkQueryRequest request; - + @EqualsAndHashCode.Include @Getter @Setter @ToString.Include private SparkQueryRequest request; /** * Constructor. @@ -54,5 +47,4 @@ public ExprValue next() { public String explain() { return getRequest().toString(); } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java index a5e35ecc4c..84c9c05e79 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java @@ -15,17 +15,14 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** - * Spark storage engine implementation. - */ +/** Spark storage engine implementation. 
*/ @RequiredArgsConstructor public class SparkStorageEngine implements StorageEngine { private final SparkClient sparkClient; @Override public Collection getFunctions() { - return Collections.singletonList( - new SparkSqlTableFunctionResolver(sparkClient)); + return Collections.singletonList(new SparkSqlTableFunctionResolver(sparkClient)); } @Override diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java index 937679b50e..467bacbaea 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java @@ -30,9 +30,7 @@ import org.opensearch.sql.storage.DataSourceFactory; import org.opensearch.sql.storage.StorageEngine; -/** - * Storage factory implementation for spark connector. - */ +/** Storage factory implementation for spark connector. */ @RequiredArgsConstructor public class SparkStorageFactory implements DataSourceFactory { private final Client client; @@ -66,9 +64,7 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { return new DataSource( - metadata.getName(), - DataSourceType.SPARK, - getStorageEngine(metadata.getProperties())); + metadata.getName(), DataSourceType.SPARK, getStorageEngine(metadata.getProperties())); } /** @@ -81,24 +77,26 @@ StorageEngine getStorageEngine(Map requiredConfig) { SparkClient sparkClient; if (requiredConfig.get(CONNECTOR_TYPE).equals(EMR)) { sparkClient = - AccessController.doPrivileged((PrivilegedAction) () -> { - validateEMRConfigProperties(requiredConfig); - return new EmrClientImpl( - getEMRClient( - requiredConfig.get(EMR_ACCESS_KEY), - requiredConfig.get(EMR_SECRET_KEY), - requiredConfig.get(EMR_REGION)), - requiredConfig.get(EMR_CLUSTER), - new FlintHelper( - requiredConfig.get(FLINT_INTEGRATION), - requiredConfig.get(FLINT_HOST), - 
requiredConfig.get(FLINT_PORT), - requiredConfig.get(FLINT_SCHEME), - requiredConfig.get(FLINT_AUTH), - requiredConfig.get(FLINT_REGION)), - new SparkResponse(client, null, STEP_ID_FIELD), - requiredConfig.get(SPARK_SQL_APPLICATION)); - }); + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + validateEMRConfigProperties(requiredConfig); + return new EmrClientImpl( + getEMRClient( + requiredConfig.get(EMR_ACCESS_KEY), + requiredConfig.get(EMR_SECRET_KEY), + requiredConfig.get(EMR_REGION)), + requiredConfig.get(EMR_CLUSTER), + new FlintHelper( + requiredConfig.get(FLINT_INTEGRATION), + requiredConfig.get(FLINT_HOST), + requiredConfig.get(FLINT_PORT), + requiredConfig.get(FLINT_SCHEME), + requiredConfig.get(FLINT_AUTH), + requiredConfig.get(FLINT_REGION)), + new SparkResponse(client, null, STEP_ID_FIELD), + requiredConfig.get(SPARK_SQL_APPLICATION)); + }); } else { throw new InvalidParameterException("Spark connector type is invalid."); } @@ -110,12 +108,14 @@ private void validateEMRConfigProperties(Map dataSourceMetadataC if (dataSourceMetadataConfig.get(EMR_CLUSTER) == null || dataSourceMetadataConfig.get(EMR_AUTH_TYPE) == null) { throw new IllegalArgumentException("EMR config properties are missing."); - } else if (dataSourceMetadataConfig.get(EMR_AUTH_TYPE) - .equals(AuthenticationType.AWSSIGV4AUTH.getName()) + } else if (dataSourceMetadataConfig + .get(EMR_AUTH_TYPE) + .equals(AuthenticationType.AWSSIGV4AUTH.getName()) && (dataSourceMetadataConfig.get(EMR_ACCESS_KEY) == null - || dataSourceMetadataConfig.get(EMR_SECRET_KEY) == null)) { + || dataSourceMetadataConfig.get(EMR_SECRET_KEY) == null)) { throw new IllegalArgumentException("EMR auth keys are missing."); - } else if (!dataSourceMetadataConfig.get(EMR_AUTH_TYPE) + } else if (!dataSourceMetadataConfig + .get(EMR_AUTH_TYPE) .equals(AuthenticationType.AWSSIGV4AUTH.getName())) { throw new IllegalArgumentException("Invalid auth type."); } @@ -124,8 +124,8 @@ private void 
validateEMRConfigProperties(Map dataSourceMetadataC private AmazonElasticMapReduce getEMRClient( String emrAccessKey, String emrSecretKey, String emrRegion) { return AmazonElasticMapReduceClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider( - new BasicAWSCredentials(emrAccessKey, emrSecretKey))) + .withCredentials( + new AWSStaticCredentialsProvider(new BasicAWSCredentials(emrAccessKey, emrSecretKey))) .withRegion(emrRegion) .build(); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java index 5151405db9..731c3df672 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java @@ -18,20 +18,14 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * Spark table implementation. - * This can be constructed from SparkQueryRequest. - */ +/** Spark table implementation. This can be constructed from SparkQueryRequest. */ public class SparkTable implements Table { private final SparkClient sparkClient; - @Getter - private final SparkQueryRequest sparkQueryRequest; + @Getter private final SparkQueryRequest sparkQueryRequest; - /** - * Constructor for entire Sql Request. - */ + /** Constructor for entire Sql Request. 
*/ public SparkTable(SparkClient sparkService, SparkQueryRequest sparkQueryRequest) { this.sparkClient = sparkService; this.sparkQueryRequest = sparkQueryRequest; @@ -56,8 +50,7 @@ public Map getFieldTypes() { @Override public PhysicalPlan implement(LogicalPlan plan) { - SparkScan metricScan = - new SparkScan(sparkClient); + SparkScan metricScan = new SparkScan(sparkClient); metricScan.setRequest(sparkQueryRequest); return plan.accept(new DefaultImplementor(), metricScan); } diff --git a/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java b/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java index a94ac01f2f..93dc0d6bc8 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java @@ -29,12 +29,9 @@ @ExtendWith(MockitoExtension.class) public class EmrClientImplTest { - @Mock - private AmazonElasticMapReduce emr; - @Mock - private FlintHelper flint; - @Mock - private SparkResponse sparkResponse; + @Mock private AmazonElasticMapReduce emr; + @Mock private FlintHelper flint; + @Mock private SparkResponse sparkResponse; @Test @SneakyThrows @@ -50,8 +47,8 @@ void testRunEmrApplication() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.runEmrApplication(QUERY); } @@ -69,12 +66,12 @@ void testRunEmrApplicationFailed() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> emrClientImpl.runEmrApplication(QUERY)); - 
Assertions.assertEquals("Spark SQL application failed.", - exception.getMessage()); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> emrClientImpl.runEmrApplication(QUERY)); + Assertions.assertEquals("Spark SQL application failed.", exception.getMessage()); } @Test @@ -91,12 +88,12 @@ void testRunEmrApplicationCancelled() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> emrClientImpl.runEmrApplication(QUERY)); - Assertions.assertEquals("Spark SQL application failed.", - exception.getMessage()); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> emrClientImpl.runEmrApplication(QUERY)); + Assertions.assertEquals("Spark SQL application failed.", exception.getMessage()); } @Test @@ -119,11 +116,12 @@ void testRunEmrApplicationRunnning() { DescribeStepResult completedDescribeStepResult = new DescribeStepResult(); completedDescribeStepResult.setStep(completedStep); - when(emr.describeStep(any())).thenReturn(runningDescribeStepResult) + when(emr.describeStep(any())) + .thenReturn(runningDescribeStepResult) .thenReturn(completedDescribeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.runEmrApplication(QUERY); } @@ -147,14 +145,14 @@ void testSql() { DescribeStepResult completedDescribeStepResult = new DescribeStepResult(); completedDescribeStepResult.setStep(completedStep); - 
when(emr.describeStep(any())).thenReturn(runningDescribeStepResult) + when(emr.describeStep(any())) + .thenReturn(runningDescribeStepResult) .thenReturn(completedDescribeStepResult); when(sparkResponse.getResultFromOpensearchIndex()) .thenReturn(new JSONObject(getJson("select_query_response.json"))); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.sql(QUERY); - } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java index 18db5b9471..120747e0d3 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java @@ -27,51 +27,52 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlFunctionImplementationTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test void testValueOfAndTypeToString() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = assertThrows(UnsupportedOperationException.class, - () -> sparkSqlFunctionImplementation.valueOf()); - assertEquals("Spark defined function [sql] is only " - + "supported in SOURCE clause with spark connector catalog", exception.getMessage()); - assertEquals("sql(query=\"select 1\")", - sparkSqlFunctionImplementation.toString()); + List namedArgumentExpressionList = + List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + 
SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, () -> sparkSqlFunctionImplementation.valueOf()); + assertEquals( + "Spark defined function [sql] is only " + + "supported in SOURCE clause with spark connector catalog", + exception.getMessage()); + assertEquals("sql(query=\"select 1\")", sparkSqlFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, sparkSqlFunctionImplementation.type()); } @Test void testApplyArguments() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - SparkTable sparkTable - = (SparkTable) sparkSqlFunctionImplementation.applyArguments(); + List namedArgumentExpressionList = + List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + SparkTable sparkTable = (SparkTable) sparkSqlFunctionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest - = sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY)), - DSL.namedArgument("tmp", DSL.literal(12345))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, 
namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> sparkSqlFunctionImplementation.applyArguments()); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal(QUERY)), + DSL.namedArgument("tmp", DSL.literal(12345))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> sparkSqlFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:tmp", exception.getMessage()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java index 94c87602b7..212056eb15 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java @@ -18,23 +18,20 @@ import org.opensearch.sql.storage.TableScanOperator; public class SparkSqlFunctionTableScanBuilderTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder - = new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); - TableScanOperator sqlFunctionTableScanOperator - = sparkSqlFunctionTableScanBuilder.build(); - Assertions.assertTrue(sqlFunctionTableScanOperator - instanceof SparkSqlFunctionTableScanOperator); + SparkSqlFunctionTableScanBuilder 
sparkSqlFunctionTableScanBuilder = + new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); + TableScanOperator sqlFunctionTableScanOperator = sparkSqlFunctionTableScanBuilder.build(); + Assertions.assertTrue( + sqlFunctionTableScanOperator instanceof SparkSqlFunctionTableScanOperator); } @Test @@ -42,8 +39,8 @@ void testPushProject() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder - = new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder = + new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); Assertions.assertTrue(sparkSqlFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java index f6807f9913..586f0ef2d8 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java @@ -43,8 +43,7 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlFunctionTableScanOperatorTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; @Test @SneakyThrows @@ -52,15 +51,14 @@ void testEmptyQueryWithException() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenThrow(new 
IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, sparkSqlFunctionTableScanOperator::open); - assertEquals("Error fetching data from spark server: Error Message", - runtimeException.getMessage()); + when(sparkClient.sql(any())).thenThrow(new IOException("Error Message")); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, sparkSqlFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from spark server: Error Message", runtimeException.getMessage()); } @Test @@ -69,8 +67,8 @@ void testClose() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); sparkSqlFunctionTableScanOperator.close(); } @@ -80,11 +78,10 @@ void testExplain() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - Assertions.assertEquals("sql(select 1)", - sparkSqlFunctionTableScanOperator.explain()); + Assertions.assertEquals("sql(select 1)", sparkSqlFunctionTableScanOperator.explain()); } @Test @@ -93,18 +90,19 @@ void testQueryResponseIterator() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator 
sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("select_query_response.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("select_query_response.json"))); sparkSqlFunctionTableScanOperator.open(); assertTrue(sparkSqlFunctionTableScanOperator.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("1", new ExprIntegerValue(1)); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("1", new ExprIntegerValue(1)); + } + }); assertEquals(firstRow, sparkSqlFunctionTableScanOperator.next()); Assertions.assertFalse(sparkSqlFunctionTableScanOperator.hasNext()); } @@ -115,28 +113,29 @@ void testQueryResponseAllTypes() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("all_data_type.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("all_data_type.json"))); sparkSqlFunctionTableScanOperator.open(); assertTrue(sparkSqlFunctionTableScanOperator.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("boolean", ExprBooleanValue.of(true)); - put("long", new ExprLongValue(922337203)); - put("integer", new ExprIntegerValue(2147483647)); - put("short", new ExprShortValue(32767)); - put("byte", new ExprByteValue(127)); - put("double", new ExprDoubleValue(9223372036854.775807)); - put("float", new ExprFloatValue(21474.83647)); - put("timestamp", new ExprDateValue("2023-07-01 10:31:30")); - 
put("date", new ExprTimestampValue("2023-07-01 10:31:30")); - put("string", new ExprStringValue("ABC")); - put("char", new ExprStringValue("A")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("boolean", ExprBooleanValue.of(true)); + put("long", new ExprLongValue(922337203)); + put("integer", new ExprIntegerValue(2147483647)); + put("short", new ExprShortValue(32767)); + put("byte", new ExprByteValue(127)); + put("double", new ExprDoubleValue(9223372036854.775807)); + put("float", new ExprFloatValue(21474.83647)); + put("timestamp", new ExprDateValue("2023-07-01 10:31:30")); + put("date", new ExprTimestampValue("2023-07-01 10:31:30")); + put("string", new ExprStringValue("ABC")); + put("char", new ExprStringValue("A")); + } + }); assertEquals(firstRow, sparkSqlFunctionTableScanOperator.next()); Assertions.assertFalse(sparkSqlFunctionTableScanOperator.hasNext()); } @@ -147,16 +146,15 @@ void testQueryResponseInvalidDataType() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("invalid_data_type.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("invalid_data_type.json"))); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> sparkSqlFunctionTableScanOperator.open()); - Assertions.assertEquals("Result contains invalid data type", - exception.getMessage()); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> sparkSqlFunctionTableScanOperator.open()); + Assertions.assertEquals("Result contains invalid data type", 
exception.getMessage()); } @Test @@ -165,17 +163,14 @@ void testQuerySchema() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn( - new JSONObject(getJson("select_query_response.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("select_query_response.json"))); sparkSqlFunctionTableScanOperator.open(); ArrayList columns = new ArrayList<>(); columns.add(new ExecutionEngine.Schema.Column("1", "1", ExprCoreType.INTEGER)); ExecutionEngine.Schema expectedSchema = new ExecutionEngine.Schema(columns); assertEquals(expectedSchema, sparkSqlFunctionTableScanOperator.schema()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java index e18fac36de..a828ac76c4 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java @@ -35,107 +35,106 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlTableFunctionResolverTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = 
FunctionName.of("sql"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof SparkSqlFunctionImplementation); - SparkTable sparkTable - = (SparkTable) functionImplementation.applyArguments(); + SparkTable sparkTable = (SparkTable) functionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest = - sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testArgumentsPassedByPosition() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List 
expressions - = List.of(DSL.namedArgument(null, DSL.literal(QUERY))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); + List expressions = List.of(DSL.namedArgument(null, DSL.literal(QUERY))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof SparkSqlFunctionImplementation); - SparkTable sparkTable - = (SparkTable) functionImplementation.applyArguments(); + SparkTable sparkTable = (SparkTable) functionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest = - sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testMixedArgumentTypes() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = 
List.of(DSL.namedArgument("query", DSL.literal(QUERY)), - DSL.namedArgument(null, DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal(QUERY)), + DSL.namedArgument(null, DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Arguments should be either passed by name or position", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByName() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = List.of(); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, 
expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[query]", exception.getMessage()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java index abc4c81626..211561ac72 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java @@ -32,18 +32,12 @@ @ExtendWith(MockitoExtension.class) public class SparkResponseTest { - @Mock - private Client client; - @Mock - private SearchResponse searchResponse; - @Mock - private DeleteResponse deleteResponse; - @Mock - private SearchHit searchHit; - @Mock - private ActionFuture searchResponseActionFuture; - @Mock - private ActionFuture deleteResponseActionFuture; + @Mock private Client client; + @Mock private SearchResponse searchResponse; + @Mock private DeleteResponse deleteResponse; + @Mock private SearchHit searchHit; + @Mock private ActionFuture searchResponseActionFuture; + @Mock private ActionFuture deleteResponseActionFuture; @Test public void testGetResultFromOpensearchIndex() { @@ -53,12 +47,8 @@ public void testGetResultFromOpensearchIndex() { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1, 
TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsMap()) - .thenReturn(Map.of("stepId", EMR_CLUSTER_ID)); - + new SearchHit[] {searchHit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F)); + Mockito.when(searchHit.getSourceAsMap()).thenReturn(Map.of("stepId", EMR_CLUSTER_ID)); when(client.delete(any())).thenReturn(deleteResponseActionFuture); when(deleteResponseActionFuture.actionGet()).thenReturn(deleteResponse); @@ -75,11 +65,13 @@ public void testInvalidSearchResponse() { when(searchResponse.status()).thenReturn(RestStatus.NO_CONTENT); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> sparkResponse.getResultFromOpensearchIndex()); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> sparkResponse.getResultFromOpensearchIndex()); Assertions.assertEquals( - "Fetching result from " + SPARK_INDEX_NAME - + " index failed with status : " + RestStatus.NO_CONTENT, + "Fetching result from " + + SPARK_INDEX_NAME + + " index failed with status : " + + RestStatus.NO_CONTENT, exception.getMessage()); } @@ -104,8 +96,9 @@ public void testNotFoundDeleteResponse() { when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(ResourceNotFoundException.class, - () -> sparkResponse.deleteInSparkIndex("123")); + RuntimeException exception = + assertThrows( + ResourceNotFoundException.class, () -> sparkResponse.deleteInSparkIndex("123")); Assertions.assertEquals("Spark result with id 123 doesn't exist", exception.getMessage()); } @@ -116,8 +109,8 @@ public void testInvalidDeleteResponse() { when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = 
assertThrows(RuntimeException.class, - () -> sparkResponse.deleteInSparkIndex("123")); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> sparkResponse.deleteInSparkIndex("123")); Assertions.assertEquals( "Deleting spark result information failed with : noop", exception.getMessage()); } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java index c57142f580..971db3c33c 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java @@ -19,8 +19,7 @@ @ExtendWith(MockitoExtension.class) public class SparkScanTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; @Test @SneakyThrows @@ -36,8 +35,6 @@ void testQueryResponseIteratorForQueryRangeFunction() { void testExplain() { SparkScan sparkScan = new SparkScan(sparkClient); sparkScan.getRequest().setSql(QUERY); - assertEquals( - "SparkQueryRequest(sql=select 1)", - sparkScan.explain()); + assertEquals("SparkQueryRequest(sql=select 1)", sparkScan.explain()); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java index d42e123678..5e7ec76cdb 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java @@ -22,14 +22,12 @@ @ExtendWith(MockitoExtension.class) public class SparkStorageEngineTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test public void getFunctions() { SparkStorageEngine engine = new SparkStorageEngine(client); - Collection functionResolverCollection - = engine.getFunctions(); + Collection functionResolverCollection = engine.getFunctions(); 
assertNotNull(functionResolverCollection); assertEquals(1, functionResolverCollection.size()); assertTrue( @@ -39,8 +37,10 @@ public void getFunctions() { @Test public void getTable() { SparkStorageEngine engine = new SparkStorageEngine(client); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> engine.getTable(new DataSourceSchemaName("spark", "default"), "")); + RuntimeException exception = + assertThrows( + RuntimeException.class, + () -> engine.getTable(new DataSourceSchemaName("spark", "default"), "")); assertEquals("Unable to get table from storage engine.", exception.getMessage()); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java index c68adf2039..eb93cdabfe 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java @@ -24,17 +24,14 @@ @ExtendWith(MockitoExtension.class) public class SparkStorageFactoryTest { - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private Client client; + @Mock private Client client; @Test void testGetConnectorType() { SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - Assertions.assertEquals( - DataSourceType.SPARK, sparkStorageFactory.getDataSourceType()); + Assertions.assertEquals(DataSourceType.SPARK, sparkStorageFactory.getDataSourceType()); } @Test @@ -48,8 +45,7 @@ void testGetStorageEngine() { properties.put("emr.auth.secret_key", "secret_key"); properties.put("emr.auth.region", "region"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - StorageEngine storageEngine - = sparkStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = sparkStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine 
instanceof SparkStorageEngine); } @@ -59,10 +55,11 @@ void testInvalidConnectorType() { HashMap properties = new HashMap<>(); properties.put("spark.connector", "random"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - InvalidParameterException exception = Assertions.assertThrows(InvalidParameterException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Spark connector type is invalid.", - exception.getMessage()); + InvalidParameterException exception = + Assertions.assertThrows( + InvalidParameterException.class, + () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("Spark connector type is invalid.", exception.getMessage()); } @Test @@ -72,10 +69,10 @@ void testMissingAuth() { properties.put("spark.connector", "emr"); properties.put("emr.cluster", EMR_CLUSTER_ID); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR config properties are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR config properties are missing.", exception.getMessage()); } @Test @@ -86,10 +83,10 @@ void testUnsupportedEmrAuth() { properties.put("emr.cluster", EMR_CLUSTER_ID); properties.put("emr.auth.type", "basic"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Invalid auth type.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + 
IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("Invalid auth type.", exception.getMessage()); } @Test @@ -99,10 +96,10 @@ void testMissingCluster() { properties.put("spark.connector", "emr"); properties.put("emr.auth.type", "awssigv4"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR config properties are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR config properties are missing.", exception.getMessage()); } @Test @@ -113,10 +110,10 @@ void testMissingAuthKeys() { properties.put("emr.cluster", EMR_CLUSTER_ID); properties.put("emr.auth.type", "awssigv4"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR auth keys are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR auth keys are missing.", exception.getMessage()); } @Test @@ -128,10 +125,10 @@ void testMissingAuthSecretKey() { properties.put("emr.auth.type", "awssigv4"); properties.put("emr.auth.access_key", "test"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - 
Assertions.assertEquals("EMR auth keys are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR auth keys are missing.", exception.getMessage()); } @Test @@ -178,5 +175,4 @@ void testSetSparkJars() { DataSource dataSource = new SparkStorageFactory(client, settings).createDataSource(metadata); Assertions.assertTrue(dataSource.getStorageEngine() instanceof SparkStorageEngine); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java index 39bd2eb199..a70d4ba69e 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java @@ -31,26 +31,23 @@ @ExtendWith(MockitoExtension.class) public class SparkTableTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test void testUnsupportedOperation() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); - SparkTable sparkTable = - new SparkTable(client, sparkQueryRequest); + SparkTable sparkTable = new SparkTable(client, sparkQueryRequest); assertThrows(UnsupportedOperationException.class, sparkTable::exists); - assertThrows(UnsupportedOperationException.class, - () -> sparkTable.create(Collections.emptyMap())); + assertThrows( + UnsupportedOperationException.class, () -> sparkTable.create(Collections.emptyMap())); } @Test void testCreateScanBuilderWithSqlTableFunction() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkTable sparkTable = - new SparkTable(client, sparkQueryRequest); + SparkTable sparkTable = new SparkTable(client, sparkQueryRequest); TableScanBuilder tableScanBuilder = sparkTable.createScanBuilder(); 
Assertions.assertNotNull(tableScanBuilder); Assertions.assertTrue(tableScanBuilder instanceof SparkSqlFunctionTableScanBuilder); @@ -59,8 +56,7 @@ void testCreateScanBuilderWithSqlTableFunction() { @Test @SneakyThrows void testGetFieldTypesFromSparkQueryRequest() { - SparkTable sparkTable - = new SparkTable(client, new SparkQueryRequest()); + SparkTable sparkTable = new SparkTable(client, new SparkQueryRequest()); Map expectedFieldTypes = new HashMap<>(); Map fieldTypes = sparkTable.getFieldTypes(); @@ -73,10 +69,9 @@ void testGetFieldTypesFromSparkQueryRequest() { void testImplementWithSqlFunction() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkTable sparkMetricTable = - new SparkTable(client, sparkQueryRequest); - PhysicalPlan plan = sparkMetricTable.implement( - new SparkSqlFunctionTableScanBuilder(client, sparkQueryRequest)); + SparkTable sparkMetricTable = new SparkTable(client, sparkQueryRequest); + PhysicalPlan plan = + sparkMetricTable.implement(new SparkSqlFunctionTableScanBuilder(client, sparkQueryRequest)); assertTrue(plan instanceof SparkSqlFunctionTableScanOperator); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java index b480e6d9d9..ca77006d9c 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java +++ b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java @@ -12,6 +12,7 @@ public class TestUtils { /** * Get Json document from the files in resources folder. + * * @param filename filename. * @return String. * @throws IOException IOException. 
@@ -21,5 +22,4 @@ public static String getJson(String filename) throws IOException { return new String( Objects.requireNonNull(classLoader.getResourceAsStream(filename)).readAllBytes()); } - } From 56aa572511500965718480cf1c329489a5caf80d Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Fri, 18 Aug 2023 09:28:44 -0700 Subject: [PATCH 29/42] [Spotless] Applying Google Code Format for opensearch directory #16 (#1977) * spotless apply for OpenSearch P1. Signed-off-by: Mitchell Gale * Manual spotless changes Signed-off-by: Mitchell Gale * ignore failures for checkstyles. Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Address PR comment Signed-off-by: Mitchell Gale * add order list in Content.java Signed-off-by: Mitchell Gale * Update opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Update opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * Fixing compilation error Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand Co-authored-by: Guian Gumpac --- opensearch/build.gradle | 3 + .../sql/opensearch/client/MLClient.java | 6 +- .../opensearch/client/OpenSearchClient.java | 10 +- .../data/type/OpenSearchBinaryType.java | 3 +- .../data/type/OpenSearchDataType.java | 139 +- .../data/type/OpenSearchDateType.java | 285 +-- .../sql/opensearch/data/utils/Content.java | 91 +- .../opensearch/data/utils/ObjectContent.java | 26 +- .../data/value/OpenSearchExprBinaryValue.java | 4 +- .../value/OpenSearchExprGeoPointValue.java 
| 6 +- .../executor/OpenSearchExecutionEngine.java | 48 +- .../protector/ExecutionProtector.java | 9 +- .../protector/NoopExecutionProtector.java | 5 +- .../OpenSearchExecutionProtector.java | 91 +- .../sql/opensearch/mapping/IndexMapping.java | 15 +- .../planner/physical/ADOperator.java | 179 +- .../planner/physical/MLCommonsOperator.java | 84 +- .../physical/MLCommonsOperatorActions.java | 140 +- .../planner/physical/MLOperator.java | 70 +- .../system/OpenSearchCatIndicesRequest.java | 5 +- .../OpenSearchDescribeIndexRequest.java | 42 +- .../agg/CompositeAggregationParser.java | 4 +- .../opensearch/response/agg/FilterParser.java | 5 +- .../opensearch/response/agg/MetricParser.java | 8 +- .../response/agg/MetricParserHelper.java | 9 +- .../agg/NoBucketAggregationParser.java | 4 +- .../OpenSearchAggregationResponseParser.java | 5 +- .../response/error/ErrorMessage.java | 18 +- .../response/error/ErrorMessageFactory.java | 9 +- .../error/OpenSearchErrorMessage.java | 23 +- .../setting/LegacyOpenDistroSettings.java | 194 ++- .../storage/OpenSearchDataSourceFactory.java | 4 +- .../script/ExpressionScriptEngine.java | 31 +- .../aggregation/AggregationQueryBuilder.java | 98 +- .../ExpressionAggregationScript.java | 19 +- .../ExpressionAggregationScriptFactory.java | 5 +- ...xpressionAggregationScriptLeafFactory.java | 21 +- .../dsl/AggregationBuilderHelper.java | 24 +- .../dsl/BucketAggregationBuilder.java | 15 +- .../dsl/MetricAggregationBuilder.java | 50 +- .../storage/script/core/ExpressionScript.java | 108 +- .../script/filter/ExpressionFilterScript.java | 31 +- .../filter/ExpressionFilterScriptFactory.java | 10 +- .../ExpressionFilterScriptLeafFactory.java | 23 +- .../script/filter/FilterQueryBuilder.java | 54 +- .../script/filter/lucene/LikeQuery.java | 7 +- .../script/filter/lucene/LuceneQuery.java | 293 ++-- .../script/filter/lucene/NestedQuery.java | 35 +- .../FunctionParameterRepository.java | 389 +++-- .../relevance/MatchBoolPrefixQuery.java | 18 +- 
.../relevance/MatchPhrasePrefixQuery.java | 6 +- .../lucene/relevance/MatchPhraseQuery.java | 8 +- .../filter/lucene/relevance/MatchQuery.java | 8 +- .../lucene/relevance/MultiFieldQuery.java | 37 +- .../lucene/relevance/MultiMatchQuery.java | 4 +- .../filter/lucene/relevance/NoFieldQuery.java | 27 +- .../DefaultExpressionSerializer.java | 6 +- .../serialization/ExpressionSerializer.java | 16 +- .../OpenSearchDataTypeRecognitionTest.java | 7 +- .../data/type/OpenSearchDataTypeTest.java | 240 +-- .../data/type/OpenSearchDateTypeTest.java | 78 +- .../value/OpenSearchExprBinaryValueTest.java | 10 +- .../OpenSearchExecutionEngineTest.java | 171 +- .../protector/NoopExecutionProtectorTest.java | 4 +- .../OpenSearchExecutionProtectorTest.java | 173 +- .../physical/MLCommonsOperatorTest.java | 78 +- .../planner/physical/MLOperatorTest.java | 74 +- .../OpenSearchCatIndicesRequestTest.java | 12 +- .../OpenSearchDescribeIndexRequestTest.java | 40 +- .../response/AggregationResponseUtils.java | 80 +- ...enSearchAggregationResponseParserTest.java | 446 ++--- .../error/ErrorMessageFactoryTest.java | 1 - .../response/error/ErrorMessageTest.java | 65 +- .../error/OpenSearchErrorMessageTest.java | 19 +- .../OpenSearchDefaultImplementorTest.java | 15 +- .../script/ExpressionScriptEngineTest.java | 17 +- .../AggregationQueryBuilderTest.java | 864 +++++----- ...xpressionAggregationScriptFactoryTest.java | 13 +- .../ExpressionAggregationScriptTest.java | 79 +- .../aggregation/GroupSortOrderTest.java | 11 +- .../dsl/BucketAggregationBuilderTest.java | 36 +- .../dsl/MetricAggregationBuilderTest.java | 471 ++--- .../ExpressionFilterScriptFactoryTest.java | 14 +- .../filter/ExpressionFilterScriptTest.java | 75 +- .../script/filter/FilterQueryBuilderTest.java | 1536 +++++++++-------- .../script/filter/lucene/LuceneQueryTest.java | 8 +- .../lucene/MatchBoolPrefixQueryTest.java | 48 +- .../lucene/MatchPhrasePrefixQueryTest.java | 90 +- .../filter/lucene/MatchPhraseQueryTest.java | 300 ++-- 
.../script/filter/lucene/MatchQueryTest.java | 211 ++- .../script/filter/lucene/MultiMatchTest.java | 139 +- .../lucene/relevance/MultiFieldQueryTest.java | 42 +- .../lucene/relevance/NoFieldQueryTest.java | 20 +- .../DefaultExpressionSerializerTest.java | 34 +- 94 files changed, 4288 insertions(+), 4120 deletions(-) diff --git a/opensearch/build.gradle b/opensearch/build.gradle index 4095d519cb..a2ab670403 100644 --- a/opensearch/build.gradle +++ b/opensearch/build.gradle @@ -48,6 +48,9 @@ dependencies { testImplementation group: 'org.opensearch.test', name: 'framework', version: "${opensearch_version}" } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + pitest { targetClasses = ['org.opensearch.sql.*'] pitestVersion = '1.9.0' diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java index 19f49d0e5f..4bc6009875 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java @@ -3,16 +3,14 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.ml.client.MachineLearningNodeClient; - public class MLClient { private static MachineLearningNodeClient INSTANCE; - private MLClient() { - - } + private MLClient() {} /** * get machine learning client. 
+ * * @param nodeClient node client * @return machine learning client */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java index dc6e72bd91..0a9cc67993 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import java.util.List; @@ -14,9 +13,8 @@ import org.opensearch.sql.opensearch.response.OpenSearchResponse; /** - * OpenSearch client abstraction to wrap different OpenSearch client implementation. For - * example, implementation by node client for OpenSearch plugin or by REST client for - * standalone mode. + * OpenSearch client abstraction to wrap different OpenSearch client implementation. For example, + * implementation by node client for OpenSearch plugin or by REST client for standalone mode. */ public interface OpenSearchClient { @@ -24,6 +22,7 @@ public interface OpenSearchClient { /** * Check if the given index exists. + * * @param indexName index name * @return true if exists, otherwise false */ @@ -31,8 +30,9 @@ public interface OpenSearchClient { /** * Create OpenSearch index based on the given mappings. 
+ * * @param indexName index name - * @param mappings index mappings + * @param mappings index mappings */ void createIndex(String indexName, Map mappings); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java index cd58d4bc9f..b3be5e7b7f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,7 +10,7 @@ import lombok.EqualsAndHashCode; /** - * The type of a binary value. See + * The type of binary value. See
* doc */ @EqualsAndHashCode(callSuper = false) diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java index 273b980d2a..d276374539 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import com.google.common.collect.ImmutableMap; @@ -18,15 +17,11 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * The extension of ExprType in OpenSearch. - */ +/** The extension of ExprType in OpenSearch. */ @EqualsAndHashCode public class OpenSearchDataType implements ExprType, Serializable { - /** - * The mapping (OpenSearch engine) type. - */ + /** The mapping (OpenSearch engine) type. */ public enum MappingType { Invalid(null, ExprCoreType.UNKNOWN), Text("text", ExprCoreType.UNKNOWN), @@ -51,8 +46,7 @@ public enum MappingType { private final String name; // Associated `ExprCoreType` - @Getter - private final ExprCoreType exprCoreType; + @Getter private final ExprCoreType exprCoreType; MappingType(String name, ExprCoreType exprCoreType) { this.name = name; @@ -64,16 +58,15 @@ public String toString() { } } - @EqualsAndHashCode.Exclude - @Getter - protected MappingType mappingType; + @EqualsAndHashCode.Exclude @Getter protected MappingType mappingType; // resolved ExprCoreType protected ExprCoreType exprCoreType; /** - * Get a simplified type {@link ExprCoreType} if possible. - * To avoid returning `UNKNOWN` for `OpenSearch*Type`s, e.g. for IP, returns itself. + * Get a simplified type {@link ExprCoreType} if possible. To avoid returning `UNKNOWN` for + * `OpenSearch*Type`s, e.g. for IP, returns itself. + * * @return An {@link ExprType}. 
*/ public ExprType getExprType() { @@ -84,22 +77,23 @@ public ExprType getExprType() { } /** - * Simple instances of OpenSearchDataType are created once during entire SQL engine lifetime - * and cached there. This reduces memory usage and increases type comparison. - * Note: Types with non-empty fields and properties are not cached. + * Simple instances of OpenSearchDataType are created once during entire SQL engine lifetime and + * cached there. This reduces memory usage and increases type comparison. Note: Types with + * non-empty fields and properties are not cached. */ private static final Map instances = new HashMap<>(); static { EnumUtils.getEnumList(MappingType.class).stream() - .filter(t -> t != MappingType.Invalid).forEach(t -> - instances.put(t.toString(), OpenSearchDataType.of(t))); - EnumUtils.getEnumList(ExprCoreType.class).forEach(t -> - instances.put(t.toString(), OpenSearchDataType.of(t))); + .filter(t -> t != MappingType.Invalid) + .forEach(t -> instances.put(t.toString(), OpenSearchDataType.of(t))); + EnumUtils.getEnumList(ExprCoreType.class) + .forEach(t -> instances.put(t.toString(), OpenSearchDataType.of(t))); } /** * Parses index mapping and maps it to a Data type in the SQL plugin. + * * @param indexMapping An input with keys and objects that need to be mapped to a data type. * @return The mapping. */ @@ -110,37 +104,35 @@ public static Map parseMapping(Map i return result; } - indexMapping.forEach((k, v) -> { - var innerMap = (Map)v; - // by default, the type is treated as an Object if "type" is not provided - var type = ((String) innerMap - .getOrDefault( - "type", - "object")) - .replace("_", ""); - if (!EnumUtils.isValidEnumIgnoreCase(OpenSearchDataType.MappingType.class, type)) { - // unknown type, e.g. 
`alias` - // TODO resolve alias reference - return; - } - // create OpenSearchDataType - result.put(k, OpenSearchDataType.of( - EnumUtils.getEnumIgnoreCase(OpenSearchDataType.MappingType.class, type), - innerMap) - ); - }); + indexMapping.forEach( + (k, v) -> { + var innerMap = (Map) v; + // by default, the type is treated as an Object if "type" is not provided + var type = ((String) innerMap.getOrDefault("type", "object")).replace("_", ""); + if (!EnumUtils.isValidEnumIgnoreCase(OpenSearchDataType.MappingType.class, type)) { + // unknown type, e.g. `alias` + // TODO resolve alias reference + return; + } + // create OpenSearchDataType + result.put( + k, + OpenSearchDataType.of( + EnumUtils.getEnumIgnoreCase(OpenSearchDataType.MappingType.class, type), + innerMap)); + }); return result; } /** * A constructor function which builds proper `OpenSearchDataType` for given mapping `Type`. + * * @param mappingType A mapping type. * @return An instance or inheritor of `OpenSearchDataType`. */ public static OpenSearchDataType of(MappingType mappingType, Map innerMap) { - OpenSearchDataType res = instances.getOrDefault(mappingType.toString(), - new OpenSearchDataType(mappingType) - ); + OpenSearchDataType res = + instances.getOrDefault(mappingType.toString(), new OpenSearchDataType(mappingType)); switch (mappingType) { case Object: // TODO: use Object type once it has been added @@ -158,9 +150,12 @@ public static OpenSearchDataType of(MappingType mappingType, Map Map fields = parseMapping((Map) innerMap.getOrDefault("fields", Map.of())); return (!fields.isEmpty()) ? 
OpenSearchTextType.of(fields) : OpenSearchTextType.of(); - case GeoPoint: return OpenSearchGeoPointType.of(); - case Binary: return OpenSearchBinaryType.of(); - case Ip: return OpenSearchIpType.of(); + case GeoPoint: + return OpenSearchGeoPointType.of(); + case Binary: + return OpenSearchBinaryType.of(); + case Ip: + return OpenSearchIpType.of(); case Date: // Default date formatter is used when "" is passed as the second parameter String format = (String) innerMap.getOrDefault("format", ""); @@ -173,6 +168,7 @@ public static OpenSearchDataType of(MappingType mappingType, Map /** * A constructor function which builds proper `OpenSearchDataType` for given mapping `Type`. * Designed to be called by the mapping parser only (and tests). + * * @param mappingType A mapping type. * @return An instance or inheritor of `OpenSearchDataType`. */ @@ -182,6 +178,7 @@ public static OpenSearchDataType of(MappingType mappingType) { /** * A constructor function which builds proper `OpenSearchDataType` for given {@link ExprType}. + * * @param type A type. * @return An instance of `OpenSearchDataType`. */ @@ -211,9 +208,7 @@ protected OpenSearchDataType(ExprCoreType type) { // For datatypes with properties (example: object and nested types) // a read-only collection - @Getter - @EqualsAndHashCode.Exclude - Map properties = ImmutableMap.of(); + @Getter @EqualsAndHashCode.Exclude Map properties = ImmutableMap.of(); @Override // Called when building TypeEnvironment and when serializing PPL response @@ -236,46 +231,52 @@ public String legacyTypeName() { } /** - * Clone type object without {@link #properties} - without info about nested object types. - * Note: Should be overriden by all derived classes for proper work. + * Clone type object without {@link #properties} - without info about nested object types. Note: + * Should be overriden by all derived classes for proper work. + * * @return A cloned object. 
*/ protected OpenSearchDataType cloneEmpty() { return this.mappingType == null - ? new OpenSearchDataType(this.exprCoreType) : new OpenSearchDataType(this.mappingType); + ? new OpenSearchDataType(this.exprCoreType) + : new OpenSearchDataType(this.mappingType); } /** - * Flattens mapping tree into a single layer list of objects (pairs of name-types actually), - * which don't have nested types. - * See {@link OpenSearchDataTypeTest#traverseAndFlatten() test} for example. + * Flattens mapping tree into a single layer list of objects (pairs of name-types actually), which + * don't have nested types. See {@link OpenSearchDataTypeTest#traverseAndFlatten() test} for + * example. + * * @param tree A list of `OpenSearchDataType`s - map between field name and its type. * @return A list of all `OpenSearchDataType`s from given map on the same nesting level (1). - * Nested object names are prefixed by names of their host. + * Nested object names are prefixed by names of their host. */ public static Map traverseAndFlatten( Map tree) { final Map result = new LinkedHashMap<>(); - BiConsumer, String> visitLevel = new BiConsumer<>() { - @Override - public void accept(Map subtree, String prefix) { - for (var entry : subtree.entrySet()) { - String entryKey = entry.getKey(); - var nextPrefix = prefix.isEmpty() ? entryKey : String.format("%s.%s", prefix, entryKey); - result.put(nextPrefix, entry.getValue().cloneEmpty()); - var nextSubtree = entry.getValue().getProperties(); - if (!nextSubtree.isEmpty()) { - accept(nextSubtree, nextPrefix); + BiConsumer, String> visitLevel = + new BiConsumer<>() { + @Override + public void accept(Map subtree, String prefix) { + for (var entry : subtree.entrySet()) { + String entryKey = entry.getKey(); + var nextPrefix = + prefix.isEmpty() ? 
entryKey : String.format("%s.%s", prefix, entryKey); + result.put(nextPrefix, entry.getValue().cloneEmpty()); + var nextSubtree = entry.getValue().getProperties(); + if (!nextSubtree.isEmpty()) { + accept(nextSubtree, nextPrefix); + } + } } - } - } - }; + }; visitLevel.accept(tree, ""); return result; } /** * Resolve type of identified from parsed mapping tree. + * * @param tree Parsed mapping tree (not flattened). * @param id An identifier. * @return Resolved OpenSearchDataType or null if not found. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java index 76947bf720..d0a924c494 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java @@ -20,124 +20,124 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Date type with support for predefined and custom formats read from the index mapping. - */ +/** Date type with support for predefined and custom formats read from the index mapping. */ @EqualsAndHashCode(callSuper = true) public class OpenSearchDateType extends OpenSearchDataType { private static final OpenSearchDateType instance = new OpenSearchDateType(); /** Numeric formats which support full datetime. */ - public static final List SUPPORTED_NAMED_NUMERIC_FORMATS = List.of( - FormatNames.EPOCH_MILLIS, - FormatNames.EPOCH_SECOND - ); + public static final List SUPPORTED_NAMED_NUMERIC_FORMATS = + List.of(FormatNames.EPOCH_MILLIS, FormatNames.EPOCH_SECOND); /** List of named formats which support full datetime. 
*/ - public static final List SUPPORTED_NAMED_DATETIME_FORMATS = List.of( - FormatNames.ISO8601, - FormatNames.BASIC_DATE_TIME, - FormatNames.BASIC_DATE_TIME_NO_MILLIS, - FormatNames.BASIC_ORDINAL_DATE_TIME, - FormatNames.BASIC_ORDINAL_DATE_TIME_NO_MILLIS, - FormatNames.BASIC_WEEK_DATE_TIME, - FormatNames.STRICT_BASIC_WEEK_DATE_TIME, - FormatNames.BASIC_WEEK_DATE_TIME_NO_MILLIS, - FormatNames.STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS, - FormatNames.BASIC_WEEK_DATE, - FormatNames.STRICT_BASIC_WEEK_DATE, - FormatNames.DATE_OPTIONAL_TIME, - FormatNames.STRICT_DATE_OPTIONAL_TIME, - FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS, - FormatNames.DATE_TIME, - FormatNames.STRICT_DATE_TIME, - FormatNames.DATE_TIME_NO_MILLIS, - FormatNames.STRICT_DATE_TIME_NO_MILLIS, - FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.DATE_HOUR_MINUTE_SECOND_MILLIS, - FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS, - FormatNames.DATE_HOUR_MINUTE_SECOND, - FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND, - FormatNames.DATE_HOUR_MINUTE, - FormatNames.STRICT_DATE_HOUR_MINUTE, - FormatNames.DATE_HOUR, - FormatNames.STRICT_DATE_HOUR, - FormatNames.ORDINAL_DATE_TIME, - FormatNames.STRICT_ORDINAL_DATE_TIME, - FormatNames.ORDINAL_DATE_TIME_NO_MILLIS, - FormatNames.STRICT_ORDINAL_DATE_TIME_NO_MILLIS, - FormatNames.WEEK_DATE_TIME, - FormatNames.STRICT_WEEK_DATE_TIME, - FormatNames.WEEK_DATE_TIME_NO_MILLIS, - FormatNames.STRICT_WEEK_DATE_TIME_NO_MILLIS - ); + public static final List SUPPORTED_NAMED_DATETIME_FORMATS = + List.of( + FormatNames.ISO8601, + FormatNames.BASIC_DATE_TIME, + FormatNames.BASIC_DATE_TIME_NO_MILLIS, + FormatNames.BASIC_ORDINAL_DATE_TIME, + FormatNames.BASIC_ORDINAL_DATE_TIME_NO_MILLIS, + FormatNames.BASIC_WEEK_DATE_TIME, + FormatNames.STRICT_BASIC_WEEK_DATE_TIME, + FormatNames.BASIC_WEEK_DATE_TIME_NO_MILLIS, + FormatNames.STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS, + 
FormatNames.BASIC_WEEK_DATE, + FormatNames.STRICT_BASIC_WEEK_DATE, + FormatNames.DATE_OPTIONAL_TIME, + FormatNames.STRICT_DATE_OPTIONAL_TIME, + FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS, + FormatNames.DATE_TIME, + FormatNames.STRICT_DATE_TIME, + FormatNames.DATE_TIME_NO_MILLIS, + FormatNames.STRICT_DATE_TIME_NO_MILLIS, + FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.DATE_HOUR_MINUTE_SECOND_MILLIS, + FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS, + FormatNames.DATE_HOUR_MINUTE_SECOND, + FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND, + FormatNames.DATE_HOUR_MINUTE, + FormatNames.STRICT_DATE_HOUR_MINUTE, + FormatNames.DATE_HOUR, + FormatNames.STRICT_DATE_HOUR, + FormatNames.ORDINAL_DATE_TIME, + FormatNames.STRICT_ORDINAL_DATE_TIME, + FormatNames.ORDINAL_DATE_TIME_NO_MILLIS, + FormatNames.STRICT_ORDINAL_DATE_TIME_NO_MILLIS, + FormatNames.WEEK_DATE_TIME, + FormatNames.STRICT_WEEK_DATE_TIME, + FormatNames.WEEK_DATE_TIME_NO_MILLIS, + FormatNames.STRICT_WEEK_DATE_TIME_NO_MILLIS); /** List of named formats that only support year/month/day. */ - public static final List SUPPORTED_NAMED_DATE_FORMATS = List.of( - FormatNames.BASIC_DATE, - FormatNames.BASIC_ORDINAL_DATE, - FormatNames.DATE, - FormatNames.STRICT_DATE, - FormatNames.YEAR_MONTH_DAY, - FormatNames.STRICT_YEAR_MONTH_DAY, - FormatNames.ORDINAL_DATE, - FormatNames.STRICT_ORDINAL_DATE, - FormatNames.WEEK_DATE, - FormatNames.STRICT_WEEK_DATE, - FormatNames.WEEKYEAR_WEEK_DAY, - FormatNames.STRICT_WEEKYEAR_WEEK_DAY - ); - - /** list of named formats which produce incomplete date, - * e.g. 1 or 2 are missing from tuple year/month/day. 
*/ - public static final List SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS = List.of( - FormatNames.YEAR_MONTH, - FormatNames.STRICT_YEAR_MONTH, - FormatNames.YEAR, - FormatNames.STRICT_YEAR, - FormatNames.WEEK_YEAR, - FormatNames.WEEK_YEAR_WEEK, - FormatNames.STRICT_WEEKYEAR_WEEK, - FormatNames.WEEKYEAR, - FormatNames.STRICT_WEEKYEAR - ); + public static final List SUPPORTED_NAMED_DATE_FORMATS = + List.of( + FormatNames.BASIC_DATE, + FormatNames.BASIC_ORDINAL_DATE, + FormatNames.DATE, + FormatNames.STRICT_DATE, + FormatNames.YEAR_MONTH_DAY, + FormatNames.STRICT_YEAR_MONTH_DAY, + FormatNames.ORDINAL_DATE, + FormatNames.STRICT_ORDINAL_DATE, + FormatNames.WEEK_DATE, + FormatNames.STRICT_WEEK_DATE, + FormatNames.WEEKYEAR_WEEK_DAY, + FormatNames.STRICT_WEEKYEAR_WEEK_DAY); + + /** + * list of named formats which produce incomplete date, e.g. 1 or 2 are missing from tuple + * year/month/day. + */ + public static final List SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS = + List.of( + FormatNames.YEAR_MONTH, + FormatNames.STRICT_YEAR_MONTH, + FormatNames.YEAR, + FormatNames.STRICT_YEAR, + FormatNames.WEEK_YEAR, + FormatNames.WEEK_YEAR_WEEK, + FormatNames.STRICT_WEEKYEAR_WEEK, + FormatNames.WEEKYEAR, + FormatNames.STRICT_WEEKYEAR); /** List of named formats that only support hour/minute/second. 
*/ - public static final List SUPPORTED_NAMED_TIME_FORMATS = List.of( - FormatNames.BASIC_TIME, - FormatNames.BASIC_TIME_NO_MILLIS, - FormatNames.BASIC_T_TIME, - FormatNames.BASIC_T_TIME_NO_MILLIS, - FormatNames.TIME, - FormatNames.STRICT_TIME, - FormatNames.TIME_NO_MILLIS, - FormatNames.STRICT_TIME_NO_MILLIS, - FormatNames.HOUR_MINUTE_SECOND_FRACTION, - FormatNames.STRICT_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.HOUR_MINUTE_SECOND_MILLIS, - FormatNames.STRICT_HOUR_MINUTE_SECOND_MILLIS, - FormatNames.HOUR_MINUTE_SECOND, - FormatNames.STRICT_HOUR_MINUTE_SECOND, - FormatNames.HOUR_MINUTE, - FormatNames.STRICT_HOUR_MINUTE, - FormatNames.HOUR, - FormatNames.STRICT_HOUR, - FormatNames.T_TIME, - FormatNames.STRICT_T_TIME, - FormatNames.T_TIME_NO_MILLIS, - FormatNames.STRICT_T_TIME_NO_MILLIS - ); - - /** Formatter symbols which used to format time or date correspondingly. - * {@link java.time.format.DateTimeFormatter}. */ + public static final List SUPPORTED_NAMED_TIME_FORMATS = + List.of( + FormatNames.BASIC_TIME, + FormatNames.BASIC_TIME_NO_MILLIS, + FormatNames.BASIC_T_TIME, + FormatNames.BASIC_T_TIME_NO_MILLIS, + FormatNames.TIME, + FormatNames.STRICT_TIME, + FormatNames.TIME_NO_MILLIS, + FormatNames.STRICT_TIME_NO_MILLIS, + FormatNames.HOUR_MINUTE_SECOND_FRACTION, + FormatNames.STRICT_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.HOUR_MINUTE_SECOND_MILLIS, + FormatNames.STRICT_HOUR_MINUTE_SECOND_MILLIS, + FormatNames.HOUR_MINUTE_SECOND, + FormatNames.STRICT_HOUR_MINUTE_SECOND, + FormatNames.HOUR_MINUTE, + FormatNames.STRICT_HOUR_MINUTE, + FormatNames.HOUR, + FormatNames.STRICT_HOUR, + FormatNames.T_TIME, + FormatNames.STRICT_T_TIME, + FormatNames.T_TIME_NO_MILLIS, + FormatNames.STRICT_T_TIME_NO_MILLIS); + + /** + * Formatter symbols which used to format time or date correspondingly. {@link + * java.time.format.DateTimeFormatter}. 
+ */ private static final String CUSTOM_FORMAT_TIME_SYMBOLS = "nNASsmHkKha"; + private static final String CUSTOM_FORMAT_DATE_SYMBOLS = "FecEWwYqQgdMLDyuG"; - @EqualsAndHashCode.Exclude - private final List formats; + @EqualsAndHashCode.Exclude private final List formats; private OpenSearchDateType() { super(MappingType.Date); @@ -166,6 +166,7 @@ public boolean hasFormats() { /** * Retrieves and splits a user defined format string from the mapping into a list of formats. + * * @return A list of format names and user defined formats. */ private List getFormatList(String format) { @@ -175,49 +176,57 @@ private List getFormatList(String format) { /** * Retrieves a list of named OpenSearch formatters given by user mapping. + * * @return a list of DateFormatters that can be used to parse a Date/Time/Timestamp. */ public List getAllNamedFormatters() { return formats.stream() .filter(formatString -> FormatNames.forName(formatString) != null) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of numeric formatters that format for dates. + * * @return a list of DateFormatters that can be used to parse a Date. */ public List getNumericNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_NUMERIC_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_NUMERIC_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of custom formats defined by the user. + * * @return a list of formats as strings that can be used to parse a Date/Time/Timestamp. 
*/ public List getAllCustomFormats() { return formats.stream() .filter(format -> FormatNames.forName(format) == null) - .map(format -> { - try { - DateFormatter.forPattern(format); - return format; - } catch (Exception ignored) { - // parsing failed - return null; - } - }) + .map( + format -> { + try { + DateFormatter.forPattern(format); + return format; + } catch (Exception ignored) { + // parsing failed + return null; + } + }) .filter(Objects::nonNull) .collect(Collectors.toList()); } /** * Retrieves a list of custom formatters defined by the user. + * * @return a list of DateFormatters that can be used to parse a Date/Time/Timestamp. */ public List getAllCustomFormatters() { @@ -228,41 +237,50 @@ public List getAllCustomFormatters() { /** * Retrieves a list of named formatters that format for dates. + * * @return a list of DateFormatters that can be used to parse a Date. */ public List getDateNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_DATE_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_DATE_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of named formatters that format for Times. + * * @return a list of DateFormatters that can be used to parse a Time. 
*/ public List getTimeNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_TIME_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_TIME_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of named formatters that format for DateTimes. + * * @return a list of DateFormatters that can be used to parse a DateTime. */ public List getDateTimeNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_DATETIME_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_DATETIME_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } private ExprCoreType getExprTypeFromCustomFormats(List formats) { @@ -368,6 +386,7 @@ public static boolean isDateTypeCompatible(ExprType exprType) { /** * Create a Date type which has a LinkedHashMap defining all formats. + * * @return A new type object. 
*/ public static OpenSearchDateType of(String format) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java index 992689a186..0c3d2aec45 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import java.util.Iterator; @@ -11,111 +10,73 @@ import org.apache.commons.lang3.tuple.Pair; /** + * * Regardless the underling data format, the {@link Content} define the data in abstract manner. - * which could be parsed by ElasticsearchExprValueFactory. - * There are two major use cases: - * 1. Represent the JSON data retrieve from OpenSearch search response. - * 2. Represent the Object data extract from the OpenSearch aggregation response. + * which could be parsed by ElasticsearchExprValueFactory. There are two major use cases: + *
    + *
  1. Represent the JSON data retrieve from OpenSearch search response.
  2. + *
  3. Represent the Object data extract from the OpenSearch aggregation response.
  4. + *
*/ public interface Content { - /** - * Is null value. - */ + /** Is null value. */ boolean isNull(); - /** - * Is number value. - */ + /** Is number value. */ boolean isNumber(); - /** - * Is float value. - */ + /** Is float value. */ boolean isFloat(); - /** - * Is double value. - */ + /** Is double value. */ boolean isDouble(); - /** - * Is long value. - */ + /** Is long value. */ boolean isLong(); - /** - * Is boolean value. - */ + /** Is boolean value. */ boolean isBoolean(); - /** - * Is string value. - */ + /** Is string value. */ boolean isString(); - /** - * Is array value. - */ + /** Is array value. */ boolean isArray(); - /** - * Get integer value. - */ + /** Get integer value. */ Integer intValue(); - /** - * Get long value. - */ + /** Get long value. */ Long longValue(); - /** - * Get short value. - */ + /** Get short value. */ Short shortValue(); - /** - * Get byte value. - */ + /** Get byte value. */ Byte byteValue(); - /** - * Get float value. - */ + /** Get float value. */ Float floatValue(); - /** - * Get double value. - */ + /** Get double value. */ Double doubleValue(); - /** - * Get string value. - */ + /** Get string value. */ String stringValue(); - /** - * Get boolean value. - */ + /** Get boolean value. */ Boolean booleanValue(); - /** - * Get map of {@link Content} value. - */ + /** Get map of {@link Content} value. */ Iterator> map(); - /** - * Get array of {@link Content} value. - */ + /** Get array of {@link Content} value. */ Iterator array(); - /** - * Get geo point value. - */ + /** Get geo point value. */ Pair geoValue(); - /** - * Get {@link Object} value. - */ + /** Get {@link Object} value. 
*/ Object objectValue(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java index e8875d19ba..fd45ca0d51 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -15,17 +14,15 @@ import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.tuple.Pair; -/** - * The Implementation of Content to represent {@link Object}. - */ +/** The Implementation of Content to represent {@link Object}. */ @RequiredArgsConstructor public class ObjectContent implements Content { private final Object value; /** - * The parse method parses the value as double value, - * since the key values histogram buckets are defaulted to double. + * The parse method parses the value as double value, since the key values histogram buckets are + * defaulted to double. 
*/ @Override public Integer intValue() { @@ -81,11 +78,14 @@ public Object objectValue() { @SuppressWarnings("unchecked") @Override public Iterator> map() { - return ((Map) value).entrySet().stream() - .map(entry -> (Map.Entry) new AbstractMap.SimpleEntry( - entry.getKey(), - new ObjectContent(entry.getValue()))) - .iterator(); + return ((Map) value) + .entrySet().stream() + .map( + entry -> + (Map.Entry) + new AbstractMap.SimpleEntry( + entry.getKey(), new ObjectContent(entry.getValue()))) + .iterator(); } @SuppressWarnings("unchecked") @@ -140,8 +140,8 @@ public Pair geoValue() { return Pair.of(Double.valueOf(split[0]), Double.valueOf(split[1])); } - private T parseNumberValue(Object value, Function stringTFunction, - Function numberTFunction) { + private T parseNumberValue( + Object value, Function stringTFunction, Function numberTFunction) { if (value instanceof String) { return stringTFunction.apply((String) value); } else { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java index e418832117..95558c88bc 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import lombok.EqualsAndHashCode; @@ -12,9 +11,8 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchBinaryType; - /** - * OpenSearch BinaryValue. + * OpenSearch BinaryValue.
* Todo, add this to avoid the unknown value type exception, the implementation will be changed. */ @EqualsAndHashCode(callSuper = false) diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java index 72f7f4a4f2..c13c39c355 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import java.util.Objects; @@ -14,7 +13,7 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchGeoPointType; /** - * OpenSearch GeoPointValue. + * OpenSearch GeoPointValue.
* Todo, add this to avoid the unknown value type exception, the implementation will be changed. */ public class OpenSearchExprGeoPointValue extends AbstractExprValue { @@ -37,7 +36,8 @@ public ExprType type() { @Override public int compare(ExprValue other) { - return geoPoint.toString() + return geoPoint + .toString() .compareTo((((OpenSearchExprGeoPointValue) other).geoPoint).toString()); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java index 31e5c7f957..21046956d0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import java.util.ArrayList; @@ -36,8 +35,10 @@ public void execute(PhysicalPlan physicalPlan, ResponseListener l } @Override - public void execute(PhysicalPlan physicalPlan, ExecutionContext context, - ResponseListener listener) { + public void execute( + PhysicalPlan physicalPlan, + ExecutionContext context, + ResponseListener listener) { PhysicalPlan plan = executionProtector.protect(physicalPlan); client.schedule( () -> { @@ -51,8 +52,9 @@ public void execute(PhysicalPlan physicalPlan, ExecutionContext context, result.add(plan.next()); } - QueryResponse response = new QueryResponse(physicalPlan.schema(), result, - planSerializer.convertToCursor(plan)); + QueryResponse response = + new QueryResponse( + physicalPlan.schema(), result, planSerializer.convertToCursor(plan)); listener.onResponse(response); } catch (Exception e) { listener.onFailure(e); @@ -64,21 +66,27 @@ public void execute(PhysicalPlan physicalPlan, ExecutionContext context, @Override public void explain(PhysicalPlan plan, ResponseListener listener) { - client.schedule(() 
-> { - try { - Explain openSearchExplain = new Explain() { - @Override - public ExplainResponseNode visitTableScan(TableScanOperator node, Object context) { - return explain(node, context, explainNode -> { - explainNode.setDescription(Map.of("request", node.explain())); - }); - } - }; + client.schedule( + () -> { + try { + Explain openSearchExplain = + new Explain() { + @Override + public ExplainResponseNode visitTableScan( + TableScanOperator node, Object context) { + return explain( + node, + context, + explainNode -> { + explainNode.setDescription(Map.of("request", node.explain())); + }); + } + }; - listener.onResponse(openSearchExplain.apply(plan)); - } catch (Exception e) { - listener.onFailure(e); - } - }); + listener.onResponse(openSearchExplain.apply(plan)); + } catch (Exception e) { + listener.onFailure(e); + } + }); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java index 42c49b44d8..3a11ee99d7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java @@ -3,19 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * Execution Plan Protector. - */ +/** Execution Plan Protector. */ public abstract class ExecutionProtector extends PhysicalPlanNodeVisitor { - /** - * Decorated the PhysicalPlan to run in resource sensitive mode. - */ + /** Decorated the PhysicalPlan to run in resource sensitive mode. 
*/ public abstract PhysicalPlan protect(PhysicalPlan physicalPlan); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java index 03e2f0c61c..88a5108159 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * No operation execution protector. - */ +/** No operation execution protector. */ public class NoopExecutionProtector extends ExecutionProtector { @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java index dff5545785..0905c2f4b4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import lombok.RequiredArgsConstructor; @@ -28,15 +27,11 @@ import org.opensearch.sql.planner.physical.WindowOperator; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch Execution Protector. - */ +/** OpenSearch Execution Protector. */ @RequiredArgsConstructor public class OpenSearchExecutionProtector extends ExecutionProtector { - /** - * OpenSearch resource monitor. - */ + /** OpenSearch resource monitor. 
*/ private final ResourceMonitor resourceMonitor; public PhysicalPlan protect(PhysicalPlan physicalPlan) { @@ -44,8 +39,8 @@ public PhysicalPlan protect(PhysicalPlan physicalPlan) { } /** - * Don't protect {@link CursorCloseOperator} and entire nested tree, because - * {@link CursorCloseOperator} as designed as no-op. + * Don't protect {@link CursorCloseOperator} and entire nested tree, because {@link + * CursorCloseOperator} as designed as no-op. */ @Override public PhysicalPlan visitCursorClose(CursorCloseOperator node, Object context) { @@ -59,14 +54,18 @@ public PhysicalPlan visitFilter(FilterOperator node, Object context) { @Override public PhysicalPlan visitAggregation(AggregationOperator node, Object context) { - return new AggregationOperator(visitInput(node.getInput(), context), node.getAggregatorList(), - node.getGroupByExprList()); + return new AggregationOperator( + visitInput(node.getInput(), context), node.getAggregatorList(), node.getGroupByExprList()); } @Override public PhysicalPlan visitRareTopN(RareTopNOperator node, Object context) { - return new RareTopNOperator(visitInput(node.getInput(), context), node.getCommandType(), - node.getNoOfResults(), node.getFieldExprList(), node.getGroupByExprList()); + return new RareTopNOperator( + visitInput(node.getInput(), context), + node.getCommandType(), + node.getNoOfResults(), + node.getFieldExprList(), + node.getGroupByExprList()); } @Override @@ -74,9 +73,7 @@ public PhysicalPlan visitRename(RenameOperator node, Object context) { return new RenameOperator(visitInput(node.getInput(), context), node.getMapping()); } - /** - * Decorate with {@link ResourceMonitorPlan}. - */ + /** Decorate with {@link ResourceMonitorPlan}. 
*/ @Override public PhysicalPlan visitTableScan(TableScanOperator node, Object context) { return doProtect(node); @@ -84,7 +81,9 @@ public PhysicalPlan visitTableScan(TableScanOperator node, Object context) { @Override public PhysicalPlan visitProject(ProjectOperator node, Object context) { - return new ProjectOperator(visitInput(node.getInput(), context), node.getProjectList(), + return new ProjectOperator( + visitInput(node.getInput(), context), + node.getProjectList(), node.getNamedParseExpressions()); } @@ -102,15 +101,19 @@ public PhysicalPlan visitEval(EvalOperator node, Object context) { public PhysicalPlan visitNested(NestedOperator node, Object context) { return doProtect( new NestedOperator( - visitInput(node.getInput(), context), node.getFields(), node.getGroupedPathsAndFields() - ) - ); + visitInput(node.getInput(), context), + node.getFields(), + node.getGroupedPathsAndFields())); } @Override public PhysicalPlan visitDedupe(DedupeOperator node, Object context) { - return new DedupeOperator(visitInput(node.getInput(), context), node.getDedupeList(), - node.getAllowedDuplication(), node.getKeepEmpty(), node.getConsecutive()); + return new DedupeOperator( + visitInput(node.getInput(), context), + node.getDedupeList(), + node.getAllowedDuplication(), + node.getKeepEmpty(), + node.getConsecutive()); } @Override @@ -121,20 +124,14 @@ public PhysicalPlan visitWindow(WindowOperator node, Object context) { node.getWindowDefinition()); } - /** - * Decorate with {@link ResourceMonitorPlan}. - */ + /** Decorate with {@link ResourceMonitorPlan}. */ @Override public PhysicalPlan visitSort(SortOperator node, Object context) { - return doProtect( - new SortOperator( - visitInput(node.getInput(), context), - node.getSortList())); + return doProtect(new SortOperator(visitInput(node.getInput(), context), node.getSortList())); } /** - * Values are a sequence of rows of literal value in memory - * which doesn't need memory protection. 
+ * Values are a sequence of rows of literal value in memory which doesn't need memory protection. */ @Override public PhysicalPlan visitValues(ValuesOperator node, Object context) { @@ -144,41 +141,38 @@ public PhysicalPlan visitValues(ValuesOperator node, Object context) { @Override public PhysicalPlan visitLimit(LimitOperator node, Object context) { return new LimitOperator( - visitInput(node.getInput(), context), - node.getLimit(), - node.getOffset()); + visitInput(node.getInput(), context), node.getLimit(), node.getOffset()); } @Override public PhysicalPlan visitMLCommons(PhysicalPlan node, Object context) { MLCommonsOperator mlCommonsOperator = (MLCommonsOperator) node; return doProtect( - new MLCommonsOperator(visitInput(mlCommonsOperator.getInput(), context), - mlCommonsOperator.getAlgorithm(), - mlCommonsOperator.getArguments(), - mlCommonsOperator.getNodeClient()) - ); + new MLCommonsOperator( + visitInput(mlCommonsOperator.getInput(), context), + mlCommonsOperator.getAlgorithm(), + mlCommonsOperator.getArguments(), + mlCommonsOperator.getNodeClient())); } @Override public PhysicalPlan visitAD(PhysicalPlan node, Object context) { ADOperator adOperator = (ADOperator) node; return doProtect( - new ADOperator(visitInput(adOperator.getInput(), context), - adOperator.getArguments(), - adOperator.getNodeClient() - ) - ); + new ADOperator( + visitInput(adOperator.getInput(), context), + adOperator.getArguments(), + adOperator.getNodeClient())); } @Override public PhysicalPlan visitML(PhysicalPlan node, Object context) { MLOperator mlOperator = (MLOperator) node; return doProtect( - new MLOperator(visitInput(mlOperator.getInput(), context), - mlOperator.getArguments(), - mlOperator.getNodeClient()) - ); + new MLOperator( + visitInput(mlOperator.getInput(), context), + mlOperator.getArguments(), + mlOperator.getNodeClient())); } PhysicalPlan visitInput(PhysicalPlan node, Object context) { @@ -199,5 +193,4 @@ protected PhysicalPlan doProtect(PhysicalPlan node) { 
private boolean isProtected(PhysicalPlan node) { return (node instanceof ResourceMonitorPlan); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java index 2fefd0316f..87aa9d93dd 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.mapping; import java.util.Map; @@ -13,25 +12,25 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; /** - * OpenSearch index mapping. Because there is no specific behavior for different field types, - * string is used to represent field types. + * OpenSearch index mapping. Because there is no specific behavior for different field types, string + * is used to represent field types. */ @ToString public class IndexMapping { /** Field mappings from field name to field type in OpenSearch date type system. */ - @Getter - private final Map fieldMappings; + @Getter private final Map fieldMappings; /** * Maps each column in the index definition to an OpenSearchSQL datatype. + * * @param metaData The metadata retrieved from the index mapping defined by the user. 
*/ @SuppressWarnings("unchecked") public IndexMapping(MappingMetadata metaData) { - this.fieldMappings = OpenSearchDataType.parseMapping( - (Map) metaData.getSourceAsMap().getOrDefault("properties", null) - ); + this.fieldMappings = + OpenSearchDataType.parseMapping( + (Map) metaData.getSourceAsMap().getOrDefault("properties", null)); } /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java index 7a0ae7c960..f9c32b7424 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.planner.physical; import static org.opensearch.sql.utils.MLCommonsConstants.ANOMALY_RATE; @@ -41,65 +40,62 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * AD Physical operator to call AD interface to get results for - * algorithm execution. - */ +/** AD Physical operator to call AD interface to get results for algorithm execution. */ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class ADOperator extends MLCommonsOperatorActions { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final Map arguments; + @Getter private final Map arguments; - @Getter - private final NodeClient nodeClient; + @Getter private final NodeClient nodeClient; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @EqualsAndHashCode.Exclude private Iterator iterator; private FunctionName rcfType; @Override public void open() { super.open(); - String categoryField = arguments.containsKey(CATEGORY_FIELD) - ? 
(String) arguments.get(CATEGORY_FIELD).getValue() : null; - List> - inputDataFrames = generateCategorizedInputDataset(input, categoryField); + String categoryField = + arguments.containsKey(CATEGORY_FIELD) + ? (String) arguments.get(CATEGORY_FIELD).getValue() + : null; + List> inputDataFrames = + generateCategorizedInputDataset(input, categoryField); MLAlgoParams mlAlgoParams = convertArgumentToMLParameter(arguments); - List predictionResults = inputDataFrames.stream() - .map(pair -> getMLPredictionResult(rcfType, mlAlgoParams, pair.getRight(), nodeClient)) - .collect(Collectors.toList()); + List predictionResults = + inputDataFrames.stream() + .map(pair -> getMLPredictionResult(rcfType, mlAlgoParams, pair.getRight(), nodeClient)) + .collect(Collectors.toList()); Iterator> inputDataFramesIter = inputDataFrames.iterator(); Iterator predictionResultIter = predictionResults.iterator(); - iterator = new Iterator() { - private DataFrame inputDataFrame = null; - private Iterator inputRowIter = null; - private MLPredictionOutput predictionResult = null; - private Iterator resultRowIter = null; - - @Override - public boolean hasNext() { - return inputRowIter != null && inputRowIter.hasNext() || inputDataFramesIter.hasNext(); - } - - @Override - public ExprValue next() { - if (inputRowIter == null || !inputRowIter.hasNext()) { - inputDataFrame = inputDataFramesIter.next().getLeft(); - inputRowIter = inputDataFrame.iterator(); - predictionResult = predictionResultIter.next(); - resultRowIter = predictionResult.getPredictionResult().iterator(); - } - return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); - } - }; + iterator = + new Iterator() { + private DataFrame inputDataFrame = null; + private Iterator inputRowIter = null; + private MLPredictionOutput predictionResult = null; + private Iterator resultRowIter = null; + + @Override + public boolean hasNext() { + return inputRowIter != null && inputRowIter.hasNext() || 
inputDataFramesIter.hasNext(); + } + + @Override + public ExprValue next() { + if (inputRowIter == null || !inputRowIter.hasNext()) { + inputDataFrame = inputDataFramesIter.next().getLeft(); + inputRowIter = inputDataFrame.iterator(); + predictionResult = predictionResultIter.next(); + resultRowIter = predictionResult.getPredictionResult().iterator(); + } + return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); + } + }; } @Override @@ -126,53 +122,66 @@ protected MLAlgoParams convertArgumentToMLParameter(Map argumen if (arguments.get(TIME_FIELD) == null) { rcfType = FunctionName.BATCH_RCF; return BatchRCFParams.builder() - .numberOfTrees(arguments.containsKey(NUMBER_OF_TREES) - ? ((Integer) arguments.get(NUMBER_OF_TREES).getValue()) - : null) - .sampleSize(arguments.containsKey(SAMPLE_SIZE) - ? ((Integer) arguments.get(SAMPLE_SIZE).getValue()) - : null) - .outputAfter(arguments.containsKey(OUTPUT_AFTER) - ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) - : null) - .trainingDataSize(arguments.containsKey(TRAINING_DATA_SIZE) - ? ((Integer) arguments.get(TRAINING_DATA_SIZE).getValue()) - : null) - .anomalyScoreThreshold(arguments.containsKey(ANOMALY_SCORE_THRESHOLD) - ? ((Double) arguments.get(ANOMALY_SCORE_THRESHOLD).getValue()) - : null) + .numberOfTrees( + arguments.containsKey(NUMBER_OF_TREES) + ? ((Integer) arguments.get(NUMBER_OF_TREES).getValue()) + : null) + .sampleSize( + arguments.containsKey(SAMPLE_SIZE) + ? ((Integer) arguments.get(SAMPLE_SIZE).getValue()) + : null) + .outputAfter( + arguments.containsKey(OUTPUT_AFTER) + ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) + : null) + .trainingDataSize( + arguments.containsKey(TRAINING_DATA_SIZE) + ? ((Integer) arguments.get(TRAINING_DATA_SIZE).getValue()) + : null) + .anomalyScoreThreshold( + arguments.containsKey(ANOMALY_SCORE_THRESHOLD) + ? 
((Double) arguments.get(ANOMALY_SCORE_THRESHOLD).getValue()) + : null) .build(); } rcfType = FunctionName.FIT_RCF; return FitRCFParams.builder() - .numberOfTrees(arguments.containsKey(NUMBER_OF_TREES) - ? ((Integer) arguments.get(NUMBER_OF_TREES).getValue()) - : null) - .shingleSize(arguments.containsKey(SHINGLE_SIZE) - ? ((Integer) arguments.get(SHINGLE_SIZE).getValue()) - : null) - .sampleSize(arguments.containsKey(SAMPLE_SIZE) - ? ((Integer) arguments.get(SAMPLE_SIZE).getValue()) - : null) - .outputAfter(arguments.containsKey(OUTPUT_AFTER) - ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) - : null) - .timeDecay(arguments.containsKey(TIME_DECAY) - ? ((Double) arguments.get(TIME_DECAY).getValue()) - : null) - .anomalyRate(arguments.containsKey(ANOMALY_RATE) - ? ((Double) arguments.get(ANOMALY_RATE).getValue()) - : null) - .timeField(arguments.containsKey(TIME_FIELD) - ? ((String) arguments.get(TIME_FIELD).getValue()) - : null) - .dateFormat(arguments.containsKey(DATE_FORMAT) - ? ((String) arguments.get(DATE_FORMAT).getValue()) - : "yyyy-MM-dd HH:mm:ss") - .timeZone(arguments.containsKey(TIME_ZONE) - ? ((String) arguments.get(TIME_ZONE).getValue()) - : null) + .numberOfTrees( + arguments.containsKey(NUMBER_OF_TREES) + ? ((Integer) arguments.get(NUMBER_OF_TREES).getValue()) + : null) + .shingleSize( + arguments.containsKey(SHINGLE_SIZE) + ? ((Integer) arguments.get(SHINGLE_SIZE).getValue()) + : null) + .sampleSize( + arguments.containsKey(SAMPLE_SIZE) + ? ((Integer) arguments.get(SAMPLE_SIZE).getValue()) + : null) + .outputAfter( + arguments.containsKey(OUTPUT_AFTER) + ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) + : null) + .timeDecay( + arguments.containsKey(TIME_DECAY) + ? ((Double) arguments.get(TIME_DECAY).getValue()) + : null) + .anomalyRate( + arguments.containsKey(ANOMALY_RATE) + ? ((Double) arguments.get(ANOMALY_RATE).getValue()) + : null) + .timeField( + arguments.containsKey(TIME_FIELD) + ? 
((String) arguments.get(TIME_FIELD).getValue()) + : null) + .dateFormat( + arguments.containsKey(DATE_FORMAT) + ? ((String) arguments.get(DATE_FORMAT).getValue()) + : "yyyy-MM-dd HH:mm:ss") + .timeZone( + arguments.containsKey(TIME_ZONE) + ? ((String) arguments.get(TIME_ZONE).getValue()) + : null) .build(); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java index de0c23c4e9..ef60782a24 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java @@ -30,26 +30,21 @@ import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; /** - * ml-commons Physical operator to call machine learning interface to get results for - * algorithm execution. + * ml-commons Physical operator to call machine learning interface to get results for algorithm + * execution. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class MLCommonsOperator extends MLCommonsOperatorActions { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final String algorithm; + @Getter private final String algorithm; - @Getter - private final Map arguments; + @Getter private final Map arguments; - @Getter - private final NodeClient nodeClient; + @Getter private final NodeClient nodeClient; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @EqualsAndHashCode.Exclude private Iterator iterator; @Override public void open() { @@ -57,22 +52,26 @@ public void open() { DataFrame inputDataFrame = generateInputDataset(input); MLAlgoParams mlAlgoParams = convertArgumentToMLParameter(arguments, algorithm); MLPredictionOutput predictionResult = - getMLPredictionResult(FunctionName.valueOf(algorithm.toUpperCase()), - mlAlgoParams, inputDataFrame, nodeClient); + getMLPredictionResult( + FunctionName.valueOf(algorithm.toUpperCase()), + mlAlgoParams, + inputDataFrame, + nodeClient); Iterator inputRowIter = inputDataFrame.iterator(); Iterator resultRowIter = predictionResult.getPredictionResult().iterator(); - iterator = new Iterator() { - @Override - public boolean hasNext() { - return inputRowIter.hasNext(); - } - - @Override - public ExprValue next() { - return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); - } - }; + iterator = + new Iterator() { + @Override + public boolean hasNext() { + return inputRowIter.hasNext(); + } + + @Override + public ExprValue next() { + return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); + } + }; } @Override @@ -95,30 +94,33 @@ public List getChild() { return Collections.singletonList(input); } - protected MLAlgoParams convertArgumentToMLParameter(Map arguments, - String algorithm) { + protected MLAlgoParams convertArgumentToMLParameter( + Map arguments, String algorithm) { switch 
(FunctionName.valueOf(algorithm.toUpperCase())) { case KMEANS: return KMeansParams.builder() - .centroids(arguments.containsKey(CENTROIDS) - ? ((Integer) arguments.get(CENTROIDS).getValue()) + .centroids( + arguments.containsKey(CENTROIDS) + ? ((Integer) arguments.get(CENTROIDS).getValue()) + : null) + .iterations( + arguments.containsKey(ITERATIONS) + ? ((Integer) arguments.get(ITERATIONS).getValue()) + : null) + .distanceType( + arguments.containsKey(DISTANCE_TYPE) + ? (arguments.get(DISTANCE_TYPE).getValue() != null + ? KMeansParams.DistanceType.valueOf( + ((String) arguments.get(DISTANCE_TYPE).getValue()).toUpperCase()) : null) - .iterations(arguments.containsKey(ITERATIONS) - ? ((Integer) arguments.get(ITERATIONS).getValue()) - : null) - .distanceType(arguments.containsKey(DISTANCE_TYPE) - ? (arguments.get(DISTANCE_TYPE).getValue() != null - ? KMeansParams.DistanceType.valueOf(( - (String) arguments.get(DISTANCE_TYPE).getValue()).toUpperCase()) - : null) - : null) - .build(); + : null) + .build(); default: // TODO: update available algorithms in the message when adding a new case throw new IllegalArgumentException( - String.format("unsupported algorithm: %s, available algorithms: %s.", + String.format( + "unsupported algorithm: %s, available algorithms: %s.", FunctionName.valueOf(algorithm.toUpperCase()), KMEANS)); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java index e1f12fb8a7..ddb0e2d5f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.planner.physical; import static 
org.opensearch.sql.utils.MLCommonsConstants.MODELID; @@ -48,13 +47,12 @@ import org.opensearch.sql.opensearch.client.MLClient; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * Common method actions for ml-commons related operators. - */ +/** Common method actions for ml-commons related operators. */ public abstract class MLCommonsOperatorActions extends PhysicalPlan { /** * generate ml-commons request input dataset. + * * @param input physical input * @return ml-commons dataframe */ @@ -70,33 +68,37 @@ protected DataFrame generateInputDataset(PhysicalPlan input) { /** * Generate ml-commons request input dataset per each category based on a given category field. * Each category value will be a {@link DataFrame} pair, where the left one contains all fields - * for building response, and the right one contains all fields except the aggregated field for - * ml prediction. This is a temporary solution before ml-commons supports 2 dimensional input. + * for building response, and the right one contains all fields except the aggregated field for ml + * prediction. This is a temporary solution before ml-commons supports 2 dimensional input. * * @param input physical input * @param categoryField String, the field should be aggregated on * @return list of ml-commons dataframe pairs */ - protected List> generateCategorizedInputDataset(PhysicalPlan input, - String categoryField) { + protected List> generateCategorizedInputDataset( + PhysicalPlan input, String categoryField) { Map inputMap = new HashMap<>(); while (input.hasNext()) { Map tupleValue = input.next().tupleValue(); ExprValue categoryValue = categoryField == null ? 
null : tupleValue.get(categoryField); - MLInputRows inputData = - inputMap.computeIfAbsent(categoryValue, k -> new MLInputRows()); + MLInputRows inputData = inputMap.computeIfAbsent(categoryValue, k -> new MLInputRows()); inputData.addTupleValue(tupleValue); } // categoryField should be excluded for ml-commons predictions - return inputMap.values().stream().filter(inputData -> inputData.size() > 0).map( - inputData -> new ImmutablePair<>(inputData.toDataFrame(), - inputData.toFilteredDataFrame(e -> !e.getKey().equals(categoryField)))) + return inputMap.values().stream() + .filter(inputData -> inputData.size() > 0) + .map( + inputData -> + new ImmutablePair<>( + inputData.toDataFrame(), + inputData.toFilteredDataFrame(e -> !e.getKey().equals(categoryField)))) .collect(Collectors.toList()); } /** * covert result schema into ExprValue. + * * @param columnMetas column metas * @param row row * @return a map of result schema in ExprValue format @@ -113,13 +115,15 @@ protected Map convertRowIntoExprValue(ColumnMeta[] columnMeta /** * populate result map by ml-commons supported data type. + * * @param columnValue column value * @param resultKeyName result kay name * @param resultBuilder result builder */ - protected void populateResultBuilder(ColumnValue columnValue, - String resultKeyName, - ImmutableMap.Builder resultBuilder) { + protected void populateResultBuilder( + ColumnValue columnValue, + String resultKeyName, + ImmutableMap.Builder resultBuilder) { switch (columnValue.columnType()) { case INTEGER: resultBuilder.put(resultKeyName, new ExprIntegerValue(columnValue.intValue())); @@ -149,14 +153,14 @@ protected void populateResultBuilder(ColumnValue columnValue, /** * concert result into ExprValue. 
+ * * @param columnMetas column metas * @param row row * @param schema schema * @return a map of result in ExprValue format */ - protected Map convertResultRowIntoExprValue(ColumnMeta[] columnMetas, - Row row, - Map schema) { + protected Map convertResultRowIntoExprValue( + ColumnMeta[] columnMetas, Row row, Map schema) { ImmutableMap.Builder resultBuilder = new ImmutableMap.Builder<>(); for (int i = 0; i < columnMetas.length; i++) { ColumnValue columnValue = row.getValue(i); @@ -167,29 +171,31 @@ protected Map convertResultRowIntoExprValue(ColumnMeta[] colu resultKeyName = resultKeyName + "1"; } populateResultBuilder(columnValue, resultKeyName, resultBuilder); - } return resultBuilder.build(); } /** * iterate result and built it into ExprTupleValue. + * * @param inputRowIter input row iterator * @param inputDataFrame input data frame * @param predictionResult prediction result * @param resultRowIter result row iterator * @return result in ExprTupleValue format */ - protected ExprTupleValue buildResult(Iterator inputRowIter, - DataFrame inputDataFrame, - MLPredictionOutput predictionResult, - Iterator resultRowIter) { + protected ExprTupleValue buildResult( + Iterator inputRowIter, + DataFrame inputDataFrame, + MLPredictionOutput predictionResult, + Iterator resultRowIter) { ImmutableMap.Builder resultSchemaBuilder = new ImmutableMap.Builder<>(); - resultSchemaBuilder.putAll(convertRowIntoExprValue(inputDataFrame.columnMetas(), - inputRowIter.next())); + resultSchemaBuilder.putAll( + convertRowIntoExprValue(inputDataFrame.columnMetas(), inputRowIter.next())); Map resultSchema = resultSchemaBuilder.build(); ImmutableMap.Builder resultBuilder = new ImmutableMap.Builder<>(); - resultBuilder.putAll(convertResultRowIntoExprValue( + resultBuilder.putAll( + convertResultRowIntoExprValue( predictionResult.getPredictionResult().columnMetas(), resultRowIter.next(), resultSchema)); @@ -199,74 +205,73 @@ protected ExprTupleValue buildResult(Iterator inputRowIter, /** * get 
ml-commons train and predict result. + * * @param functionName ml-commons algorithm name * @param mlAlgoParams ml-commons algorithm parameters * @param inputDataFrame input data frame * @param nodeClient node client * @return ml-commons train and predict result */ - protected MLPredictionOutput getMLPredictionResult(FunctionName functionName, - MLAlgoParams mlAlgoParams, - DataFrame inputDataFrame, - NodeClient nodeClient) { - MLInput mlinput = MLInput.builder() + protected MLPredictionOutput getMLPredictionResult( + FunctionName functionName, + MLAlgoParams mlAlgoParams, + DataFrame inputDataFrame, + NodeClient nodeClient) { + MLInput mlinput = + MLInput.builder() .algorithm(functionName) .parameters(mlAlgoParams) .inputDataset(new DataFrameInputDataset(inputDataFrame)) .build(); - MachineLearningNodeClient machineLearningClient = - MLClient.getMLClient(nodeClient); + MachineLearningNodeClient machineLearningClient = MLClient.getMLClient(nodeClient); - return (MLPredictionOutput) machineLearningClient - .trainAndPredict(mlinput) - .actionGet(30, TimeUnit.SECONDS); + return (MLPredictionOutput) + machineLearningClient.trainAndPredict(mlinput).actionGet(30, TimeUnit.SECONDS); } /** * get ml-commons train, predict and trainandpredict result. + * * @param inputDataFrame input data frame * @param arguments ml parameters * @param nodeClient node client * @return ml-commons result */ - protected MLOutput getMLOutput(DataFrame inputDataFrame, - Map arguments, - NodeClient nodeClient) { - MLInput mlinput = MLInput.builder() + protected MLOutput getMLOutput( + DataFrame inputDataFrame, Map arguments, NodeClient nodeClient) { + MLInput mlinput = + MLInput.builder() .inputDataset(new DataFrameInputDataset(inputDataFrame)) - //Just the placeholders for algorithm and parameters which must be initialized. - //They will be overridden in ml client. + // Just the placeholders for algorithm and parameters which must be initialized. + // They will be overridden in ml client. 
.algorithm(FunctionName.SAMPLE_ALGO) .parameters(new SampleAlgoParams(0)) .build(); - MachineLearningNodeClient machineLearningClient = - MLClient.getMLClient(nodeClient); + MachineLearningNodeClient machineLearningClient = MLClient.getMLClient(nodeClient); - return machineLearningClient - .run(mlinput, arguments) - .actionGet(30, TimeUnit.SECONDS); + return machineLearningClient.run(mlinput, arguments).actionGet(30, TimeUnit.SECONDS); } /** * iterate result and built it into ExprTupleValue. + * * @param inputRowIter input row iterator * @param inputDataFrame input data frame * @param mlResult train/predict result * @param resultRowIter predict result iterator * @return result in ExprTupleValue format */ - protected ExprTupleValue buildPPLResult(boolean isPredict, - Iterator inputRowIter, - DataFrame inputDataFrame, - MLOutput mlResult, - Iterator resultRowIter) { + protected ExprTupleValue buildPPLResult( + boolean isPredict, + Iterator inputRowIter, + DataFrame inputDataFrame, + MLOutput mlResult, + Iterator resultRowIter) { if (isPredict) { - return buildResult(inputRowIter, - inputDataFrame, - (MLPredictionOutput) mlResult, - resultRowIter); + return buildResult( + inputRowIter, inputDataFrame, (MLPredictionOutput) mlResult, resultRowIter); } else { return buildTrainResult((MLTrainingOutput) mlResult); } @@ -284,18 +289,21 @@ protected ExprTupleValue buildTrainResult(MLTrainingOutput trainResult) { private static class MLInputRows extends LinkedList> { /** * Add tuple value to input map, skip if any value is null. + * * @param tupleValue a row in input data. */ public void addTupleValue(Map tupleValue) { if (tupleValue.values().stream().anyMatch(e -> e.isNull() || e.isMissing())) { return; } - this.add(tupleValue.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().value()))); + this.add( + tupleValue.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().value()))); } /** * Convert to DataFrame. 
+ * * @return DataFrame */ public DataFrame toDataFrame() { @@ -304,15 +312,19 @@ public DataFrame toDataFrame() { /** * Filter each row and convert to DataFrame. + * * @param filter used to filter fields in each row * @return DataFrame */ public DataFrame toFilteredDataFrame(Predicate> filter) { - return DataFrameBuilder.load(this.stream().map( - row -> row.entrySet().stream().filter(filter) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) - .collect(Collectors.toList())); + return DataFrameBuilder.load( + this.stream() + .map( + row -> + row.entrySet().stream() + .filter(filter) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) + .collect(Collectors.toList())); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java index 36834bc23a..6dc7078a0d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java @@ -25,23 +25,19 @@ import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; /** - * ml-commons Physical operator to call machine learning interface to get results for - * algorithm execution. + * ml-commons Physical operator to call machine learning interface to get results for algorithm + * execution. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class MLOperator extends MLCommonsOperatorActions { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final Map arguments; + @Getter private final Map arguments; - @Getter - private final NodeClient nodeClient; + @Getter private final NodeClient nodeClient; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @EqualsAndHashCode.Exclude private Iterator iterator; @Override public void open() { @@ -53,34 +49,36 @@ public void open() { final Iterator inputRowIter = inputDataFrame.iterator(); // Only need to check train here, as action should be already checked in ml client. final boolean isPrediction = ((String) args.get("action")).equals("train") ? false : true; - //For train, only one row to return. - final Iterator trainIter = new ArrayList() { - { - add("train"); - } - }.iterator(); - final Iterator resultRowIter = isPrediction - ? ((MLPredictionOutput) mlOutput).getPredictionResult().iterator() - : null; - iterator = new Iterator() { - @Override - public boolean hasNext() { - if (isPrediction) { - return inputRowIter.hasNext(); - } else { - boolean res = trainIter.hasNext(); - if (res) { - trainIter.next(); + // For train, only one row to return. + final Iterator trainIter = + new ArrayList() { + { + add("train"); + } + }.iterator(); + final Iterator resultRowIter = + isPrediction ? 
((MLPredictionOutput) mlOutput).getPredictionResult().iterator() : null; + iterator = + new Iterator() { + @Override + public boolean hasNext() { + if (isPrediction) { + return inputRowIter.hasNext(); + } else { + boolean res = trainIter.hasNext(); + if (res) { + trainIter.next(); + } + return res; + } } - return res; - } - } - @Override - public ExprValue next() { - return buildPPLResult(isPrediction, inputRowIter, inputDataFrame, mlOutput, resultRowIter); - } - }; + @Override + public ExprValue next() { + return buildPPLResult( + isPrediction, inputRowIter, inputDataFrame, mlOutput, resultRowIter); + } + }; } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java index 6e85dc00cc..e7685394f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.opensearch.sql.data.model.ExprValueUtils.stringValue; @@ -18,9 +17,7 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.opensearch.client.OpenSearchClient; -/** - * Cat indices request. - */ +/** Cat indices request. 
*/ @RequiredArgsConstructor public class OpenSearchCatIndicesRequest implements OpenSearchSystemRequest { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java index f4fd7b98d3..5e96ad83c5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.opensearch.sql.data.model.ExprValueUtils.integerValue; @@ -23,9 +22,7 @@ import org.opensearch.sql.opensearch.mapping.IndexMapping; import org.opensearch.sql.opensearch.request.OpenSearchRequest; -/** - * Describe index meta data request. - */ +/** Describe index meta data request. */ public class OpenSearchDescribeIndexRequest implements OpenSearchSystemRequest { private static final String DEFAULT_TABLE_CAT = "opensearch"; @@ -36,22 +33,18 @@ public class OpenSearchDescribeIndexRequest implements OpenSearchSystemRequest { private static final String DEFAULT_IS_AUTOINCREMENT = "NO"; - /** - * OpenSearch client connection. - */ + /** OpenSearch client connection. */ private final OpenSearchClient client; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. 
*/ private final OpenSearchRequest.IndexName indexName; public OpenSearchDescribeIndexRequest(OpenSearchClient client, String indexName) { this(client, new OpenSearchRequest.IndexName(indexName)); } - public OpenSearchDescribeIndexRequest(OpenSearchClient client, - OpenSearchRequest.IndexName indexName) { + public OpenSearchDescribeIndexRequest( + OpenSearchClient client, OpenSearchRequest.IndexName indexName) { this.client = client; this.indexName = indexName; } @@ -66,10 +59,13 @@ public List search() { List results = new ArrayList<>(); Map meta = client.meta(); int pos = 0; - for (Map.Entry entry - : OpenSearchDataType.traverseAndFlatten(getFieldTypes()).entrySet()) { + for (Map.Entry entry : + OpenSearchDataType.traverseAndFlatten(getFieldTypes()).entrySet()) { results.add( - row(entry.getKey(), entry.getValue().legacyTypeName().toLowerCase(), pos++, + row( + entry.getKey(), + entry.getValue().legacyTypeName().toLowerCase(), + pos++, clusterName(meta))); } return results; @@ -97,8 +93,12 @@ public Map getFieldTypes() { * @return max result window */ public Integer getMaxResultWindow() { - return client.getIndexMaxResultWindows(getLocalIndexNames(indexName.getIndexNames())) - .values().stream().min(Integer::compare).get(); + return client + .getIndexMaxResultWindows(getLocalIndexNames(indexName.getIndexNames())) + .values() + .stream() + .min(Integer::compare) + .get(); } private ExprTupleValue row(String fieldName, String fieldType, int position, String clusterName) { @@ -122,8 +122,8 @@ private ExprTupleValue row(String fieldName, String fieldType, int position, Str } /** - * Return index names without "{cluster}:" prefix. - * Without the prefix, they refer to the indices at the local cluster. + * Return index names without "{cluster}:" prefix. Without the prefix, they refer to the indices + * at the local cluster. 
* * @param indexNames a string array of index names * @return local cluster index names @@ -140,8 +140,6 @@ private String clusterName(Map meta) { @Override public String toString() { - return "OpenSearchDescribeIndexRequest{" - + "indexName='" + indexName + '\'' - + '}'; + return "OpenSearchDescribeIndexRequest{indexName='" + indexName + "\'}"; } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java index 7459300caa..581f708f22 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java @@ -22,9 +22,7 @@ import org.opensearch.search.aggregations.Aggregations; import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation; -/** - * Composite Aggregation Parser which include composite aggregation and metric parsers. - */ +/** Composite Aggregation Parser which include composite aggregation and metric parsers. */ @EqualsAndHashCode public class CompositeAggregationParser implements OpenSearchAggregationResponseParser { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java index 8358379be0..406f279784 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java @@ -21,9 +21,8 @@ import org.opensearch.search.aggregations.bucket.filter.Filter; /** - * {@link Filter} Parser. - * The current use case is filter aggregation, e.g. avg(age) filter(balance>0). The filter parser - * do nothing and return the result from metricsParser. + * {@link Filter} Parser. 
The current use case is filter aggregation, e.g. avg(age) + * filter(balance>0). The filter parser do nothing and return the result from metricsParser. */ @Builder @EqualsAndHashCode diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java index 15f05e5b05..0f8f8e284b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java @@ -16,14 +16,10 @@ import java.util.Map; import org.opensearch.search.aggregations.Aggregation; -/** - * Metric Aggregation Parser. - */ +/** Metric Aggregation Parser. */ public interface MetricParser { - /** - * Get the name of metric parser. - */ + /** Get the name of metric parser. */ String getName(); /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java index d5c0141ad2..4df9537973 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java @@ -23,9 +23,7 @@ import org.opensearch.search.aggregations.Aggregations; import org.opensearch.sql.common.utils.StringUtils; -/** - * Parse multiple metrics in one bucket. - */ +/** Parse multiple metrics in one bucket. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class MetricParserHelper { @@ -49,8 +47,9 @@ public Map parse(Aggregations aggregations) { if (metricParserMap.containsKey(aggregation.getName())) { resultMap.putAll(metricParserMap.get(aggregation.getName()).parse(aggregation)); } else { - throw new RuntimeException(StringUtils.format("couldn't parse field %s in aggregation " - + "response", aggregation.getName())); + throw new RuntimeException( + StringUtils.format( + "couldn't parse field %s in aggregation response", aggregation.getName())); } } return resultMap; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java index 5756003523..de0ee5883c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java @@ -19,9 +19,7 @@ import java.util.Map; import org.opensearch.search.aggregations.Aggregations; -/** - * No Bucket Aggregation Parser which include only metric parsers. - */ +/** No Bucket Aggregation Parser which include only metric parsers. 
*/ public class NoBucketAggregationParser implements OpenSearchAggregationResponseParser { private final MetricParserHelper metricsParser; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java index 3a19747ef3..0c15d72eb6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java @@ -17,13 +17,12 @@ import java.util.Map; import org.opensearch.search.aggregations.Aggregations; -/** - * OpenSearch Aggregation Response Parser. - */ +/** OpenSearch Aggregation Response Parser. */ public interface OpenSearchAggregationResponseParser { /** * Parse the OpenSearch Aggregation Response. + * * @param aggregations Aggregations. * @return aggregation result. */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java index f828c2c485..bbcacc1d2c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java @@ -3,34 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import lombok.Getter; import org.json.JSONObject; import org.opensearch.core.rest.RestStatus; -/** - * Error Message. - */ +/** Error Message. 
*/ public class ErrorMessage { protected Throwable exception; private final int status; - @Getter - private final String type; + @Getter private final String type; - @Getter - private final String reason; + @Getter private final String reason; - @Getter - private final String details; + @Getter private final String details; - /** - * Error Message Constructor. - */ + /** Error Message Constructor. */ public ErrorMessage(Throwable exception, int status) { this.exception = exception; this.status = status; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java index 204c6a8b93..901bfc30c8 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import lombok.experimental.UtilityClass; @@ -12,11 +11,11 @@ @UtilityClass public class ErrorMessageFactory { /** - * Create error message based on the exception type. - * Exceptions of OpenSearch exception type and exceptions with wrapped OpenSearch exception causes - * should create {@link OpenSearchErrorMessage} + * Create error message based on the exception type. 
Exceptions of OpenSearch exception type and + * exceptions with wrapped OpenSearch exception causes should create {@link + * OpenSearchErrorMessage} * - * @param e exception to create error message + * @param e exception to create error message * @param status exception status code * @return error message */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java index a90c52922e..87a374d353 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import java.util.Locale; @@ -11,9 +10,7 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.ShardSearchFailure; -/** - * OpenSearch Error Message. - */ +/** OpenSearch Error Message. */ public class OpenSearchErrorMessage extends ErrorMessage { OpenSearchErrorMessage(OpenSearchException exception, int status) { @@ -45,21 +42,21 @@ protected String fetchDetails() { } /** - * Could not deliver the exactly same error messages due to the limit of JDBC types. - * Currently our cases occurred only SearchPhaseExecutionException instances - * among all types of OpenSearch exceptions - * according to the survey, see all types: OpenSearchException.OpenSearchExceptionHandle. - * Either add methods of fetching details for different types, or re-make a consistent - * message by not giving - * detailed messages/root causes but only a suggestion message. + * Could not deliver the exactly same error messages due to the limit of JDBC types. 
Currently our + * cases occurred only SearchPhaseExecutionException instances among all types of OpenSearch + * exceptions according to the survey, see all types: + * OpenSearchException.OpenSearchExceptionHandle. Either add methods of fetching details for + * different types, or re-make a consistent message by not giving detailed messages/root causes + * but only a suggestion message. */ private String fetchSearchPhaseExecutionExceptionDetails( SearchPhaseExecutionException exception) { StringBuilder details = new StringBuilder(); ShardSearchFailure[] shardFailures = exception.shardFailures(); for (ShardSearchFailure failure : shardFailures) { - details.append(String.format(Locale.ROOT, "Shard[%d]: %s\n", failure.shardId(), - failure.getCause().toString())); + details.append( + String.format( + Locale.ROOT, "Shard[%d]: %s\n", failure.shardId(), failure.getCause().toString())); } return details.toString(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java index 3eadea482b..f20551b89d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java @@ -18,102 +18,108 @@ @UtilityClass public class LegacyOpenDistroSettings { - public static final Setting SQL_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.SQL_ENABLED.getKeyValue(), - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting SQL_QUERY_SLOWLOG_SETTING = Setting.intSetting( - LegacySettings.Key.SQL_QUERY_SLOWLOG.getKeyValue(), - 2, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting SQL_CURSOR_KEEPALIVE_SETTING = Setting.positiveTimeSetting( - 
LegacySettings.Key.SQL_CURSOR_KEEPALIVE.getKeyValue(), - timeValueMinutes(1), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting METRICS_ROLLING_WINDOW_SETTING = Setting.longSetting( - LegacySettings.Key.METRICS_ROLLING_WINDOW.getKeyValue(), - 3600L, - 2L, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting METRICS_ROLLING_INTERVAL_SETTING = Setting.longSetting( - LegacySettings.Key.METRICS_ROLLING_INTERVAL.getKeyValue(), - 60L, - 1L, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting PPL_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.PPL_ENABLED.getKeyValue(), - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting - PPL_QUERY_MEMORY_LIMIT_SETTING = Setting.memorySizeSetting( - LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue(), - "85%", - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting QUERY_SIZE_LIMIT_SETTING = Setting.intSetting( - LegacySettings.Key.QUERY_SIZE_LIMIT.getKeyValue(), - 200, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + public static final Setting SQL_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.SQL_ENABLED.getKeyValue(), + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); - /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the new engine is always enabled. 
- */ - public static final Setting SQL_NEW_ENGINE_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.SQL_NEW_ENGINE_ENABLED.getKeyValue(), - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + public static final Setting SQL_QUERY_SLOWLOG_SETTING = + Setting.intSetting( + LegacySettings.Key.SQL_QUERY_SLOWLOG.getKeyValue(), + 2, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting SQL_CURSOR_KEEPALIVE_SETTING = + Setting.positiveTimeSetting( + LegacySettings.Key.SQL_CURSOR_KEEPALIVE.getKeyValue(), + timeValueMinutes(1), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting METRICS_ROLLING_WINDOW_SETTING = + Setting.longSetting( + LegacySettings.Key.METRICS_ROLLING_WINDOW.getKeyValue(), + 3600L, + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting METRICS_ROLLING_INTERVAL_SETTING = + Setting.longSetting( + LegacySettings.Key.METRICS_ROLLING_INTERVAL.getKeyValue(), + 60L, + 1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting PPL_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.PPL_ENABLED.getKeyValue(), + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting PPL_QUERY_MEMORY_LIMIT_SETTING = + Setting.memorySizeSetting( + LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue(), + "85%", + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting QUERY_SIZE_LIMIT_SETTING = + Setting.intSetting( + LegacySettings.Key.QUERY_SIZE_LIMIT.getKeyValue(), + 200, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + /** Deprecated and will be removed then. 
From OpenSearch 1.0, the new engine is always enabled. */ + public static final Setting SQL_NEW_ENGINE_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.SQL_NEW_ENGINE_ENABLED.getKeyValue(), + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the query analysis in legacy engine is disabled. + * Deprecated and will be removed then. From OpenSearch 1.0, the query analysis in legacy engine + * is disabled. */ - public static final Setting QUERY_ANALYSIS_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.QUERY_ANALYSIS_ENABLED.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + public static final Setting QUERY_ANALYSIS_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.QUERY_ANALYSIS_ENABLED.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the query analysis suggestion in legacy engine is disabled. + * Deprecated and will be removed then. From OpenSearch 1.0, the query analysis suggestion in + * legacy engine is disabled. */ public static final Setting QUERY_ANALYSIS_SEMANTIC_SUGGESTION_SETTING = Setting.boolSetting( - LegacySettings.Key.QUERY_ANALYSIS_SEMANTIC_SUGGESTION.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + LegacySettings.Key.QUERY_ANALYSIS_SEMANTIC_SUGGESTION.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the query analysis threshold in legacy engine is disabled. + * Deprecated and will be removed then. From OpenSearch 1.0, the query analysis threshold in + * legacy engine is disabled. 
*/ public static final Setting QUERY_ANALYSIS_SEMANTIC_THRESHOLD_SETTING = Setting.intSetting( @@ -124,8 +130,8 @@ public class LegacyOpenDistroSettings { Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the query response format is default to JDBC format. + * Deprecated and will be removed then. From OpenSearch 1.0, the query response format is default + * to JDBC format. */ public static final Setting QUERY_RESPONSE_FORMAT_SETTING = Setting.simpleString( @@ -136,8 +142,8 @@ public class LegacyOpenDistroSettings { Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the cursor feature is enabled by default. + * Deprecated and will be removed then. From OpenSearch 1.0, the cursor feature is enabled by + * default. */ public static final Setting SQL_CURSOR_ENABLED_SETTING = Setting.boolSetting( @@ -146,10 +152,10 @@ public class LegacyOpenDistroSettings { Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated); + /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the fetch_size in query body will decide whether create the cursor - * context. No cursor will be created if the fetch_size = 0. + * Deprecated and will be removed then. From OpenSearch 1.0, the fetch_size in query body will + * decide whether create the cursor context. No cursor will be created if the fetch_size = 0. */ public static final Setting SQL_CURSOR_FETCH_SIZE_SETTING = Setting.intSetting( @@ -159,9 +165,7 @@ public class LegacyOpenDistroSettings { Setting.Property.Dynamic, Setting.Property.Deprecated); - /** - * Used by Plugin to init Setting. - */ + /** Used by Plugin to init Setting. 
*/ public static List> legacySettings() { return new ImmutableList.Builder>() .add(SQL_ENABLED_SETTING) diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java index 011f6236fb..b30d460c00 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java @@ -28,7 +28,9 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { - return new DataSource(metadata.getName(), DataSourceType.OPENSEARCH, + return new DataSource( + metadata.getName(), + DataSourceType.OPENSEARCH, new OpenSearchStorageEngine(client, settings)); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java index 855aae645d..167bf88f30 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import com.google.common.collect.ImmutableMap; @@ -21,29 +20,23 @@ import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** - * Custom expression script engine that supports using core engine expression code in DSL - * as a new script language just like built-in Painless language. + * Custom expression script engine that supports using core engine expression code in DSL as a new + * script language just like built-in Painless language. 
*/ @RequiredArgsConstructor public class ExpressionScriptEngine implements ScriptEngine { - /** - * Expression script language name. - */ + /** Expression script language name. */ public static final String EXPRESSION_LANG_NAME = "opensearch_query_expression"; - /** - * All supported script contexts and function to create factory from expression. - */ + /** All supported script contexts and function to create factory from expression. */ private static final Map, Function> CONTEXTS = new ImmutableMap.Builder, Function>() .put(FilterScript.CONTEXT, ExpressionFilterScriptFactory::new) .put(AggregationScript.CONTEXT, ExpressionAggregationScriptFactory::new) .build(); - /** - * Expression serializer that (de-)serializes expression. - */ + /** Expression serializer that (de-)serializes expression. */ private final ExpressionSerializer serializer; @Override @@ -52,10 +45,8 @@ public String getType() { } @Override - public T compile(String scriptName, - String scriptCode, - ScriptContext context, - Map params) { + public T compile( + String scriptName, String scriptCode, ScriptContext context, Map params) { /* * Note that in fact the expression source is already compiled in query engine. * The "code" is actually a serialized expression tree by our serializer. 
@@ -66,13 +57,15 @@ public T compile(String scriptName, if (CONTEXTS.containsKey(context)) { return context.factoryClazz.cast(CONTEXTS.get(context).apply(expression)); } - throw new IllegalStateException(String.format("Script context is currently not supported: " - + "all supported contexts [%s], given context [%s] ", CONTEXTS, context)); + throw new IllegalStateException( + String.format( + "Script context is currently not supported: " + + "all supported contexts [%s], given context [%s] ", + CONTEXTS, context)); } @Override public Set> getSupportedContexts() { return CONTEXTS.keySet(); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java index 8b1cb08cfa..a218151b2e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import com.google.common.annotations.VisibleForTesting; @@ -39,32 +38,23 @@ import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** - * Build the AggregationBuilder from the list of {@link NamedAggregator} - * and list of {@link NamedExpression}. + * Build the AggregationBuilder from the list of {@link NamedAggregator} and list of {@link + * NamedExpression}. */ @RequiredArgsConstructor public class AggregationQueryBuilder extends ExpressionNodeVisitor { - /** - * How many composite buckets should be returned. - */ + /** How many composite buckets should be returned. */ public static final int AGGREGATION_BUCKET_SIZE = 1000; - /** - * Bucket Aggregation builder. - */ + /** Bucket Aggregation builder. 
*/ private final BucketAggregationBuilder bucketBuilder; - /** - * Metric Aggregation builder. - */ + /** Metric Aggregation builder. */ private final MetricAggregationBuilder metricBuilder; - /** - * Aggregation Query Builder Constructor. - */ - public AggregationQueryBuilder( - ExpressionSerializer serializer) { + /** Aggregation Query Builder Constructor. */ + public AggregationQueryBuilder(ExpressionSerializer serializer) { this.bucketBuilder = new BucketAggregationBuilder(serializer); this.metricBuilder = new MetricAggregationBuilder(serializer); } @@ -93,7 +83,10 @@ public AggregationQueryBuilder( bucketBuilder.build( groupByList.stream() .sorted(groupSortOrder) - .map(expr -> Triple.of(expr, + .map( + expr -> + Triple.of( + expr, groupSortOrder.sortOrder(expr), groupSortOrder.missingOrder(expr))) .collect(Collectors.toList()))) @@ -103,72 +96,62 @@ public AggregationQueryBuilder( } } - /** - * Build mapping for OpenSearchExprValueFactory. - */ + /** Build mapping for OpenSearchExprValueFactory. */ public Map buildTypeMapping( - List namedAggregatorList, - List groupByList) { + List namedAggregatorList, List groupByList) { ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - namedAggregatorList.forEach(agg -> builder.put(agg.getName(), - OpenSearchDataType.of(agg.type()))); - groupByList.forEach(group -> builder.put(group.getNameOrAlias(), - OpenSearchDataType.of(group.type()))); + namedAggregatorList.forEach( + agg -> builder.put(agg.getName(), OpenSearchDataType.of(agg.type()))); + groupByList.forEach( + group -> builder.put(group.getNameOrAlias(), OpenSearchDataType.of(group.type()))); return builder.build(); } - /** - * Group By field sort order. - */ + /** Group By field sort order. */ @VisibleForTesting public static class GroupSortOrder implements Comparator { /** - * The default order of group field. - * The order is ASC NULL_FIRST. - * The field should be the last one in the group list. + * The default order of group field. 
The order is ASC NULL_FIRST. The field should be the last + * one in the group list. */ private static final Pair DEFAULT_ORDER = Pair.of(Sort.SortOption.DEFAULT_ASC, Integer.MAX_VALUE); - /** - * The mapping between {@link Sort.SortOrder} and {@link SortOrder}. - */ + /** The mapping between {@link Sort.SortOrder} and {@link SortOrder}. */ private static final Map SORT_MAP = new ImmutableMap.Builder() .put(Sort.SortOrder.ASC, SortOrder.ASC) - .put(Sort.SortOrder.DESC, SortOrder.DESC).build(); + .put(Sort.SortOrder.DESC, SortOrder.DESC) + .build(); - /** - * The mapping between {@link Sort.NullOrder} and {@link MissingOrder}. - */ + /** The mapping between {@link Sort.NullOrder} and {@link MissingOrder}. */ private static final Map NULL_MAP = new ImmutableMap.Builder() .put(Sort.NullOrder.NULL_FIRST, MissingOrder.FIRST) - .put(Sort.NullOrder.NULL_LAST, MissingOrder.LAST).build(); + .put(Sort.NullOrder.NULL_LAST, MissingOrder.LAST) + .build(); private final Map> map = new HashMap<>(); - /** - * Constructor of GroupSortOrder. - */ + /** Constructor of GroupSortOrder. */ public GroupSortOrder(List> sortList) { if (null == sortList) { return; } int pos = 0; for (Pair sortPair : sortList) { - map.put(((ReferenceExpression) sortPair.getRight()).getAttr(), + map.put( + ((ReferenceExpression) sortPair.getRight()).getAttr(), Pair.of(sortPair.getLeft(), pos++)); } } /** - * Compare the two expressions. The comparison is based on the pos in the sort list. - * If the expression is defined in the sort list. then the order of the expression is the pos - * in sort list. - * If the expression isn't defined in the sort list. the the order of the expression is the - * Integer.MAX_VALUE. you can think it is at the end of the sort list. + * Compare the two expressions. The comparison is based on the pos in the sort list. If the + * expression is defined in the sort list. then the order of the expression is the pos in sort + * list. If the expression isn't defined in the sort list. 
the the order of the expression is + * the Integer.MAX_VALUE. you can think it is at the end of the sort list. * * @param o1 NamedExpression * @param o2 NamedExpression @@ -176,24 +159,19 @@ public GroupSortOrder(List> sortList) { */ @Override public int compare(NamedExpression o1, NamedExpression o2) { - final Pair o1Value = - map.getOrDefault(o1.getName(), DEFAULT_ORDER); - final Pair o2Value = - map.getOrDefault(o2.getName(), DEFAULT_ORDER); + final Pair o1Value = map.getOrDefault(o1.getName(), DEFAULT_ORDER); + final Pair o2Value = map.getOrDefault(o2.getName(), DEFAULT_ORDER); return o1Value.getRight().compareTo(o2Value.getRight()); } - /** - * Get the {@link SortOrder} for expression. - * By default, the {@link SortOrder} is ASC. - */ + /** Get the {@link SortOrder} for expression. By default, the {@link SortOrder} is ASC. */ public SortOrder sortOrder(NamedExpression expression) { return SORT_MAP.get(sortOption(expression).getSortOrder()); } /** - * Get the {@link MissingOrder} for expression. - * By default, the {@link MissingOrder} is ASC missing first / DESC missing last. + * Get the {@link MissingOrder} for expression. By default, the {@link MissingOrder} is ASC + * missing first / DESC missing last. 
*/ public MissingOrder missingOrder(NamedExpression expression) { return NULL_MAP.get(sortOption(expression).getNullOrder()); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java index 2871bd4a97..7e7b2e959a 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static java.time.temporal.ChronoUnit.MILLIS; @@ -22,20 +21,14 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.storage.script.core.ExpressionScript; -/** - * Aggregation expression script that executed on each document. - */ +/** Aggregation expression script that executed on each document. */ @EqualsAndHashCode(callSuper = false) public class ExpressionAggregationScript extends AggregationScript { - /** - * Expression Script. - */ + /** Expression Script. */ private final ExpressionScript expressionScript; - /** - * Constructor of ExpressionAggregationScript. - */ + /** Constructor of ExpressionAggregationScript. 
*/ public ExpressionAggregationScript( Expression expression, SearchLookup lookup, @@ -51,7 +44,7 @@ public Object execute() { if (expr.type() instanceof OpenSearchDataType) { return expr.value(); } - switch ((ExprCoreType)expr.type()) { + switch ((ExprCoreType) expr.type()) { case TIME: // Can't get timestamp from `ExprTimeValue` return MILLIS.between(LocalTime.MIN, expr.timeValue()); @@ -64,8 +57,8 @@ public Object execute() { } } - private ExprValue evaluateExpression(Expression expression, Environment valueEnv) { + private ExprValue evaluateExpression( + Expression expression, Environment valueEnv) { ExprValue result = expression.valueOf(valueEnv); // The missing value is treated as null value in doc_value, so we can't distinguish with them. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java index 3138ee90fc..c0b92e5438 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import java.util.Map; @@ -12,9 +11,7 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * Aggregation Expression script factory that generates leaf factory. - */ +/** Aggregation Expression script factory that generates leaf factory. 
*/ @EqualsAndHashCode public class ExpressionAggregationScriptFactory implements AggregationScript.Factory { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java index 7d22f724e3..13f9c95c8f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import java.util.Map; @@ -12,29 +11,19 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * Expression script leaf factory that produces script executor for each leaf. - */ +/** Expression script leaf factory that produces script executor for each leaf. */ public class ExpressionAggregationScriptLeafFactory implements AggregationScript.LeafFactory { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Map params; - /** - * Expression to execute. - */ + /** Expression to execute. */ private final SearchLookup lookup; - /** - * Constructor of ExpressionAggregationScriptLeafFactory. - */ + /** Constructor of ExpressionAggregationScriptLeafFactory. 
*/ public ExpressionAggregationScriptLeafFactory( Expression expression, Map params, SearchLookup lookup) { this.expression = expression; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java index 156b565976..7dd02d82d0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static java.util.Collections.emptyMap; @@ -20,9 +19,7 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; -/** - * Abstract Aggregation Builder. - */ +/** Abstract Aggregation Builder. 
*/ @RequiredArgsConstructor public class AggregationBuilderHelper { @@ -34,20 +31,23 @@ public class AggregationBuilderHelper { * @param expression Expression * @return AggregationBuilder */ - public T build(Expression expression, Function fieldBuilder, - Function scriptBuilder) { + public T build( + Expression expression, Function fieldBuilder, Function scriptBuilder) { if (expression instanceof ReferenceExpression) { String fieldName = ((ReferenceExpression) expression).getAttr(); return fieldBuilder.apply( - OpenSearchTextType.convertTextToKeyword(fieldName, expression.type())); + OpenSearchTextType.convertTextToKeyword(fieldName, expression.type())); } else if (expression instanceof FunctionExpression || expression instanceof LiteralExpression) { - return scriptBuilder.apply(new Script( - DEFAULT_SCRIPT_TYPE, EXPRESSION_LANG_NAME, serializer.serialize(expression), - emptyMap())); + return scriptBuilder.apply( + new Script( + DEFAULT_SCRIPT_TYPE, + EXPRESSION_LANG_NAME, + serializer.serialize(expression), + emptyMap())); } else { - throw new IllegalStateException(String.format("metric aggregation doesn't support " - + "expression %s", expression)); + throw new IllegalStateException( + String.format("metric aggregation doesn't support " + "expression %s", expression)); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java index 1a6a82be96..4485626742 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java @@ -26,29 +26,24 @@ import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; -/** - * Bucket 
Aggregation Builder. - */ +/** Bucket Aggregation Builder. */ public class BucketAggregationBuilder { private final AggregationBuilderHelper helper; - public BucketAggregationBuilder( - ExpressionSerializer serializer) { + public BucketAggregationBuilder(ExpressionSerializer serializer) { this.helper = new AggregationBuilderHelper(serializer); } - /** - * Build the list of CompositeValuesSourceBuilder. - */ + /** Build the list of CompositeValuesSourceBuilder. */ public List> build( List> groupList) { ImmutableList.Builder> resultBuilder = new ImmutableList.Builder<>(); for (Triple groupPair : groupList) { resultBuilder.add( - buildCompositeValuesSourceBuilder(groupPair.getLeft(), - groupPair.getMiddle(), groupPair.getRight())); + buildCompositeValuesSourceBuilder( + groupPair.getLeft(), groupPair.getMiddle(), groupPair.getRight())); } return resultBuilder.build(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java index 5e7d34abce..c99fbfdc49 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -33,18 +32,14 @@ import org.opensearch.sql.opensearch.storage.script.filter.FilterQueryBuilder; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; -/** - * Build the Metric Aggregation and List of {@link MetricParser} from {@link NamedAggregator}. - */ +/** Build the Metric Aggregation and List of {@link MetricParser} from {@link NamedAggregator}. 
*/ public class MetricAggregationBuilder extends ExpressionNodeVisitor, Object> { private final AggregationBuilderHelper helper; private final FilterQueryBuilder filterBuilder; - /** - * Constructor. - */ + /** Constructor. */ public MetricAggregationBuilder(ExpressionSerializer serializer) { this.helper = new AggregationBuilderHelper(serializer); this.filterBuilder = new FilterQueryBuilder(serializer); @@ -87,8 +82,9 @@ public Pair visitNamedAggregator( name, new SingleValueParser(name)); default: - throw new IllegalStateException(String.format( - "unsupported distinct aggregator %s", node.getFunctionName().getFunctionName())); + throw new IllegalStateException( + String.format( + "unsupported distinct aggregator %s", node.getFunctionName().getFunctionName())); } } @@ -186,14 +182,13 @@ private Pair make( return Pair.of(aggregationBuilder, parser); } - /** - * Make {@link CardinalityAggregationBuilder} for distinct count aggregations. - */ - private Pair make(CardinalityAggregationBuilder builder, - Expression expression, - Expression condition, - String name, - MetricParser parser) { + /** Make {@link CardinalityAggregationBuilder} for distinct count aggregations. */ + private Pair make( + CardinalityAggregationBuilder builder, + Expression expression, + Expression condition, + String name, + MetricParser parser) { CardinalityAggregationBuilder aggregationBuilder = helper.build(expression, builder::field, builder::script); if (condition != null) { @@ -204,15 +199,14 @@ private Pair make(CardinalityAggregationBuilde return Pair.of(aggregationBuilder, parser); } - /** - * Make {@link TopHitsAggregationBuilder} for take aggregations. - */ - private Pair make(TopHitsAggregationBuilder builder, - Expression expression, - Expression size, - Expression condition, - String name, - MetricParser parser) { + /** Make {@link TopHitsAggregationBuilder} for take aggregations. 
*/ + private Pair make( + TopHitsAggregationBuilder builder, + Expression expression, + Expression size, + Expression condition, + String name, + MetricParser parser) { String fieldName = ((ReferenceExpression) expression).getAttr(); builder.fetchSource(fieldName, null); builder.size(size.valueOf().integerValue()); @@ -245,8 +239,8 @@ private Expression replaceStarOrLiteral(Expression countArg) { * Make builder to build FilterAggregation for aggregations with filter in the bucket. * * @param subAggBuilder AggregationBuilder instance which the filter is applied to. - * @param condition Condition expression in the filter. - * @param name Name of the FilterAggregation instance to build. + * @param condition Condition expression in the filter. + * @param name Name of the FilterAggregation instance to build. * @return {@link FilterAggregationBuilder}. */ private FilterAggregationBuilder makeFilterAggregation( diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java index 9bdb15d63a..3a9ff02ba0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.core; import static java.util.stream.Collectors.toMap; @@ -33,36 +32,27 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; /** - * Expression script executor that executes the expression on each document - * and determine if the document is supposed to be filtered out or not. + * Expression script executor that executes the expression on each document and determine if the + * document is supposed to be filtered out or not. 
*/ @EqualsAndHashCode(callSuper = false) public class ExpressionScript { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; - /** - * ElasticsearchExprValueFactory. - */ - @EqualsAndHashCode.Exclude - private final OpenSearchExprValueFactory valueFactory; + /** ElasticsearchExprValueFactory. */ + @EqualsAndHashCode.Exclude private final OpenSearchExprValueFactory valueFactory; - /** - * Reference Fields. - */ - @EqualsAndHashCode.Exclude - private final Set fields; + /** Reference Fields. */ + @EqualsAndHashCode.Exclude private final Set fields; - /** - * Expression constructor. - */ + /** Expression constructor. */ public ExpressionScript(Expression expression) { this.expression = expression; - this.fields = AccessController.doPrivileged((PrivilegedAction>) () -> - extractFields(expression)); + this.fields = + AccessController.doPrivileged( + (PrivilegedAction>) () -> extractFields(expression)); this.valueFactory = AccessController.doPrivileged( (PrivilegedAction) () -> buildValueFactory(fields)); @@ -72,65 +62,67 @@ public ExpressionScript(Expression expression) { * Evaluate on the doc generate by the doc provider. * * @param docProvider doc provider. 
- * @param evaluator evaluator + * @param evaluator evaluator * @return expr value */ - public ExprValue execute(Supplier>> docProvider, - BiFunction, ExprValue> evaluator) { - return AccessController.doPrivileged((PrivilegedAction) () -> { - Environment valueEnv = - buildValueEnv(fields, valueFactory, docProvider); - ExprValue result = evaluator.apply(expression, valueEnv); - return result; - }); + public ExprValue execute( + Supplier>> docProvider, + BiFunction, ExprValue> evaluator) { + return AccessController.doPrivileged( + (PrivilegedAction) + () -> { + Environment valueEnv = + buildValueEnv(fields, valueFactory, docProvider); + ExprValue result = evaluator.apply(expression, valueEnv); + return result; + }); } private Set extractFields(Expression expr) { Set fields = new HashSet<>(); - expr.accept(new ExpressionNodeVisitor>() { - @Override - public Object visitReference(ReferenceExpression node, Set context) { - context.add(node); - return null; - } - - @Override - public Object visitParse(ParseExpression node, Set context) { - node.getSourceField().accept(this, context); - return null; - } - }, fields); + expr.accept( + new ExpressionNodeVisitor>() { + @Override + public Object visitReference(ReferenceExpression node, Set context) { + context.add(node); + return null; + } + + @Override + public Object visitParse(ParseExpression node, Set context) { + node.getSourceField().accept(this, context); + return null; + } + }, + fields); return fields; } private OpenSearchExprValueFactory buildValueFactory(Set fields) { - Map typeEnv = fields.stream().collect(toMap( - ReferenceExpression::getAttr, e -> OpenSearchDataType.of(e.type()))); + Map typeEnv = + fields.stream() + .collect(toMap(ReferenceExpression::getAttr, e -> OpenSearchDataType.of(e.type()))); return new OpenSearchExprValueFactory(typeEnv); } private Environment buildValueEnv( - Set fields, OpenSearchExprValueFactory valueFactory, + Set fields, + OpenSearchExprValueFactory valueFactory, Supplier>> 
docProvider) { Map valueEnv = new HashMap<>(); for (ReferenceExpression field : fields) { String fieldName = field.getAttr(); - ExprValue exprValue = valueFactory.construct( - fieldName, - getDocValue(field, docProvider), - false - ); + ExprValue exprValue = + valueFactory.construct(fieldName, getDocValue(field, docProvider), false); valueEnv.put(field, exprValue); } // Encapsulate map data structure into anonymous Environment class return valueEnv::get; } - private Object getDocValue(ReferenceExpression field, - Supplier>> docProvider) { + private Object getDocValue( + ReferenceExpression field, Supplier>> docProvider) { String fieldName = OpenSearchTextType.convertTextToKeyword(field.getAttr(), field.type()); ScriptDocValues docValue = docProvider.get().get(fieldName); if (docValue == null || docValue.isEmpty()) { @@ -145,9 +137,9 @@ private Object getDocValue(ReferenceExpression field, } /** - * DocValue only support long and double so cast to integer and float if needed. - * The doc value must be Long and Double for expr type Long/Integer and Double/Float respectively. - * Otherwise there must be bugs in our engine that causes the mismatch. + * DocValue only support long and double so cast to integer and float if needed. The doc value + * must be Long and Double for expr type Long/Integer and Double/Float respectively. Otherwise + * there must be bugs in our engine that causes the mismatch. 
*/ private Object castNumberToFieldType(Object value, ExprType type) { if (value == null) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java index adce89d0df..557cbbe4c9 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import java.util.Map; @@ -19,21 +18,20 @@ import org.opensearch.sql.opensearch.storage.script.core.ExpressionScript; /** - * Expression script executor that executes the expression on each document - * and determine if the document is supposed to be filtered out or not. + * Expression script executor that executes the expression on each document and determine if the + * document is supposed to be filtered out or not. */ @EqualsAndHashCode(callSuper = false) class ExpressionFilterScript extends FilterScript { - /** - * Expression Script. - */ + /** Expression Script. 
*/ private final ExpressionScript expressionScript; - public ExpressionFilterScript(Expression expression, - SearchLookup lookup, - LeafReaderContext context, - Map params) { + public ExpressionFilterScript( + Expression expression, + SearchLookup lookup, + LeafReaderContext context, + Map params) { super(params, lookup, context); this.expressionScript = new ExpressionScript(expression); } @@ -43,19 +41,20 @@ public boolean execute() { return expressionScript.execute(this::getDoc, this::evaluateExpression).booleanValue(); } - private ExprValue evaluateExpression(Expression expression, - Environment valueEnv) { + private ExprValue evaluateExpression( + Expression expression, Environment valueEnv) { ExprValue result = expression.valueOf(valueEnv); if (result.isNull()) { return ExprBooleanValue.of(false); } if (result.type() != ExprCoreType.BOOLEAN) { - throw new IllegalStateException(String.format( - "Expression has wrong result type instead of boolean: " - + "expression [%s], result [%s]", expression, result)); + throw new IllegalStateException( + String.format( + "Expression has wrong result type instead of boolean: " + + "expression [%s], result [%s]", + expression, result)); } return result; } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java index e35482d618..5db10733a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import java.util.Map; @@ -12,15 +11,11 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * 
Expression script factory that generates leaf factory. - */ +/** Expression script factory that generates leaf factory. */ @EqualsAndHashCode public class ExpressionFilterScriptFactory implements FilterScript.Factory { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; public ExpressionFilterScriptFactory(Expression expression) { @@ -37,5 +32,4 @@ public boolean isResultDeterministic() { public FilterScript.LeafFactory newFactory(Map params, SearchLookup lookup) { return new ExpressionFilterScriptLeafFactory(expression, params, lookup); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java index 22b4be1b69..6c04ca7233 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import java.util.Map; @@ -12,29 +11,20 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * Expression script leaf factory that produces script executor for each leaf. - */ +/** Expression script leaf factory that produces script executor for each leaf. */ class ExpressionFilterScriptLeafFactory implements FilterScript.LeafFactory { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; - /** - * Parameters for the expression. - */ + /** Parameters for the expression. */ private final Map params; - /** - * Document lookup that returns doc values. - */ + /** Document lookup that returns doc values. 
*/ private final SearchLookup lookup; - public ExpressionFilterScriptLeafFactory(Expression expression, - Map params, - SearchLookup lookup) { + public ExpressionFilterScriptLeafFactory( + Expression expression, Map params, SearchLookup lookup) { this.expression = expression; this.params = params; this.lookup = lookup; @@ -44,5 +34,4 @@ public ExpressionFilterScriptLeafFactory(Expression expression, public FilterScript newInstance(LeafReaderContext ctx) { return new ExpressionFilterScript(expression, lookup, ctx, params); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java index 51b10d2c41..fa0fe19105 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static java.util.Collections.emptyMap; @@ -45,14 +44,10 @@ @RequiredArgsConstructor public class FilterQueryBuilder extends ExpressionNodeVisitor { - /** - * Serializer that serializes expression for build DSL query. - */ + /** Serializer that serializes expression for build DSL query. */ private final ExpressionSerializer serializer; - /** - * Mapping from function name to lucene query builder. - */ + /** Mapping from function name to lucene query builder. 
*/ private final Map luceneQueries = ImmutableMap.builder() .put(BuiltinFunctionName.EQUAL.getName(), new TermQuery()) @@ -82,8 +77,9 @@ public class FilterQueryBuilder extends ExpressionNodeVisitor accumulator) { + private BoolQueryBuilder buildBoolQuery( + FunctionExpression node, + Object context, + BiFunction accumulator) { BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); for (Expression arg : node.getArguments()) { accumulator.apply(boolQuery, arg.accept(this, context)); @@ -131,8 +129,8 @@ private BoolQueryBuilder buildBoolQuery(FunctionExpression node, } private ScriptQueryBuilder buildScriptQuery(FunctionExpression node) { - return new ScriptQueryBuilder(new Script( - DEFAULT_SCRIPT_TYPE, EXPRESSION_LANG_NAME, serializer.serialize(node), emptyMap())); + return new ScriptQueryBuilder( + new Script( + DEFAULT_SCRIPT_TYPE, EXPRESSION_LANG_NAME, serializer.serialize(node), emptyMap())); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java index 699af4f3fd..44c1c30200 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java @@ -21,10 +21,9 @@ public QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue lite } /** - * Though WildcardQueryBuilder is required, LikeQuery needed its own class as - * it is not a relevance function which wildcard_query is. The arguments in - * LIKE are of type ReferenceExpression while wildcard_query are of type - * NamedArgumentExpression + * Though WildcardQueryBuilder is required, LikeQuery needed its own class as it is not a + * relevance function which wildcard_query is. 
The arguments in LIKE are of type + * ReferenceExpression while wildcard_query are of type NamedArgumentExpression */ protected WildcardQueryBuilder createBuilder(String field, String query) { String matchText = StringUtils.convertSqlWildcardToLucene(query); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java index a45c535383..a1b633f942 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.opensearch.sql.analysis.NestedAnalyzer.isNestedFunction; @@ -35,31 +34,31 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.function.FunctionName; -/** - * Lucene query abstraction that builds Lucene query from function expression. - */ +/** Lucene query abstraction that builds Lucene query from function expression. */ public abstract class LuceneQuery { /** - * Check if function expression supported by current Lucene query. - * Default behavior is that report supported if: - * 1. Left is a reference - * 2. Right side is a literal - * - * @param func function - * @return return true if supported, otherwise false. + * Check if function expression supported by current Lucene query. Default behavior is that report + * supported if: + *
    + *
  1. Left is a reference
  2. + *
  3. Right side is a literal
  4. + *
+ * @param func function + * @return return true if supported, otherwise false. */ public boolean canSupport(FunctionExpression func) { return (func.getArguments().size() == 2) - && (func.getArguments().get(0) instanceof ReferenceExpression) - && (func.getArguments().get(1) instanceof LiteralExpression - || literalExpressionWrappedByCast(func)) + && (func.getArguments().get(0) instanceof ReferenceExpression) + && (func.getArguments().get(1) instanceof LiteralExpression + || literalExpressionWrappedByCast(func)) || isMultiParameterQuery(func); } /** * Check if predicate expression has nested function on left side of predicate expression. * Validation for right side being a `LiteralExpression` is done in NestedQuery. + * * @param func function. * @return return true if function has supported nested function expression. */ @@ -70,8 +69,8 @@ public boolean isNestedPredicate(FunctionExpression func) { /** * Check if the function expression has multiple named argument expressions as the parameters. * - * @param func function - * @return return true if the expression is a multi-parameter function. + * @param func function + * @return return true if the expression is a multi-parameter function. */ private boolean isMultiParameterQuery(FunctionExpression func) { for (Expression expr : func.getArguments()) { @@ -95,139 +94,163 @@ private boolean literalExpressionWrappedByCast(FunctionExpression func) { } /** - * Build Lucene query from function expression. - * The cast function is converted to literal expressions before generating DSL. + * Build Lucene query from function expression. The cast function is converted to literal + * expressions before generating DSL. 
* - * @param func function - * @return query + * @param func function + * @return query */ public QueryBuilder build(FunctionExpression func) { ReferenceExpression ref = (ReferenceExpression) func.getArguments().get(0); Expression expr = func.getArguments().get(1); - ExprValue literalValue = expr instanceof LiteralExpression ? expr - .valueOf() : cast((FunctionExpression) expr); + ExprValue literalValue = + expr instanceof LiteralExpression ? expr.valueOf() : cast((FunctionExpression) expr); return doBuild(ref.getAttr(), ref.type(), literalValue); } private ExprValue cast(FunctionExpression castFunction) { - return castMap.get(castFunction.getFunctionName()).apply( - (LiteralExpression) castFunction.getArguments().get(0)); + return castMap + .get(castFunction.getFunctionName()) + .apply((LiteralExpression) castFunction.getArguments().get(0)); } - /** - * Type converting map. - */ - private final Map> castMap = ImmutableMap - .>builder() - .put(BuiltinFunctionName.CAST_TO_STRING.getName(), expr -> { - if (!expr.type().equals(ExprCoreType.STRING)) { - return new ExprStringValue(String.valueOf(expr.valueOf().value())); - } else { - return expr.valueOf(); - } - }) - .put(BuiltinFunctionName.CAST_TO_BYTE.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprByteValue(expr.valueOf().byteValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprByteValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprByteValue(Byte.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_SHORT.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprShortValue(expr.valueOf().shortValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprShortValue(expr.valueOf().booleanValue() ? 
1 : 0); - } else { - return new ExprShortValue(Short.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_INT.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprIntegerValue(expr.valueOf().integerValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprIntegerValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprIntegerValue(Integer.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_LONG.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprLongValue(expr.valueOf().longValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprLongValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprLongValue(Long.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_FLOAT.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprFloatValue(expr.valueOf().floatValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprFloatValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprFloatValue(Float.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_DOUBLE.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprDoubleValue(expr.valueOf().doubleValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprDoubleValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprDoubleValue(Double.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return expr.valueOf().doubleValue() != 0 - ? 
ExprBooleanValue.of(true) : ExprBooleanValue.of(false); - } else if (expr.type().equals(ExprCoreType.STRING)) { - return ExprBooleanValue.of(Boolean.valueOf(expr.valueOf().stringValue())); - } else { - return expr.valueOf(); - } - }) - .put(BuiltinFunctionName.CAST_TO_DATE.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprDateValue(expr.valueOf().stringValue()); - } else { - return new ExprDateValue(expr.valueOf().dateValue()); - } - }) - .put(BuiltinFunctionName.CAST_TO_TIME.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprTimeValue(expr.valueOf().stringValue()); - } else { - return new ExprTimeValue(expr.valueOf().timeValue()); - } - }) - .put(BuiltinFunctionName.CAST_TO_DATETIME.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprDatetimeValue(expr.valueOf().stringValue()); - } else { - return new ExprDatetimeValue(expr.valueOf().datetimeValue()); - } - }) - .put(BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprTimestampValue(expr.valueOf().stringValue()); - } else { - return new ExprTimestampValue(expr.valueOf().timestampValue()); - } - }) - .build(); + /** Type converting map. */ + private final Map> castMap = + ImmutableMap.>builder() + .put( + BuiltinFunctionName.CAST_TO_STRING.getName(), + expr -> { + if (!expr.type().equals(ExprCoreType.STRING)) { + return new ExprStringValue(String.valueOf(expr.valueOf().value())); + } else { + return expr.valueOf(); + } + }) + .put( + BuiltinFunctionName.CAST_TO_BYTE.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprByteValue(expr.valueOf().byteValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprByteValue(expr.valueOf().booleanValue() ? 
1 : 0); + } else { + return new ExprByteValue(Byte.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_SHORT.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprShortValue(expr.valueOf().shortValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprShortValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprShortValue(Short.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_INT.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprIntegerValue(expr.valueOf().integerValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprIntegerValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprIntegerValue(Integer.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_LONG.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprLongValue(expr.valueOf().longValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprLongValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprLongValue(Long.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_FLOAT.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprFloatValue(expr.valueOf().floatValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprFloatValue(expr.valueOf().booleanValue() ? 
1 : 0); + } else { + return new ExprFloatValue(Float.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_DOUBLE.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprDoubleValue(expr.valueOf().doubleValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprDoubleValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprDoubleValue(Double.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return expr.valueOf().doubleValue() != 0 + ? ExprBooleanValue.of(true) + : ExprBooleanValue.of(false); + } else if (expr.type().equals(ExprCoreType.STRING)) { + return ExprBooleanValue.of(Boolean.valueOf(expr.valueOf().stringValue())); + } else { + return expr.valueOf(); + } + }) + .put( + BuiltinFunctionName.CAST_TO_DATE.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprDateValue(expr.valueOf().stringValue()); + } else { + return new ExprDateValue(expr.valueOf().dateValue()); + } + }) + .put( + BuiltinFunctionName.CAST_TO_TIME.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprTimeValue(expr.valueOf().stringValue()); + } else { + return new ExprTimeValue(expr.valueOf().timeValue()); + } + }) + .put( + BuiltinFunctionName.CAST_TO_DATETIME.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprDatetimeValue(expr.valueOf().stringValue()); + } else { + return new ExprDatetimeValue(expr.valueOf().datetimeValue()); + } + }) + .put( + BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprTimestampValue(expr.valueOf().stringValue()); + } else { + return new ExprTimestampValue(expr.valueOf().timestampValue()); + } + }) + .build(); /** - * Build method that subclass 
implements by default which is to build query - * from reference and literal in function arguments. + * Build method that subclass implements by default which is to build query from reference and + * literal in function arguments. * - * @param fieldName field name - * @param fieldType field type - * @param literal field value literal - * @return query + * @param fieldName field name + * @param fieldType field type + * @param literal field value literal + * @return query */ protected QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue literal) { throw new UnsupportedOperationException( diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java index 358637791c..f098d5df5a 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import org.apache.lucene.search.join.ScoreMode; @@ -15,21 +14,20 @@ import org.opensearch.sql.expression.LiteralExpression; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Lucene query that build nested query. - */ +/** Lucene query that build nested query. */ public class NestedQuery extends LuceneQuery { /** * Build query for 'nested' function used in predicate expression. Supports 'nested' function on * left and literal on right. + * * @param func Function expression. * @param innerQuery Comparison query to be place inside nested query. * @return Nested query. 
*/ public QueryBuilder buildNested(FunctionExpression func, LuceneQuery innerQuery) { // Generate inner query for placement inside nested query - FunctionExpression nestedFunc = (FunctionExpression)func.getArguments().get(0); + FunctionExpression nestedFunc = (FunctionExpression) func.getArguments().get(0); validateArgs(nestedFunc, func.getArguments().get(1)); ExprValue literalValue = func.getArguments().get(1).valueOf(); ReferenceExpression ref = (ReferenceExpression) nestedFunc.getArguments().get(0); @@ -38,14 +36,17 @@ public QueryBuilder buildNested(FunctionExpression func, LuceneQuery innerQuery) // Generate nested query boolean hasPathParam = nestedFunc.getArguments().size() == 2; - String pathStr = hasPathParam ? nestedFunc.getArguments().get(1).toString() : - getNestedPathString((ReferenceExpression) nestedFunc.getArguments().get(0)); + String pathStr = + hasPathParam + ? nestedFunc.getArguments().get(1).toString() + : getNestedPathString((ReferenceExpression) nestedFunc.getArguments().get(0)); return QueryBuilders.nestedQuery(pathStr, innerQueryResult, ScoreMode.None); } /** - * Dynamically generate path for nested field. An example field of 'office.section.cubicle' - * would dynamically generate the path 'office.section'. + * Dynamically generate path for nested field. An example field of 'office.section.cubicle' would + * dynamically generate the path 'office.section'. + * * @param field nested field to generate path for. * @return path for nested field. */ @@ -59,31 +60,27 @@ private String getNestedPathString(ReferenceExpression field) { /** * Validate arguments in nested function and predicate expression. + * * @param nestedFunc Nested function expression. 
*/ private void validateArgs(FunctionExpression nestedFunc, Expression rightExpression) { if (nestedFunc.getArguments().size() > 2) { throw new IllegalArgumentException( - "nested function supports 2 parameters (field, path) or 1 parameter (field)" - ); + "nested function supports 2 parameters (field, path) or 1 parameter (field)"); } for (var arg : nestedFunc.getArguments()) { if (!(arg instanceof ReferenceExpression)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", - arg.toString() - ) - ); + String.format("Illegal nested field name: %s", arg.toString())); } } if (!(rightExpression instanceof LiteralExpression)) { throw new IllegalArgumentException( - String.format("Illegal argument on right side of predicate expression: %s", - rightExpression.toString() - ) - ); + String.format( + "Illegal argument on right side of predicate expression: %s", + rightExpression.toString())); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java index 1adddff95d..a830adb590 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java @@ -31,192 +31,254 @@ public class FunctionParameterRepository { public static final Map> - MatchBoolPrefixQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - 
convertBoolValue(v, "fuzzy_transpositions"))) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) - .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) - .build(); + MatchBoolPrefixQueryBuildActions = + ImmutableMap + .>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) + .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put( + "max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) + .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) + .build(); public static final Map> - MatchPhrasePrefixQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MatchPhrasePrefixQueryBuildActions = + ImmutableMap + .>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) + 
.put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); public static final Map> - MatchPhraseQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MatchPhraseQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) + .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); public static final Map> - MatchQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) - .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MatchQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> 
b.analyzer(v.stringValue())) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) + .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put( + "max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) + .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) + .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); @SuppressWarnings("deprecation") // cutoffFrequency is deprecated public static final Map> - MultiMatchQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("cutoff_frequency", (b, v) -> b.cutoffFrequency( - convertFloatValue(v, "cutoff_frequency"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("operator", (b, v) -> 
b.operator(convertOperator(v, "operator"))) - .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) - .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) - .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) - .put("type", (b, v) -> b.type(convertType(v))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MultiMatchQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "cutoff_frequency", + (b, v) -> b.cutoffFrequency(convertFloatValue(v, "cutoff_frequency"))) + .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put( + "max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) + .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) + .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) + .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) + .put("type", (b, v) -> b.type(convertType(v))) + .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); public static final Map> - QueryStringQueryBuildActions = ImmutableMap.>builder() - .put("allow_leading_wildcard", (b, v) -> b.allowLeadingWildcard( - convertBoolValue(v, "allow_leading_wildcard"))) - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - 
.put("analyze_wildcard", (b, v) -> b.analyzeWildcard( - convertBoolValue(v, "analyze_wildcard"))) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("default_operator", (b, v) -> b.defaultOperator( - convertOperator(v, "default_operator"))) - .put("enable_position_increments", (b, v) -> b.enablePositionIncrements( - convertBoolValue(v, "enable_position_increments"))) - .put("escape", (b, v) -> b.escape(convertBoolValue(v, "escape"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_max_expansions", (b, v) -> b.fuzzyMaxExpansions( - convertIntValue(v, "fuzzy_max_expansions"))) - .put("fuzzy_prefix_length", (b, v) -> b.fuzzyPrefixLength( - convertIntValue(v, "fuzzy_prefix_length"))) - .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("max_determinized_states", (b, v) -> b.maxDeterminizedStates( - convertIntValue(v, "max_determinized_states"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("phrase_slop", (b, v) -> b.phraseSlop(convertIntValue(v, "phrase_slop"))) - .put("quote_analyzer", (b, v) -> b.quoteAnalyzer(v.stringValue())) - .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) - .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) - .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) - .put("time_zone", (b, v) -> b.timeZone(checkTimeZone(v))) - .put("type", (b, v) -> b.type(convertType(v))) - .build(); + QueryStringQueryBuildActions = + ImmutableMap.>builder() + .put( + "allow_leading_wildcard", + (b, v) -> 
b.allowLeadingWildcard(convertBoolValue(v, "allow_leading_wildcard"))) + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put( + "analyze_wildcard", + (b, v) -> b.analyzeWildcard(convertBoolValue(v, "analyze_wildcard"))) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "default_operator", + (b, v) -> b.defaultOperator(convertOperator(v, "default_operator"))) + .put( + "enable_position_increments", + (b, v) -> + b.enablePositionIncrements(convertBoolValue(v, "enable_position_increments"))) + .put("escape", (b, v) -> b.escape(convertBoolValue(v, "escape"))) + .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) + .put( + "fuzzy_max_expansions", + (b, v) -> b.fuzzyMaxExpansions(convertIntValue(v, "fuzzy_max_expansions"))) + .put( + "fuzzy_prefix_length", + (b, v) -> b.fuzzyPrefixLength(convertIntValue(v, "fuzzy_prefix_length"))) + .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put( + "max_determinized_states", + (b, v) -> b.maxDeterminizedStates(convertIntValue(v, "max_determinized_states"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("phrase_slop", (b, v) -> b.phraseSlop(convertIntValue(v, "phrase_slop"))) + .put("quote_analyzer", (b, v) -> b.quoteAnalyzer(v.stringValue())) + .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) + .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) + .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) + .put("time_zone", (b, v) -> b.timeZone(checkTimeZone(v))) + .put("type", (b, v) -> b.type(convertType(v))) 
+ .build(); public static final Map> - SimpleQueryStringQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("analyze_wildcard", (b, v) -> b.analyzeWildcard( - convertBoolValue(v, "analyze_wildcard"))) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("default_operator", (b, v) -> b.defaultOperator( - convertOperator(v, "default_operator"))) - .put("flags", (b, v) -> b.flags(convertFlags(v))) - .put("fuzzy_max_expansions", (b, v) -> b.fuzzyMaxExpansions( - convertIntValue(v, "fuzzy_max_expansions"))) - .put("fuzzy_prefix_length", (b, v) -> b.fuzzyPrefixLength( - convertIntValue(v, "fuzzy_prefix_length"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) - .build(); + SimpleQueryStringQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put( + "analyze_wildcard", + (b, v) -> b.analyzeWildcard(convertBoolValue(v, "analyze_wildcard"))) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "default_operator", + (b, v) -> b.defaultOperator(convertOperator(v, "default_operator"))) + .put("flags", (b, v) -> b.flags(convertFlags(v))) + .put( + "fuzzy_max_expansions", + (b, v) -> b.fuzzyMaxExpansions(convertIntValue(v, "fuzzy_max_expansions"))) + .put( + "fuzzy_prefix_length", + (b, v) -> b.fuzzyPrefixLength(convertIntValue(v, 
"fuzzy_prefix_length"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) + .build(); public static final Map> - WildcardQueryBuildActions = ImmutableMap.>builder() - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("case_insensitive", (b, v) -> b.caseInsensitive(convertBoolValue(v, "case_insensitive"))) - .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) - .build(); + WildcardQueryBuildActions = + ImmutableMap.>builder() + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "case_insensitive", + (b, v) -> b.caseInsensitive(convertBoolValue(v, "case_insensitive"))) + .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) + .build(); public static final Map ArgumentLimitations = ImmutableMap.builder() - .put("boost", "Accepts only floating point values greater than 0.") - .put("tie_breaker", "Accepts only floating point values in range 0 to 1.") - .put("rewrite", "Available values are: constant_score, " - + "scoring_boolean, constant_score_boolean, top_terms_X, top_terms_boost_X, " - + "top_terms_blended_freqs_X, where X is an integer value.") - .put("flags", String.format( - "Available values are: %s and any combinations of these separated by '|'.", - Arrays.stream(SimpleQueryStringFlag.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("time_zone", "For more information, follow this link: " - + "https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html#of-java.lang.String-") - .put("fuzziness", "Available values are: " - + "'AUTO', 'AUTO:x,y' or z, where x, y, z - integer values.") - .put("operator", String.format("Available values are: %s.", - 
Arrays.stream(Operator.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("type", String.format("Available values are: %s.", - Arrays.stream(MultiMatchQueryBuilder.Type.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("zero_terms_query", String.format("Available values are: %s.", - Arrays.stream(MatchQuery.ZeroTermsQuery.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("int", "Accepts only integer values.") - .put("float", "Accepts only floating point values.") - .put("bool", "Accepts only boolean values: 'true' or 'false'.") - .build(); - + .put("boost", "Accepts only floating point values greater than 0.") + .put("tie_breaker", "Accepts only floating point values in range 0 to 1.") + .put( + "rewrite", + "Available values are: constant_score, " + + "scoring_boolean, constant_score_boolean, top_terms_X, top_terms_boost_X, " + + "top_terms_blended_freqs_X, where X is an integer value.") + .put( + "flags", + String.format( + "Available values are: %s and any combinations of these separated by '|'.", + Arrays.stream(SimpleQueryStringFlag.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put( + "time_zone", + "For more information, follow this link: " + + "https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html#of-java.lang.String-") + .put( + "fuzziness", + "Available values are: 'AUTO', 'AUTO:x,y' or z, where x, y, z - integer values.") + .put( + "operator", + String.format( + "Available values are: %s.", + Arrays.stream(Operator.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put( + "type", + String.format( + "Available values are: %s.", + Arrays.stream(MultiMatchQueryBuilder.Type.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put( + "zero_terms_query", + String.format( + "Available values are: %s.", + 
Arrays.stream(MatchQuery.ZeroTermsQuery.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put("int", "Accepts only integer values.") + .put("float", "Accepts only floating point values.") + .put("bool", "Accepts only boolean values: 'true' or 'false'.") + .build(); private static String formatErrorMessage(String name, String value) { return formatErrorMessage(name, value, name); } private static String formatErrorMessage(String name, String value, String limitationName) { - return String.format("Invalid %s value: '%s'. %s", - name, value, ArgumentLimitations.containsKey(name) ? ArgumentLimitations.get(name) + return String.format( + "Invalid %s value: '%s'. %s", + name, + value, + ArgumentLimitations.containsKey(name) + ? ArgumentLimitations.get(name) : ArgumentLimitations.getOrDefault(limitationName, "")); } /** * Check whether value is valid for 'rewrite' or 'fuzzy_rewrite'. + * * @param value Value * @param name Value name * @return Converted @@ -233,6 +295,7 @@ public static String checkRewrite(ExprValue value, String name) { /** * Convert ExprValue to Flags. + * * @param value Value * @return Array of flags */ @@ -248,6 +311,7 @@ public static SimpleQueryStringFlag[] convertFlags(ExprValue value) { /** * Check whether ExprValue could be converted to timezone object. + * * @param value Value * @return Converted to string */ @@ -262,6 +326,7 @@ public static String checkTimeZone(ExprValue value) { /** * Convert ExprValue to Fuzziness object. + * * @param value Value * @return Fuzziness */ @@ -275,6 +340,7 @@ public static Fuzziness convertFuzziness(ExprValue value) { /** * Convert ExprValue to Operator object, could be used for 'operator' and 'default_operator'. + * * @param value Value * @param name Value name * @return Operator @@ -289,13 +355,14 @@ public static Operator convertOperator(ExprValue value, String name) { /** * Convert ExprValue to Type object. 
+ * * @param value Value * @return Type */ public static MultiMatchQueryBuilder.Type convertType(ExprValue value) { try { - return MultiMatchQueryBuilder.Type.parse(value.stringValue().toLowerCase(), - LoggingDeprecationHandler.INSTANCE); + return MultiMatchQueryBuilder.Type.parse( + value.stringValue().toLowerCase(), LoggingDeprecationHandler.INSTANCE); } catch (Exception e) { throw new RuntimeException(formatErrorMessage("type", value.stringValue()), e); } @@ -303,6 +370,7 @@ public static MultiMatchQueryBuilder.Type convertType(ExprValue value) { /** * Convert ExprValue to ZeroTermsQuery object. + * * @param value Value * @return ZeroTermsQuery */ @@ -316,6 +384,7 @@ public static MatchQuery.ZeroTermsQuery convertZeroTermsQuery(ExprValue value) { /** * Convert ExprValue to int. + * * @param value Value * @param name Value name * @return int @@ -330,6 +399,7 @@ public static int convertIntValue(ExprValue value, String name) { /** * Convert ExprValue to float. + * * @param value Value * @param name Value name * @return float @@ -344,6 +414,7 @@ public static float convertFloatValue(ExprValue value, String name) { /** * Convert ExprValue to bool. + * * @param value Value * @param name Value name * @return bool diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java index 7044a56035..5443d7154d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java @@ -8,14 +8,11 @@ import org.opensearch.index.query.MatchBoolPrefixQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Initializes MatchBoolPrefixQueryBuilder from a FunctionExpression. 
- */ -public class MatchBoolPrefixQuery - extends SingleFieldQuery { +/** Initializes MatchBoolPrefixQueryBuilder from a FunctionExpression. */ +public class MatchBoolPrefixQuery extends SingleFieldQuery { /** - * Constructor for MatchBoolPrefixQuery to configure RelevanceQuery - * with support of optional parameters. + * Constructor for MatchBoolPrefixQuery to configure RelevanceQuery with support of optional + * parameters. */ public MatchBoolPrefixQuery() { super(FunctionParameterRepository.MatchBoolPrefixQueryBuildActions); @@ -23,9 +20,10 @@ public MatchBoolPrefixQuery() { /** * Maps correct query builder function to class. - * @param field Field to execute query in - * @param query Text used to search field - * @return Object of executed query + * + * @param field Field to execute query in + * @param query Text used to search field + * @return Object of executed query */ @Override protected MatchBoolPrefixQueryBuilder createBuilder(String field, String query) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java index 8ee9ae299e..5a9b5e0d1c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java @@ -8,12 +8,10 @@ import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Lucene query that builds a match_phrase_prefix query. - */ +/** Lucene query that builds a match_phrase_prefix query. 
*/ public class MatchPhrasePrefixQuery extends SingleFieldQuery { /** - * Default constructor for MatchPhrasePrefixQuery configures how RelevanceQuery.build() handles + * Default constructor for MatchPhrasePrefixQuery configures how RelevanceQuery.build() handles * named arguments. */ public MatchPhrasePrefixQuery() { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java index 2afaca1a7a..3c823b7cae 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java @@ -8,13 +8,11 @@ import org.opensearch.index.query.MatchPhraseQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Lucene query that builds a match_phrase query. - */ +/** Lucene query that builds a match_phrase query. */ public class MatchPhraseQuery extends SingleFieldQuery { /** - * Default constructor for MatchPhraseQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for MatchPhraseQuery configures how RelevanceQuery.build() handles named + * arguments. 
*/ public MatchPhraseQuery() { super(FunctionParameterRepository.MatchPhraseQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java index a4de1c0831..b40d4fb85b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java @@ -8,13 +8,11 @@ import org.opensearch.index.query.MatchQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Initializes MatchQueryBuilder from a FunctionExpression. - */ +/** Initializes MatchQueryBuilder from a FunctionExpression. */ public class MatchQuery extends SingleFieldQuery { /** - * Default constructor for MatchQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for MatchQuery configures how RelevanceQuery.build() handles named + * arguments. */ public MatchQuery() { super(FunctionParameterRepository.MatchQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java index 9f37951072..b6e854a3f8 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java @@ -13,7 +13,8 @@ import org.opensearch.sql.expression.NamedArgumentExpression; /** - * Base class to represent relevance queries that search multiple fields. + * Base class to represent relevance queries that search multiple fields. 
+ * * @param The builder class for the OpenSearch query. */ abstract class MultiFieldQuery extends RelevanceQuery { @@ -25,26 +26,24 @@ public MultiFieldQuery(Map> queryBuildActions) { @Override public T createQueryBuilder(List arguments) { // Extract 'fields' and 'query' - var fields = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("fields")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'fields' parameter is missing.")); - - var query = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("query")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); - - var fieldsAndWeights = fields - .getValue() - .valueOf() - .tupleValue() - .entrySet() - .stream() - .collect(ImmutableMap.toImmutableMap(e -> e.getKey(), e -> e.getValue().floatValue())); + var fields = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("fields")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'fields' parameter is missing.")); + + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + + var fieldsAndWeights = + fields.getValue().valueOf().tupleValue().entrySet().stream() + .collect(ImmutableMap.toImmutableMap(e -> e.getKey(), e -> e.getValue().floatValue())); return createBuilder(fieldsAndWeights, query.getValue().valueOf().stringValue()); } - protected abstract T createBuilder(ImmutableMap fields, String query); + protected abstract T createBuilder(ImmutableMap fields, String query); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java index a791bf756b..826e6d7dde 100644 --- 
a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java @@ -11,8 +11,8 @@ public class MultiMatchQuery extends MultiFieldQuery { /** - * Default constructor for MultiMatch configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for MultiMatch configures how RelevanceQuery.build() handles named + * arguments. */ public MultiMatchQuery() { super(FunctionParameterRepository.MultiMatchQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java index 528b24af6c..ba79147c8c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java @@ -33,36 +33,39 @@ protected void ignoreArguments(List arguments) { protected void checkValidArguments(String argNormalized, T queryBuilder) { if (!getQueryBuildActions().containsKey(argNormalized)) { throw new SemanticCheckException( - String.format("Parameter %s is invalid for %s function.", - argNormalized, getQueryName())); + String.format("Parameter %s is invalid for %s function.", argNormalized, getQueryName())); } } + /** - * Override build function because RelevanceQuery requires 2 fields, - * but NoFieldQuery must have no fields. + * Override build function because RelevanceQuery requires 2 fields, but NoFieldQuery must have no + * fields. * * @param func : Contains function name and passed in arguments. 
* @return : QueryBuilder object */ - @Override public QueryBuilder build(FunctionExpression func) { - var arguments = func.getArguments().stream().map( - a -> (NamedArgumentExpression) a).collect(Collectors.toList()); + var arguments = + func.getArguments().stream() + .map(a -> (NamedArgumentExpression) a) + .collect(Collectors.toList()); if (arguments.size() < 1) { - throw new SyntaxCheckException(String.format( - "%s requires at least one parameter", func.getFunctionName())); + throw new SyntaxCheckException( + String.format("%s requires at least one parameter", func.getFunctionName())); } return loadArguments(arguments); } - @Override public T createQueryBuilder(List arguments) { // Extract 'query' - var query = arguments.stream().filter(a -> a.getArgName().equalsIgnoreCase("query")).findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); return createBuilder(query.getValue().valueOf().stringValue()); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java index dc67da9de5..aa78d60a6e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.serialization; import java.io.ByteArrayInputStream; @@ -14,9 +13,7 @@ import java.util.Base64; import org.opensearch.sql.expression.Expression; -/** - * Default serializer that (de-)serialize expressions by JDK serialization. 
- */ +/** Default serializer that (de-)serialize expressions by JDK serialization. */ public class DefaultExpressionSerializer implements ExpressionSerializer { @Override @@ -42,5 +39,4 @@ public Expression deserialize(String code) { throw new IllegalStateException("Failed to deserialize expression code: " + code, e); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java index b7caeb30f8..9c9779696c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java @@ -3,28 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.serialization; import org.opensearch.sql.expression.Expression; -/** - * Expression serializer that (de-)serializes expression object. - */ +/** Expression serializer that (de-)serializes expression object. */ public interface ExpressionSerializer { /** * Serialize an expression. - * @param expr expression - * @return serialized string + * + * @param expr expression + * @return serialized string */ String serialize(Expression expr); /** * Deserialize an expression. 
- * @param code serialized code - * @return original expression object + * + * @param code serialized code + * @return original expression object */ Expression deserialize(String code); - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java index c3a5d13dca..35ad6b7ea6 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java @@ -35,8 +35,7 @@ private static Stream types() { Arguments.of("BINARY", new OpenSearchExprBinaryValue("A"), "binary"), Arguments.of("IP", new OpenSearchExprIpValue("A"), "ip"), Arguments.of("TEXT", new TestTextWithFieldValue("Hello World"), "text with fields"), - Arguments.of("GEO_POINT", new OpenSearchExprGeoPointValue(0d, 0d), "geo point") - ); + Arguments.of("GEO_POINT", new OpenSearchExprGeoPointValue(0d, 0d), "geo point")); } private String typeofGetValue(ExprValue input) { @@ -50,8 +49,8 @@ public TestTextWithFieldValue(String value) { @Override public ExprType type() { - return OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + return OpenSearchTextType.of( + Map.of("words", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java index 8d69b3d855..b0288dc9a7 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.opensearch.data.type; import static org.junit.jupiter.api.Assertions.assertAll; @@ -108,13 +107,9 @@ private static Stream getTestDataWithType() { Arguments.of(MappingType.Date, "date", TIMESTAMP), Arguments.of(MappingType.Object, "object", STRUCT), Arguments.of(MappingType.Nested, "nested", ARRAY), - Arguments.of(MappingType.GeoPoint, "geo_point", - OpenSearchGeoPointType.of()), - Arguments.of(MappingType.Binary, "binary", - OpenSearchBinaryType.of()), - Arguments.of(MappingType.Ip, "ip", - OpenSearchIpType.of()) - ); + Arguments.of(MappingType.GeoPoint, "geo_point", OpenSearchGeoPointType.of()), + Arguments.of(MappingType.Binary, "binary", OpenSearchBinaryType.of()), + Arguments.of(MappingType.Ip, "ip", OpenSearchIpType.of())); } @ParameterizedTest(name = "{1}") @@ -128,8 +123,7 @@ public void of_MappingType(MappingType mappingType, String name, ExprType dataTy assertAll( () -> assertEquals(nameForPPL, type.typeName()), () -> assertEquals(nameForSQL, type.legacyTypeName()), - () -> assertEquals(dataType, type.getExprType()) - ); + () -> assertEquals(dataType, type.getExprType())); } @ParameterizedTest(name = "{0}") @@ -168,15 +162,10 @@ public void of_OpenSearchDataType_from_MappingType(OpenSearchDataType.MappingTyp public void types_but_clones_are_singletons_and_cached() { var type = OpenSearchDataType.of(MappingType.Object); var alsoType = OpenSearchDataType.of(MappingType.Object); - Map properties = Map.of( - "properties", - Map.of("number", Map.of("type", "integer"))); - var typeWithProperties = OpenSearchDataType.of( - MappingType.Object, - properties); - var typeWithFields = OpenSearchDataType.of( - MappingType.Text, - Map.of()); + Map properties = + Map.of("properties", Map.of("number", Map.of("type", "integer"))); + var typeWithProperties = OpenSearchDataType.of(MappingType.Object, properties); + var typeWithFields = OpenSearchDataType.of(MappingType.Text, Map.of()); var cloneType = type.cloneEmpty(); assertAll( @@ -187,22 +176,20 @@ 
public void types_but_clones_are_singletons_and_cached() { () -> assertNotSame(typeWithProperties, typeWithProperties.cloneEmpty()), () -> assertNotSame(typeWithFields, typeWithFields.cloneEmpty()), () -> assertNotSame(dateType, dateType.cloneEmpty()), - () -> assertSame(OpenSearchDataType.of(MappingType.Text), - OpenSearchTextType.of()), - () -> assertSame(OpenSearchDataType.of(MappingType.Binary), - OpenSearchBinaryType.of()), - () -> assertSame(OpenSearchDataType.of(MappingType.GeoPoint), - OpenSearchGeoPointType.of()), - () -> assertSame(OpenSearchDataType.of(MappingType.Ip), - OpenSearchIpType.of()), - () -> assertNotSame(OpenSearchTextType.of(), - OpenSearchTextType.of(Map.of("properties", OpenSearchDataType.of(INTEGER)))), + () -> assertSame(OpenSearchDataType.of(MappingType.Text), OpenSearchTextType.of()), + () -> assertSame(OpenSearchDataType.of(MappingType.Binary), OpenSearchBinaryType.of()), + () -> assertSame(OpenSearchDataType.of(MappingType.GeoPoint), OpenSearchGeoPointType.of()), + () -> assertSame(OpenSearchDataType.of(MappingType.Ip), OpenSearchIpType.of()), + () -> + assertNotSame( + OpenSearchTextType.of(), + OpenSearchTextType.of(Map.of("properties", OpenSearchDataType.of(INTEGER)))), () -> assertSame(OpenSearchDataType.of(INTEGER), OpenSearchDataType.of(INTEGER)), () -> assertSame(OpenSearchDataType.of(STRING), OpenSearchDataType.of(STRING)), () -> assertSame(OpenSearchDataType.of(STRUCT), OpenSearchDataType.of(STRUCT)), - () -> assertNotSame(OpenSearchDataType.of(INTEGER), - OpenSearchDataType.of(INTEGER).cloneEmpty()) - ); + () -> + assertNotSame( + OpenSearchDataType.of(INTEGER), OpenSearchDataType.of(INTEGER).cloneEmpty())); } @Test @@ -211,17 +198,25 @@ public void types_but_clones_are_singletons_and_cached() { public void fields_and_properties_are_readonly() { var objectType = OpenSearchDataType.of(MappingType.Object); var textType = OpenSearchTextType.of(); - var textTypeWithFields = OpenSearchTextType.of( - Map.of("letters", 
OpenSearchDataType.of(MappingType.Keyword))); + var textTypeWithFields = + OpenSearchTextType.of(Map.of("letters", OpenSearchDataType.of(MappingType.Keyword))); assertAll( - () -> assertThrows(UnsupportedOperationException.class, - () -> objectType.getProperties().put("something", OpenSearchDataType.of(INTEGER))), - () -> assertThrows(UnsupportedOperationException.class, - () -> textType.getFields().put("words", OpenSearchDataType.of(MappingType.Keyword))), - () -> assertThrows(UnsupportedOperationException.class, - () -> textTypeWithFields.getFields().put("words", - OpenSearchDataType.of(MappingType.Keyword))) - ); + () -> + assertThrows( + UnsupportedOperationException.class, + () -> objectType.getProperties().put("something", OpenSearchDataType.of(INTEGER))), + () -> + assertThrows( + UnsupportedOperationException.class, + () -> + textType.getFields().put("words", OpenSearchDataType.of(MappingType.Keyword))), + () -> + assertThrows( + UnsupportedOperationException.class, + () -> + textTypeWithFields + .getFields() + .put("words", OpenSearchDataType.of(MappingType.Keyword)))); } @Test @@ -234,10 +229,8 @@ public void of_null_MappingType() { // cloneEmpty doesn't clone properties and fields. // Fields are cloned by OpenSearchTextType::cloneEmpty, because it is used in that type only. 
public void cloneEmpty() { - var type = OpenSearchDataType.of( - MappingType.Object, - Map.of("val", OpenSearchDataType.of(INTEGER)) - ); + var type = + OpenSearchDataType.of(MappingType.Object, Map.of("val", OpenSearchDataType.of(INTEGER))); var clone = type.cloneEmpty(); var textClone = textKeywordType.cloneEmpty(); @@ -246,9 +239,10 @@ public void cloneEmpty() { () -> assertEquals(type, clone), () -> assertTrue(clone.getProperties().isEmpty()), () -> assertEquals(textKeywordType, textClone), - () -> assertEquals(FieldUtils.readField(textKeywordType, "fields", true), - FieldUtils.readField(textClone, "fields", true)) - ); + () -> + assertEquals( + FieldUtils.readField(textKeywordType, "fields", true), + FieldUtils.readField(textClone, "fields", true))); } // Following structure of nested objects should be flattened @@ -294,26 +288,29 @@ public void traverseAndFlatten() { () -> assertEquals(9, flattened.size()), () -> assertTrue(flattened.get("mapping").getProperties().isEmpty()), () -> assertTrue(flattened.get("mapping.submapping").getProperties().isEmpty()), - () -> assertTrue( - flattened.get("mapping.submapping.subsubmapping").getProperties().isEmpty()), - + () -> + assertTrue(flattened.get("mapping.submapping.subsubmapping").getProperties().isEmpty()), () -> assertEquals(objectType, flattened.get("mapping")), () -> assertEquals(objectType, flattened.get("mapping.submapping")), () -> assertEquals(objectType, flattened.get("mapping.submapping.subsubmapping")), - - () -> assertEquals(OpenSearchDataType.of(MappingType.Keyword), - flattened.get("mapping.keyword")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - flattened.get("mapping.text")), - () -> assertEquals(OpenSearchGeoPointType.of(), - flattened.get("mapping.submapping.geo_point")), - () -> assertEquals(OpenSearchTextType.of(), - flattened.get("mapping.submapping.textWithFieldsType")), - () -> assertEquals(OpenSearchTextType.of(), - 
flattened.get("mapping.submapping.subsubmapping.texttype")), - () -> assertEquals(OpenSearchDataType.of(INTEGER), - flattened.get("mapping.submapping.subsubmapping.INTEGER")) - ); + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Keyword), flattened.get("mapping.keyword")), + () -> assertEquals(OpenSearchDataType.of(MappingType.Text), flattened.get("mapping.text")), + () -> + assertEquals( + OpenSearchGeoPointType.of(), flattened.get("mapping.submapping.geo_point")), + () -> + assertEquals( + OpenSearchTextType.of(), flattened.get("mapping.submapping.textWithFieldsType")), + () -> + assertEquals( + OpenSearchTextType.of(), + flattened.get("mapping.submapping.subsubmapping.texttype")), + () -> + assertEquals( + OpenSearchDataType.of(INTEGER), + flattened.get("mapping.submapping.subsubmapping.INTEGER"))); } @Test @@ -322,25 +319,42 @@ public void resolve() { assertAll( () -> assertNull(OpenSearchDataType.resolve(mapping, "incorrect")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Object), - OpenSearchDataType.resolve(mapping, "mapping")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Object), - OpenSearchDataType.resolve(mapping, "submapping")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Object), - OpenSearchDataType.resolve(mapping, "subsubmapping")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - OpenSearchDataType.resolve(mapping, "texttype")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - OpenSearchDataType.resolve(mapping, "textWithFieldsType")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - OpenSearchDataType.resolve(mapping, "text")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Integer), - OpenSearchDataType.resolve(mapping, "INTEGER")), - () -> assertEquals(OpenSearchDataType.of(MappingType.GeoPoint), - OpenSearchDataType.resolve(mapping, "geo_point")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Keyword), - OpenSearchDataType.resolve(mapping, 
"keyword")) - ); + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Object), + OpenSearchDataType.resolve(mapping, "mapping")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Object), + OpenSearchDataType.resolve(mapping, "submapping")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Object), + OpenSearchDataType.resolve(mapping, "subsubmapping")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Text), + OpenSearchDataType.resolve(mapping, "texttype")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Text), + OpenSearchDataType.resolve(mapping, "textWithFieldsType")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Text), + OpenSearchDataType.resolve(mapping, "text")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Integer), + OpenSearchDataType.resolve(mapping, "INTEGER")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.GeoPoint), + OpenSearchDataType.resolve(mapping, "geo_point")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Keyword), + OpenSearchDataType.resolve(mapping, "keyword"))); } // type : Object @@ -357,39 +371,38 @@ public void resolve() { @Test public void text_type_with_fields_ctor() { - var type = OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(MappingType.Keyword))); + var type = OpenSearchTextType.of(Map.of("words", OpenSearchDataType.of(MappingType.Keyword))); assertAll( () -> assertEquals(OpenSearchTextType.of(), type), () -> assertEquals(1, type.getFields().size()), - () -> assertEquals(OpenSearchDataType.of(MappingType.Keyword), - type.getFields().get("words")) - ); + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Keyword), type.getFields().get("words"))); } private Map getSampleMapping() { - Map subsubmapping = Map.of( - "properties", Map.of( - "texttype", Map.of("type", "text"), - "INTEGER", Map.of("type", "integer") - ) - ); - - Map submapping = Map.of( - "properties", Map.of( - "subsubmapping", 
subsubmapping, - "textWithFieldsType", Map.of("type", "text", "fieldsType", true), - "geo_point", Map.of("type", "geo_point") - ) - ); - - Map types = Map.of( - "properties", Map.of( - "submapping", submapping, - "keyword", Map.of("type", "keyword"), - "text", Map.of("type", "text") - ) - ); + Map subsubmapping = + Map.of( + "properties", + Map.of( + "texttype", Map.of("type", "text"), + "INTEGER", Map.of("type", "integer"))); + + Map submapping = + Map.of( + "properties", + Map.of( + "subsubmapping", subsubmapping, + "textWithFieldsType", Map.of("type", "text", "fieldsType", true), + "geo_point", Map.of("type", "geo_point"))); + + Map types = + Map.of( + "properties", + Map.of( + "submapping", submapping, + "keyword", Map.of("type", "keyword"), + "text", Map.of("type", "text"))); var mapping = OpenSearchDataType.of(MappingType.Object, types); return Map.of("mapping", mapping); @@ -397,8 +410,7 @@ private Map getSampleMapping() { @Test public void test_getExprType() { - assertEquals(OpenSearchTextType.of(), - OpenSearchDataType.of(MappingType.Text).getExprType()); + assertEquals(OpenSearchTextType.of(), OpenSearchDataType.of(MappingType.Text).getExprType()); assertEquals(FLOAT, OpenSearchDataType.of(MappingType.Float).getExprType()); assertEquals(FLOAT, OpenSearchDataType.of(MappingType.HalfFloat).getExprType()); assertEquals(DOUBLE, OpenSearchDataType.of(MappingType.Double).getExprType()); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java index 13393da732..a9511f8c0b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java @@ -47,10 +47,8 @@ class OpenSearchDateTypeTest { private static final OpenSearchDateType defaultDateType = 
OpenSearchDateType.of(defaultFormatString); - private static final OpenSearchDateType dateDateType = - OpenSearchDateType.of(dateFormatString); - private static final OpenSearchDateType timeDateType = - OpenSearchDateType.of(timeFormatString); + private static final OpenSearchDateType dateDateType = OpenSearchDateType.of(dateFormatString); + private static final OpenSearchDateType timeDateType = OpenSearchDateType.of(timeFormatString); private static final OpenSearchDateType datetimeDateType = OpenSearchDateType.of(datetimeFormatString); @@ -79,8 +77,7 @@ public void isCompatible() { () -> assertFalse(DATE.isCompatible(defaultDateType)), () -> assertTrue(DATE.isCompatible(dateDateType)), () -> assertFalse(DATE.isCompatible(timeDateType)), - () -> assertFalse(DATE.isCompatible(datetimeDateType)) - ); + () -> assertFalse(DATE.isCompatible(datetimeDateType))); } // `typeName` and `legacyTypeName` return the same thing for date objects: @@ -92,8 +89,7 @@ public void check_typeName() { () -> assertEquals("DATE", defaultDateType.typeName()), () -> assertEquals("DATE", timeDateType.typeName()), () -> assertEquals("DATE", dateDateType.typeName()), - () -> assertEquals("DATE", datetimeDateType.typeName()) - ); + () -> assertEquals("DATE", datetimeDateType.typeName())); } @Test @@ -103,8 +99,7 @@ public void check_legacyTypeName() { () -> assertEquals("DATE", defaultDateType.legacyTypeName()), () -> assertEquals("DATE", timeDateType.legacyTypeName()), () -> assertEquals("DATE", dateDateType.legacyTypeName()), - () -> assertEquals("DATE", datetimeDateType.legacyTypeName()) - ); + () -> assertEquals("DATE", datetimeDateType.legacyTypeName())); } @Test @@ -114,8 +109,7 @@ public void check_exprTypeName() { () -> assertEquals(TIMESTAMP, defaultDateType.getExprType()), () -> assertEquals(TIME, timeDateType.getExprType()), () -> assertEquals(DATE, dateDateType.getExprType()), - () -> assertEquals(TIMESTAMP, datetimeDateType.getExprType()) - ); + () -> assertEquals(TIMESTAMP, 
datetimeDateType.getExprType())); } private static Stream getAllSupportedFormats() { @@ -125,11 +119,12 @@ private static Stream getAllSupportedFormats() { @ParameterizedTest @MethodSource("getAllSupportedFormats") public void check_supported_format_names_coverage(FormatNames formatName) { - assertTrue(SUPPORTED_NAMED_NUMERIC_FORMATS.contains(formatName) - || SUPPORTED_NAMED_DATETIME_FORMATS.contains(formatName) - || SUPPORTED_NAMED_DATE_FORMATS.contains(formatName) - || SUPPORTED_NAMED_TIME_FORMATS.contains(formatName) - || SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS.contains(formatName), + assertTrue( + SUPPORTED_NAMED_NUMERIC_FORMATS.contains(formatName) + || SUPPORTED_NAMED_DATETIME_FORMATS.contains(formatName) + || SUPPORTED_NAMED_DATE_FORMATS.contains(formatName) + || SUPPORTED_NAMED_TIME_FORMATS.contains(formatName) + || SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS.contains(formatName), formatName + " not supported"); } @@ -142,17 +137,24 @@ private static Stream getSupportedDatetimeFormats() { public void check_datetime_format_names(FormatNames datetimeFormat) { String camelCaseName = datetimeFormat.getCamelCaseName(); if (camelCaseName != null && !camelCaseName.isEmpty()) { - OpenSearchDateType dateType = - OpenSearchDateType.of(camelCaseName); - assertSame(dateType.getExprType(), TIMESTAMP, camelCaseName - + " does not format to a TIMESTAMP type, instead got " + dateType.getExprType()); + OpenSearchDateType dateType = OpenSearchDateType.of(camelCaseName); + assertSame( + dateType.getExprType(), + TIMESTAMP, + camelCaseName + + " does not format to a TIMESTAMP type, instead got " + + dateType.getExprType()); } String snakeCaseName = datetimeFormat.getSnakeCaseName(); if (snakeCaseName != null && !snakeCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(snakeCaseName); - assertSame(dateType.getExprType(), TIMESTAMP, snakeCaseName - + " does not format to a TIMESTAMP type, instead got " + dateType.getExprType()); + assertSame( + 
dateType.getExprType(), + TIMESTAMP, + snakeCaseName + + " does not format to a TIMESTAMP type, instead got " + + dateType.getExprType()); } else { fail(); } @@ -168,15 +170,19 @@ public void check_date_format_names(FormatNames dateFormat) { String camelCaseName = dateFormat.getCamelCaseName(); if (camelCaseName != null && !camelCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(camelCaseName); - assertSame(dateType.getExprType(), DATE, camelCaseName - + " does not format to a DATE type, instead got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + DATE, + camelCaseName + " does not format to a DATE type, instead got " + dateType.getExprType()); } String snakeCaseName = dateFormat.getSnakeCaseName(); if (snakeCaseName != null && !snakeCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(snakeCaseName); - assertSame(dateType.getExprType(), DATE, snakeCaseName - + " does not format to a DATE type, instead got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + DATE, + snakeCaseName + " does not format to a DATE type, instead got " + dateType.getExprType()); } else { fail(); } @@ -192,15 +198,19 @@ public void check_time_format_names(FormatNames timeFormat) { String camelCaseName = timeFormat.getCamelCaseName(); if (camelCaseName != null && !camelCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(camelCaseName); - assertSame(dateType.getExprType(), TIME, camelCaseName - + " does not format to a TIME type, instead got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + TIME, + camelCaseName + " does not format to a TIME type, instead got " + dateType.getExprType()); } String snakeCaseName = timeFormat.getSnakeCaseName(); if (snakeCaseName != null && !snakeCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(snakeCaseName); - assertSame(dateType.getExprType(), TIME, snakeCaseName - + " does not format to a TIME type, instead 
got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + TIME, + snakeCaseName + " does not format to a TIME type, instead got " + dateType.getExprType()); } else { fail(); } @@ -237,8 +247,7 @@ private static Stream get_format_combinations_for_test() { // D - day of year, N - nano of day Arguments.of(TIMESTAMP, List.of("dd.MM.yyyy N", "uuuu:D:HH:mm"), "custom datetime"), Arguments.of(DATE, List.of("dd.MM.yyyy", "uuuu:D"), "custom date"), - Arguments.of(TIME, List.of("HH:mm", "N"), "custom time") - ); + Arguments.of(TIME, List.of("HH:mm", "N"), "custom time")); } @ParameterizedTest(name = "[{index}] {2}") @@ -258,7 +267,6 @@ public void dont_use_incorrect_format_as_custom() { @Test public void check_if_date_type_compatible() { assertTrue(isDateTypeCompatible(DATE)); - assertFalse(isDateTypeCompatible(OpenSearchDataType.of( - OpenSearchDataType.MappingType.Text))); + assertFalse(isDateTypeCompatible(OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java index 4e7b33f944..fa221bc214 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,22 +23,19 @@ public void compare() { @Test public void equal() { - OpenSearchExprBinaryValue value = - new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); + OpenSearchExprBinaryValue value = new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); assertTrue(value.equal(new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="))); } @Test public 
void value() { - OpenSearchExprBinaryValue value = - new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); + OpenSearchExprBinaryValue value = new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); assertEquals("U29tZSBiaW5hcnkgYmxvYg==", value.value()); } @Test public void type() { - OpenSearchExprBinaryValue value = - new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); + OpenSearchExprBinaryValue value = new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); assertEquals(OpenSearchBinaryType.of(), value.type()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java index 330793a5d6..739b70b1b8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import static com.google.common.collect.ImmutableMap.of; @@ -73,12 +72,12 @@ class OpenSearchExecutionEngineTest { @BeforeEach void setUp() { doAnswer( - invocation -> { - // Run task immediately - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + // Run task immediately + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(client) .schedule(any()); } @@ -91,22 +90,22 @@ void execute_successfully() { FakePhysicalPlan plan = new FakePhysicalPlan(expected.iterator()); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); List actual = new ArrayList<>(); executor.execute( plan, - 
new ResponseListener<>() { - @Override - public void onResponse(QueryResponse response) { - actual.addAll(response.getResults()); - } - - @Override - public void onFailure(Exception e) { - fail("Error occurred during execution", e); - } - }); + new ResponseListener<>() { + @Override + public void onResponse(QueryResponse response) { + actual.addAll(response.getResults()); + } + + @Override + public void onFailure(Exception e) { + fail("Error occurred during execution", e); + } + }); assertTrue(plan.hasOpen); assertEquals(expected, actual); @@ -121,23 +120,23 @@ void execute_with_cursor() { var plan = new FakePhysicalPlan(expected.iterator()); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); List actual = new ArrayList<>(); executor.execute( plan, - new ResponseListener<>() { - @Override - public void onResponse(QueryResponse response) { - actual.addAll(response.getResults()); - assertTrue(response.getCursor().toString().startsWith("n:")); - } - - @Override - public void onFailure(Exception e) { - fail("Error occurred during execution", e); - } - }); + new ResponseListener<>() { + @Override + public void onResponse(QueryResponse response) { + actual.addAll(response.getResults()); + assertTrue(response.getCursor().toString().startsWith("n:")); + } + + @Override + public void onFailure(Exception e) { + fail("Error occurred during execution", e); + } + }); assertEquals(expected, actual); } @@ -149,78 +148,84 @@ void execute_with_failure() { when(plan.hasNext()).thenThrow(expected); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new 
PlanSerializer(null)); AtomicReference actual = new AtomicReference<>(); executor.execute( plan, - new ResponseListener<>() { - @Override - public void onResponse(QueryResponse response) { - fail("Expected error didn't happen"); - } - - @Override - public void onFailure(Exception e) { - actual.set(e); - } - }); + new ResponseListener<>() { + @Override + public void onResponse(QueryResponse response) { + fail("Expected error didn't happen"); + } + + @Override + public void onFailure(Exception e) { + actual.set(e); + } + }); assertEquals(expected, actual.get()); verify(plan).close(); } @Test void explain_successfully() { - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); Settings settings = mock(Settings.class); - when(settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE)) - .thenReturn(TimeValue.timeValueMinutes(1)); + when(settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE)).thenReturn(TimeValue.timeValueMinutes(1)); OpenSearchExprValueFactory exprValueFactory = mock(OpenSearchExprValueFactory.class); final var name = new OpenSearchRequest.IndexName("test"); final int defaultQuerySize = 100; final int maxResultWindow = 10000; final var requestBuilder = new OpenSearchRequestBuilder(defaultQuerySize, exprValueFactory); - PhysicalPlan plan = new OpenSearchIndexScan(mock(OpenSearchClient.class), - maxResultWindow, requestBuilder.build(name, maxResultWindow, - settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE))); + PhysicalPlan plan = + new OpenSearchIndexScan( + mock(OpenSearchClient.class), + maxResultWindow, + requestBuilder.build( + name, maxResultWindow, settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE))); AtomicReference result = new AtomicReference<>(); - executor.explain(plan, new ResponseListener<>() { - @Override - public void onResponse(ExplainResponse response) { - result.set(response); - } - 
- @Override - public void onFailure(Exception e) { - fail(e); - } - }); + executor.explain( + plan, + new ResponseListener<>() { + @Override + public void onResponse(ExplainResponse response) { + result.set(response); + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + }); assertNotNull(result.get()); } @Test void explain_with_failure() { - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); PhysicalPlan plan = mock(PhysicalPlan.class); when(plan.accept(any(), any())).thenThrow(IllegalStateException.class); AtomicReference result = new AtomicReference<>(); - executor.explain(plan, new ResponseListener<>() { - @Override - public void onResponse(ExplainResponse response) { - fail("Should fail as expected"); - } - - @Override - public void onFailure(Exception e) { - result.set(e); - } - }); + executor.explain( + plan, + new ResponseListener<>() { + @Override + public void onResponse(ExplainResponse response) { + fail("Should fail as expected"); + } + + @Override + public void onFailure(Exception e) { + result.set(e); + } + }); assertNotNull(result.get()); } @@ -234,8 +239,8 @@ void call_add_split_and_open_in_order() { when(protector.protect(plan)).thenReturn(plan); when(executionContext.getSplit()).thenReturn(Optional.of(split)); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); List actual = new ArrayList<>(); executor.execute( plan, @@ -266,12 +271,10 @@ private static class FakePhysicalPlan extends TableScanOperator implements Seria private boolean hasSplit; @Override - public void readExternal(ObjectInput in) { - } + public void readExternal(ObjectInput in) {} @Override - public void 
writeExternal(ObjectOutput out) { - } + public void writeExternal(ObjectOutput out) {} @Override public void open() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java index 8dc49aad01..f028f3ea5d 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -17,8 +16,7 @@ @ExtendWith(MockitoExtension.class) class NoopExecutionProtectorTest { - @Mock - private PhysicalPlan plan; + @Mock private PhysicalPlan plan; @Test void protect() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java index fd5e747b5f..b2dc042110 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import static java.util.Collections.emptyList; @@ -74,17 +73,13 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchExecutionProtectorTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private ResourceMonitor resourceMonitor; + @Mock private ResourceMonitor resourceMonitor; - @Mock - private 
OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; - @Mock - private OpenSearchSettings settings; + @Mock private OpenSearchSettings settings; private OpenSearchExecutionProtector executionProtector; @@ -106,8 +101,7 @@ void test_protect_indexScan() { Expression filterExpr = literal(ExprBooleanValue.of(true)); List groupByExprs = List.of(named("age", ref("age", INTEGER))); List aggregators = - List.of(named("avg(age)", new AvgAggregator(List.of(ref("age", INTEGER)), - DOUBLE))); + List.of(named("avg(age)", new AvgAggregator(List.of(ref("age", INTEGER)), DOUBLE))); Map mappings = ImmutableMap.of(ref("name", STRING), ref("lastname", STRING)); Pair newEvalField = @@ -118,9 +112,12 @@ void test_protect_indexScan() { Integer offset = 10; final var name = new OpenSearchRequest.IndexName(indexName); - final var request = new OpenSearchRequestBuilder(querySizeLimit, exprValueFactory) - .build(name, maxResultWindow, - settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)); + final var request = + new OpenSearchRequestBuilder(querySizeLimit, exprValueFactory) + .build( + name, + maxResultWindow, + settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)); assertEquals( PhysicalPlanDSL.project( PhysicalPlanDSL.limit( @@ -134,8 +131,8 @@ void test_protect_indexScan() { PhysicalPlanDSL.agg( filter( resourceMonitor( - new OpenSearchIndexScan(client, - maxResultWindow, request)), + new OpenSearchIndexScan( + client, maxResultWindow, request)), filterExpr), aggregators, groupByExprs), @@ -161,8 +158,8 @@ void test_protect_indexScan() { PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( filter( - new OpenSearchIndexScan(client, - maxResultWindow, request), + new OpenSearchIndexScan( + client, maxResultWindow, request), filterExpr), aggregators, groupByExprs), @@ -189,21 +186,9 @@ void test_protect_sort_for_windowOperator() { new WindowDefinition(emptyList(), ImmutableList.of(sortItem)); assertEquals( - window( - 
resourceMonitor( - sort( - values(emptyList()), - sortItem)), - rank, - windowDefinition), + window(resourceMonitor(sort(values(emptyList()), sortItem)), rank, windowDefinition), executionProtector.protect( - window( - sort( - values(emptyList()), - sortItem - ), - rank, - windowDefinition))); + window(sort(values(emptyList()), sortItem), rank, windowDefinition))); } @Test @@ -212,16 +197,8 @@ void test_protect_windowOperator_input() { WindowDefinition windowDefinition = mock(WindowDefinition.class); assertEquals( - window( - resourceMonitor( - values()), - avg, - windowDefinition), - executionProtector.protect( - window( - values(), - avg, - windowDefinition))); + window(resourceMonitor(values()), avg, windowDefinition), + executionProtector.protect(window(values(), avg, windowDefinition))); } @SuppressWarnings("unchecked") @@ -234,20 +211,9 @@ void test_not_protect_windowOperator_input_if_already_protected() { new WindowDefinition(emptyList(), ImmutableList.of(sortItem)); assertEquals( - window( - resourceMonitor( - sort( - values(emptyList()), - sortItem)), - avg, - windowDefinition), + window(resourceMonitor(sort(values(emptyList()), sortItem)), avg, windowDefinition), executionProtector.protect( - window( - sort( - values(emptyList()), - sortItem), - avg, - windowDefinition))); + window(sort(values(emptyList()), sortItem), avg, windowDefinition))); } @Test @@ -255,85 +221,80 @@ void test_without_protection() { Expression filterExpr = literal(ExprBooleanValue.of(true)); assertEquals( - filter( - filter(null, filterExpr), - filterExpr), - executionProtector.protect( - filter( - filter(null, filterExpr), - filterExpr) - ) - ); + filter(filter(null, filterExpr), filterExpr), + executionProtector.protect(filter(filter(null, filterExpr), filterExpr))); } @Test void test_visitMLcommons() { NodeClient nodeClient = mock(NodeClient.class); MLCommonsOperator mlCommonsOperator = - new MLCommonsOperator( - values(emptyList()), "kmeans", - new HashMap() {{ - 
put("centroids", new Literal(3, DataType.INTEGER)); - put("iterations", new Literal(2, DataType.INTEGER)); - put("distance_type", new Literal(null, DataType.STRING)); - }}, - nodeClient - ); + new MLCommonsOperator( + values(emptyList()), + "kmeans", + new HashMap() { + { + put("centroids", new Literal(3, DataType.INTEGER)); + put("iterations", new Literal(2, DataType.INTEGER)); + put("distance_type", new Literal(null, DataType.STRING)); + } + }, + nodeClient); - assertEquals(executionProtector.doProtect(mlCommonsOperator), - executionProtector.visitMLCommons(mlCommonsOperator, null)); + assertEquals( + executionProtector.doProtect(mlCommonsOperator), + executionProtector.visitMLCommons(mlCommonsOperator, null)); } @Test void test_visitAD() { NodeClient nodeClient = mock(NodeClient.class); ADOperator adOperator = - new ADOperator( - values(emptyList()), - new HashMap() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal(null, DataType.STRING)); - }}, - nodeClient - ); + new ADOperator( + values(emptyList()), + new HashMap() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new Literal(null, DataType.STRING)); + } + }, + nodeClient); - assertEquals(executionProtector.doProtect(adOperator), - executionProtector.visitAD(adOperator, null)); + assertEquals( + executionProtector.doProtect(adOperator), executionProtector.visitAD(adOperator, null)); } @Test void test_visitML() { NodeClient nodeClient = mock(NodeClient.class); MLOperator mlOperator = - new MLOperator( - values(emptyList()), - new HashMap() {{ - put("action", new Literal("train", DataType.STRING)); - put("algorithm", new Literal("rcf", DataType.STRING)); - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal(null, 
DataType.STRING)); - }}, - nodeClient - ); + new MLOperator( + values(emptyList()), + new HashMap() { + { + put("action", new Literal("train", DataType.STRING)); + put("algorithm", new Literal("rcf", DataType.STRING)); + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new Literal(null, DataType.STRING)); + } + }, + nodeClient); - assertEquals(executionProtector.doProtect(mlOperator), - executionProtector.visitML(mlOperator, null)); + assertEquals( + executionProtector.doProtect(mlOperator), executionProtector.visitML(mlOperator, null)); } @Test void test_visitNested() { Set args = Set.of("message.info"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.info")); + Map> groupedFieldsByPath = Map.of("message", List.of("message.info")); NestedOperator nestedOperator = - new NestedOperator( - values(emptyList()), - args, - groupedFieldsByPath); + new NestedOperator(values(emptyList()), args, groupedFieldsByPath); - assertEquals(executionProtector.doProtect(nestedOperator), + assertEquals( + executionProtector.doProtect(nestedOperator), executionProtector.visitNested(nestedOperator, values(emptyList()))); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java index 20d2f633dd..e6d2bac85b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java @@ -52,60 +52,60 @@ @MockitoSettings(strictness = Strictness.LENIENT) @RunWith(MockitoJUnitRunner.Silent.class) public class MLCommonsOperatorTest { - @Mock - private PhysicalPlan input; + @Mock private PhysicalPlan input; - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) 
private NodeClient nodeClient; private MLCommonsOperator mlCommonsOperator; - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private MachineLearningNodeClient machineLearningNodeClient; @BeforeEach void setUp() { Map arguments = new HashMap<>(); - arguments.put("k1",AstDSL.intLiteral(3)); - arguments.put("k2",AstDSL.stringLiteral("v1")); - arguments.put("k3",AstDSL.booleanLiteral(true)); - arguments.put("k4",AstDSL.doubleLiteral(2.0D)); - arguments.put("k5",AstDSL.shortLiteral((short)2)); - arguments.put("k6",AstDSL.longLiteral(2L)); - arguments.put("k7",AstDSL.floatLiteral(2F)); - - - mlCommonsOperator = new MLCommonsOperator(input, "kmeans", arguments, - nodeClient); + arguments.put("k1", AstDSL.intLiteral(3)); + arguments.put("k2", AstDSL.stringLiteral("v1")); + arguments.put("k3", AstDSL.booleanLiteral(true)); + arguments.put("k4", AstDSL.doubleLiteral(2.0D)); + arguments.put("k5", AstDSL.shortLiteral((short) 2)); + arguments.put("k6", AstDSL.longLiteral(2L)); + arguments.put("k7", AstDSL.floatLiteral(2F)); + + mlCommonsOperator = new MLCommonsOperator(input, "kmeans", arguments, nodeClient); when(input.hasNext()).thenReturn(true).thenReturn(false); ImmutableMap.Builder resultBuilder = new ImmutableMap.Builder<>(); resultBuilder.put("k1", new ExprIntegerValue(2)); when(input.next()).thenReturn(ExprTupleValue.fromExprValueMap(resultBuilder.build())); - DataFrame dataFrame = DataFrameBuilder - .load(Collections.singletonList( - ImmutableMap.builder().put("result-k1", 2D) - .put("result-k2", 1) - .put("result-k3", "v3") - .put("result-k4", true) - .put("result-k5", (short)2) - .put("result-k6", 2L) - .put("result-k7", 2F) - .build()) - ); - MLPredictionOutput mlPredictionOutput = MLPredictionOutput.builder() + DataFrame dataFrame = + DataFrameBuilder.load( + Collections.singletonList( + ImmutableMap.builder() + .put("result-k1", 2D) + .put("result-k2", 1) + .put("result-k3", "v3") + .put("result-k4", true) + 
.put("result-k5", (short) 2) + .put("result-k6", 2L) + .put("result-k7", 2F) + .build())); + MLPredictionOutput mlPredictionOutput = + MLPredictionOutput.builder() .taskId("test_task_id") .status("test_status") .predictionResult(dataFrame) .build(); try (MockedStatic mlClientMockedStatic = Mockito.mockStatic(MLClient.class)) { - mlClientMockedStatic.when(() -> MLClient.getMLClient(any(NodeClient.class))) - .thenReturn(machineLearningNodeClient); - when(machineLearningNodeClient.trainAndPredict(any(MLInput.class)) - .actionGet(anyLong(), - eq(TimeUnit.SECONDS))) - .thenReturn(mlPredictionOutput); + mlClientMockedStatic + .when(() -> MLClient.getMLClient(any(NodeClient.class))) + .thenReturn(machineLearningNodeClient); + when(machineLearningNodeClient + .trainAndPredict(any(MLInput.class)) + .actionGet(anyLong(), eq(TimeUnit.SECONDS))) + .thenReturn(mlPredictionOutput); } } @@ -120,17 +120,17 @@ public void testOpen() { @Test public void testAccept() { - PhysicalPlanNodeVisitor physicalPlanNodeVisitor - = new PhysicalPlanNodeVisitor() {}; + PhysicalPlanNodeVisitor physicalPlanNodeVisitor = + new PhysicalPlanNodeVisitor() {}; assertNull(mlCommonsOperator.accept(physicalPlanNodeVisitor, null)); } @Test public void testConvertArgumentToMLParameter_UnsupportedType() { Map argument = new HashMap<>(); - argument.put("k2",AstDSL.dateLiteral("2020-10-31")); - assertThrows(IllegalArgumentException.class, () -> mlCommonsOperator - .convertArgumentToMLParameter(argument, "LINEAR_REGRESSION")); + argument.put("k2", AstDSL.dateLiteral("2020-10-31")); + assertThrows( + IllegalArgumentException.class, + () -> mlCommonsOperator.convertArgumentToMLParameter(argument, "LINEAR_REGRESSION")); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java index 7a73468391..0a3f56285f 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java @@ -57,29 +57,27 @@ @MockitoSettings(strictness = Strictness.LENIENT) @RunWith(MockitoJUnitRunner.Silent.class) public class MLOperatorTest { - @Mock - private PhysicalPlan input; + @Mock private PhysicalPlan input; - @Mock - PlainActionFuture actionFuture; + @Mock PlainActionFuture actionFuture; - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private NodeClient nodeClient; private MLOperator mlOperator; Map arguments = new HashMap<>(); - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private MachineLearningNodeClient machineLearningNodeClient; void setUp(boolean isPredict) { - arguments.put("k1",AstDSL.intLiteral(3)); - arguments.put("k2",AstDSL.stringLiteral("v1")); - arguments.put("k3",AstDSL.booleanLiteral(true)); - arguments.put("k4",AstDSL.doubleLiteral(2.0D)); - arguments.put("k5",AstDSL.shortLiteral((short)2)); - arguments.put("k6",AstDSL.longLiteral(2L)); - arguments.put("k7",AstDSL.floatLiteral(2F)); + arguments.put("k1", AstDSL.intLiteral(3)); + arguments.put("k2", AstDSL.stringLiteral("v1")); + arguments.put("k3", AstDSL.booleanLiteral(true)); + arguments.put("k4", AstDSL.doubleLiteral(2.0D)); + arguments.put("k5", AstDSL.shortLiteral((short) 2)); + arguments.put("k6", AstDSL.longLiteral(2L)); + arguments.put("k7", AstDSL.floatLiteral(2F)); mlOperator = new MLOperator(input, arguments, nodeClient); when(input.hasNext()).thenReturn(true).thenReturn(false); @@ -87,49 +85,50 @@ void setUp(boolean isPredict) { resultBuilder.put("k1", new ExprIntegerValue(2)); when(input.next()).thenReturn(ExprTupleValue.fromExprValueMap(resultBuilder.build())); - DataFrame dataFrame = DataFrameBuilder - .load(Collections.singletonList( - ImmutableMap.builder().put("result-k1", 2D) - .put("result-k2", 1) 
- .put("result-k3", "v3") - .put("result-k4", true) - .put("result-k5", (short)2) - .put("result-k6", 2L) - .put("result-k7", 2F) - .build()) - ); + DataFrame dataFrame = + DataFrameBuilder.load( + Collections.singletonList( + ImmutableMap.builder() + .put("result-k1", 2D) + .put("result-k2", 1) + .put("result-k3", "v3") + .put("result-k4", true) + .put("result-k5", (short) 2) + .put("result-k6", 2L) + .put("result-k7", 2F) + .build())); MLOutput mlOutput; if (isPredict) { - mlOutput = MLPredictionOutput.builder() + mlOutput = + MLPredictionOutput.builder() .taskId("test_task_id") .status("test_status") .predictionResult(dataFrame) .build(); } else { - mlOutput = MLTrainingOutput.builder() + mlOutput = + MLTrainingOutput.builder() .taskId("test_task_id") .status("test_status") .modelId("test_model_id") .build(); } - when(actionFuture.actionGet(anyLong(), eq(TimeUnit.SECONDS))) - .thenReturn(mlOutput); - when(machineLearningNodeClient.run(any(MLInput.class), any())) - .thenReturn(actionFuture); + when(actionFuture.actionGet(anyLong(), eq(TimeUnit.SECONDS))).thenReturn(mlOutput); + when(machineLearningNodeClient.run(any(MLInput.class), any())).thenReturn(actionFuture); } void setUpPredict() { - arguments.put(ACTION,AstDSL.stringLiteral(PREDICT)); - arguments.put(ALGO,AstDSL.stringLiteral(KMEANS)); - arguments.put("modelid",AstDSL.stringLiteral("dummyID")); + arguments.put(ACTION, AstDSL.stringLiteral(PREDICT)); + arguments.put(ALGO, AstDSL.stringLiteral(KMEANS)); + arguments.put("modelid", AstDSL.stringLiteral("dummyID")); setUp(true); } void setUpTrain() { - arguments.put(ACTION,AstDSL.stringLiteral(TRAIN)); - arguments.put(ALGO,AstDSL.stringLiteral(KMEANS)); + arguments.put(ACTION, AstDSL.stringLiteral(TRAIN)); + arguments.put(ALGO, AstDSL.stringLiteral(KMEANS)); setUp(false); } @@ -162,10 +161,9 @@ public void testAccept() { setUpPredict(); try (MockedStatic mlClientMockedStatic = Mockito.mockStatic(MLClient.class)) { 
when(MLClient.getMLClient(any(NodeClient.class))).thenReturn(machineLearningNodeClient); - PhysicalPlanNodeVisitor physicalPlanNodeVisitor - = new PhysicalPlanNodeVisitor() {}; + PhysicalPlanNodeVisitor physicalPlanNodeVisitor = + new PhysicalPlanNodeVisitor() {}; assertNull(mlOperator.accept(physicalPlanNodeVisitor, null)); } } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java index a720c2a266..8f954b68b2 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.hamcrest.MatcherAssert.assertThat; @@ -25,8 +24,7 @@ @ExtendWith(MockitoExtension.class) class OpenSearchCatIndicesRequestTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; @Test void testSearch() { @@ -34,14 +32,12 @@ void testSearch() { final List results = new OpenSearchCatIndicesRequest(client).search(); assertEquals(1, results.size()); - assertThat(results.get(0).tupleValue(), anyOf( - hasEntry("TABLE_NAME", stringValue("index")) - )); + assertThat(results.get(0).tupleValue(), anyOf(hasEntry("TABLE_NAME", stringValue("index")))); } @Test void testToString() { - assertEquals("OpenSearchCatIndicesRequest{}", - new OpenSearchCatIndicesRequest(client).toString()); + assertEquals( + "OpenSearchCatIndicesRequest{}", new OpenSearchCatIndicesRequest(client).toString()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java index c19b3a3ccd..59ece9bfbc 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.hamcrest.MatcherAssert.assertThat; @@ -28,46 +27,47 @@ @ExtendWith(MockitoExtension.class) class OpenSearchDescribeIndexRequestTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private IndexMapping mapping; + @Mock private IndexMapping mapping; @Test void testSearch() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); when(client.getIndexMappings("index")).thenReturn(ImmutableMap.of("test", mapping)); final List results = new OpenSearchDescribeIndexRequest(client, "index").search(); assertEquals(1, results.size()); - assertThat(results.get(0).tupleValue(), anyOf( - hasEntry("TABLE_NAME", stringValue("index")), - hasEntry("COLUMN_NAME", stringValue("name")), - hasEntry("TYPE_NAME", stringValue("STRING")) - )); + assertThat( + results.get(0).tupleValue(), + anyOf( + hasEntry("TABLE_NAME", stringValue("index")), + hasEntry("COLUMN_NAME", stringValue("name")), + hasEntry("TYPE_NAME", stringValue("STRING")))); } @Test void testCrossClusterShouldSearchLocal() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); 
when(client.getIndexMappings("index")).thenReturn(ImmutableMap.of("test", mapping)); final List results = new OpenSearchDescribeIndexRequest(client, "ccs:index").search(); assertEquals(1, results.size()); - assertThat(results.get(0).tupleValue(), anyOf( - hasEntry("TABLE_NAME", stringValue("index")), - hasEntry("COLUMN_NAME", stringValue("name")), - hasEntry("TYPE_NAME", stringValue("STRING")) - )); + assertThat( + results.get(0).tupleValue(), + anyOf( + hasEntry("TABLE_NAME", stringValue("index")), + hasEntry("COLUMN_NAME", stringValue("name")), + hasEntry("TYPE_NAME", stringValue("STRING")))); } @Test void testToString() { - assertEquals("OpenSearchDescribeIndexRequest{indexName='index'}", + assertEquals( + "OpenSearchDescribeIndexRequest{indexName='index'}", new OpenSearchDescribeIndexRequest(client, "index").toString()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java index bbc462e980..76148b9395 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import com.fasterxml.jackson.core.JsonFactory; @@ -52,37 +51,45 @@ public class AggregationResponseUtils { private static final List entryList = - new ImmutableMap.Builder>().put( - MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) - .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) - .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) - .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) - .put(ExtendedStatsAggregationBuilder.NAME, - (p, c) -> ParsedExtendedStats.fromXContent(p, (String) 
c)) - .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) - .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) - .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) - .put(ValueCountAggregationBuilder.NAME, - (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) - .put(PercentilesBucketPipelineAggregationBuilder.NAME, - (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) - .put(DateHistogramAggregationBuilder.NAME, - (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) - .put(HistogramAggregationBuilder.NAME, - (p, c) -> ParsedHistogram.fromXContent(p, (String) c)) - .put(CompositeAggregationBuilder.NAME, - (p, c) -> ParsedComposite.fromXContent(p, (String) c)) - .put(FilterAggregationBuilder.NAME, - (p, c) -> ParsedFilter.fromXContent(p, (String) c)) - .put(TopHitsAggregationBuilder.NAME, - (p, c) -> ParsedTopHits.fromXContent(p, (String) c)) - .build() - .entrySet() - .stream() - .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, - new ParseField(entry.getKey()), - entry.getValue())) - .collect(Collectors.toList()); + new ImmutableMap.Builder>() + .put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) + .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) + .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) + .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) + .put( + ExtendedStatsAggregationBuilder.NAME, + (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c)) + .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) + .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) + .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) + .put( + ValueCountAggregationBuilder.NAME, + (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) + .put( + 
PercentilesBucketPipelineAggregationBuilder.NAME, + (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) + .put( + DateHistogramAggregationBuilder.NAME, + (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) + .put( + HistogramAggregationBuilder.NAME, + (p, c) -> ParsedHistogram.fromXContent(p, (String) c)) + .put( + CompositeAggregationBuilder.NAME, + (p, c) -> ParsedComposite.fromXContent(p, (String) c)) + .put( + FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c)) + .put( + TopHitsAggregationBuilder.NAME, + (p, c) -> ParsedTopHits.fromXContent(p, (String) c)) + .build() + .entrySet() + .stream() + .map( + entry -> + new NamedXContentRegistry.Entry( + Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); private static final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(entryList); @@ -94,10 +101,11 @@ public class AggregationResponseUtils { */ public static Aggregations fromJson(String json) { try { - XContentParser contentParser = new JsonXContentParser( - namedXContentRegistry, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(json)); + XContentParser contentParser = + new JsonXContentParser( + namedXContentRegistry, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(json)); contentParser.nextToken(); return Aggregations.fromXContent(contentParser); } catch (IOException e) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java index 318110bdde..7ed6c900dd 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static org.hamcrest.MatcherAssert.assertThat; @@ -34,127 +33,125 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchAggregationResponseParserTest { - /** - * SELECT MAX(age) as max FROM accounts. - */ + /** SELECT MAX(age) as max FROM accounts. */ @Test void no_bucket_one_metric_should_pass() { - String response = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; - NoBucketAggregationParser parser = new NoBucketAggregationParser( - new SingleValueParser("max") - ); + String response = + "{\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " }\n" + + "}"; + NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); assertThat(parse(parser, response), contains(entry("max", 40d))); } - /** - * SELECT MAX(age) as max, MIN(age) as min FROM accounts. - */ + /** SELECT MAX(age) as max, MIN(age) as min FROM accounts. 
*/ @Test void no_bucket_two_metric_should_pass() { - String response = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " },\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " }\n" - + "}"; - NoBucketAggregationParser parser = new NoBucketAggregationParser( - new SingleValueParser("max"), - new SingleValueParser("min") - ); - assertThat(parse(parser, response), - contains(entry("max", 40d,"min", 20d))); + String response = + "{\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " },\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " }\n" + + "}"; + NoBucketAggregationParser parser = + new NoBucketAggregationParser(new SingleValueParser("max"), new SingleValueParser("min")); + assertThat(parse(parser, response), contains(entry("max", 40d, "min", 20d))); } @Test void one_bucket_one_metric_should_pass() { - String response = "{\n" - + " \"composite#composite_buckets\": {\n" - + " \"after_key\": {\n" - + " \"type\": \"sale\"\n" - + " },\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"cost\"\n" - + " },\n" - + " \"doc_count\": 2,\n" - + " \"avg#avg\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"sale\"\n" - + " },\n" - + " \"doc_count\": 2,\n" - + " \"avg#avg\": {\n" - + " \"value\": 105\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; + String response = + "{\n" + + " \"composite#composite_buckets\": {\n" + + " \"after_key\": {\n" + + " \"type\": \"sale\"\n" + + " },\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"cost\"\n" + + " },\n" + + " \"doc_count\": 2,\n" + + " \"avg#avg\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"sale\"\n" + + " },\n" + + " \"doc_count\": 2,\n" + + " \"avg#avg\": {\n" + + " \"value\": 105\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; - OpenSearchAggregationResponseParser parser = new CompositeAggregationParser( - new 
SingleValueParser("avg")); - assertThat(parse(parser, response), - containsInAnyOrder(ImmutableMap.of("type", "cost", "avg", 20d), + OpenSearchAggregationResponseParser parser = + new CompositeAggregationParser(new SingleValueParser("avg")); + assertThat( + parse(parser, response), + containsInAnyOrder( + ImmutableMap.of("type", "cost", "avg", 20d), ImmutableMap.of("type", "sale", "avg", 105d))); } @Test void two_bucket_one_metric_should_pass() { - String response = "{\n" - + " \"composite#composite_buckets\": {\n" - + " \"after_key\": {\n" - + " \"type\": \"sale\",\n" - + " \"region\": \"us\"\n" - + " },\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"cost\",\n" - + " \"region\": \"us\"\n" - + " },\n" - + " \"avg#avg\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"sale\",\n" - + " \"region\": \"uk\"\n" - + " },\n" - + " \"avg#avg\": {\n" - + " \"value\": 130\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - OpenSearchAggregationResponseParser parser = new CompositeAggregationParser( - new SingleValueParser("avg")); - assertThat(parse(parser, response), - containsInAnyOrder(ImmutableMap.of("type", "cost", "region", "us", "avg", 20d), + String response = + "{\n" + + " \"composite#composite_buckets\": {\n" + + " \"after_key\": {\n" + + " \"type\": \"sale\",\n" + + " \"region\": \"us\"\n" + + " },\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"cost\",\n" + + " \"region\": \"us\"\n" + + " },\n" + + " \"avg#avg\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"sale\",\n" + + " \"region\": \"uk\"\n" + + " },\n" + + " \"avg#avg\": {\n" + + " \"value\": 130\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + OpenSearchAggregationResponseParser parser = + new CompositeAggregationParser(new SingleValueParser("avg")); + assertThat( + parse(parser, response), + containsInAnyOrder( + 
ImmutableMap.of("type", "cost", "region", "us", "avg", 20d), ImmutableMap.of("type", "sale", "region", "uk", "avg", 130d))); } @Test void unsupported_aggregation_should_fail() { - String response = "{\n" - + " \"date_histogram#date_histogram\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; - NoBucketAggregationParser parser = new NoBucketAggregationParser( - new SingleValueParser("max") - ); + String response = + "{\n" + + " \"date_histogram#date_histogram\": {\n" + + " \"value\": 40\n" + + " }\n" + + "}"; + NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); RuntimeException exception = assertThrows(RuntimeException.class, () -> parse(parser, response)); assertEquals( @@ -170,14 +167,15 @@ void nan_value_should_return_null() { @Test void filter_aggregation_should_pass() { - String response = "{\n" - + " \"filter#filtered\" : {\n" - + " \"doc_count\" : 3,\n" - + " \"avg#filtered\" : {\n" - + " \"value\" : 37.0\n" - + " }\n" - + " }\n" - + " }"; + String response = + "{\n" + + " \"filter#filtered\" : {\n" + + " \"doc_count\" : 3,\n" + + " \"avg#filtered\" : {\n" + + " \"value\" : 37.0\n" + + " }\n" + + " }\n" + + " }"; OpenSearchAggregationResponseParser parser = new NoBucketAggregationParser( FilterParser.builder() @@ -189,132 +187,134 @@ void filter_aggregation_should_pass() { @Test void filter_aggregation_group_by_should_pass() { - String response = "{\n" - + " \"composite#composite_buckets\":{\n" - + " \"after_key\":{\n" - + " \"gender\":\"m\"\n" - + " },\n" - + " \"buckets\":[\n" - + " {\n" - + " \"key\":{\n" - + " \"gender\":\"f\"\n" - + " },\n" - + " \"doc_count\":3,\n" - + " \"filter#filter\":{\n" - + " \"doc_count\":1,\n" - + " \"avg#avg\":{\n" - + " \"value\":39.0\n" - + " }\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\":{\n" - + " \"gender\":\"m\"\n" - + " },\n" - + " \"doc_count\":4,\n" - + " \"filter#filter\":{\n" - + " \"doc_count\":2,\n" - + " \"avg#avg\":{\n" - + " \"value\":36.0\n" - + " }\n" - + " 
}\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - OpenSearchAggregationResponseParser parser = new CompositeAggregationParser( - FilterParser.builder() - .name("filter") - .metricsParser(new SingleValueParser("avg")) - .build() - ); - assertThat(parse(parser, response), containsInAnyOrder( - entry("gender", "f", "avg", 39.0), - entry("gender", "m", "avg", 36.0))); + String response = + "{\n" + + " \"composite#composite_buckets\":{\n" + + " \"after_key\":{\n" + + " \"gender\":\"m\"\n" + + " },\n" + + " \"buckets\":[\n" + + " {\n" + + " \"key\":{\n" + + " \"gender\":\"f\"\n" + + " },\n" + + " \"doc_count\":3,\n" + + " \"filter#filter\":{\n" + + " \"doc_count\":1,\n" + + " \"avg#avg\":{\n" + + " \"value\":39.0\n" + + " }\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\":{\n" + + " \"gender\":\"m\"\n" + + " },\n" + + " \"doc_count\":4,\n" + + " \"filter#filter\":{\n" + + " \"doc_count\":2,\n" + + " \"avg#avg\":{\n" + + " \"value\":36.0\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + OpenSearchAggregationResponseParser parser = + new CompositeAggregationParser( + FilterParser.builder() + .name("filter") + .metricsParser(new SingleValueParser("avg")) + .build()); + assertThat( + parse(parser, response), + containsInAnyOrder(entry("gender", "f", "avg", 39.0), entry("gender", "m", "avg", 36.0))); } - /** - * SELECT MAX(age) as max, STDDEV(age) as min FROM accounts. - */ + /** SELECT MAX(age) as max, STDDEV(age) as min FROM accounts. 
*/ @Test void no_bucket_max_and_extended_stats() { - String response = "{\n" - + " \"extended_stats#esField\": {\n" - + " \"count\": 2033,\n" - + " \"min\": 0,\n" - + " \"max\": 360,\n" - + " \"avg\": 45.47958681751107,\n" - + " \"sum\": 92460,\n" - + " \"sum_of_squares\": 22059450,\n" - + " \"variance\": 8782.295820390027,\n" - + " \"variance_population\": 8782.295820390027,\n" - + " \"variance_sampling\": 8786.61781636463,\n" - + " \"std_deviation\": 93.71390409320287,\n" - + " \"std_deviation_population\": 93.71390409320287,\n" - + " \"std_deviation_sampling\": 93.73696078049805,\n" - + " \"std_deviation_bounds\": {\n" - + " \"upper\": 232.9073950039168,\n" - + " \"lower\": -141.94822136889468,\n" - + " \"upper_population\": 232.9073950039168,\n" - + " \"lower_population\": -141.94822136889468,\n" - + " \"upper_sampling\": 232.95350837850717,\n" - + " \"lower_sampling\": -141.99433474348504\n" - + " }\n" - + " },\n" - + " \"max#maxField\": {\n" - + " \"value\": 360\n" - + " }\n" - + "}"; + String response = + "{\n" + + " \"extended_stats#esField\": {\n" + + " \"count\": 2033,\n" + + " \"min\": 0,\n" + + " \"max\": 360,\n" + + " \"avg\": 45.47958681751107,\n" + + " \"sum\": 92460,\n" + + " \"sum_of_squares\": 22059450,\n" + + " \"variance\": 8782.295820390027,\n" + + " \"variance_population\": 8782.295820390027,\n" + + " \"variance_sampling\": 8786.61781636463,\n" + + " \"std_deviation\": 93.71390409320287,\n" + + " \"std_deviation_population\": 93.71390409320287,\n" + + " \"std_deviation_sampling\": 93.73696078049805,\n" + + " \"std_deviation_bounds\": {\n" + + " \"upper\": 232.9073950039168,\n" + + " \"lower\": -141.94822136889468,\n" + + " \"upper_population\": 232.9073950039168,\n" + + " \"lower_population\": -141.94822136889468,\n" + + " \"upper_sampling\": 232.95350837850717,\n" + + " \"lower_sampling\": -141.99433474348504\n" + + " }\n" + + " },\n" + + " \"max#maxField\": {\n" + + " \"value\": 360\n" + + " }\n" + + "}"; - NoBucketAggregationParser parser = 
new NoBucketAggregationParser( - new SingleValueParser("maxField"), - new StatsParser(ExtendedStats::getStdDeviation, "esField") - ); - assertThat(parse(parser, response), - contains(entry("esField", 93.71390409320287, "maxField", 360D))); + NoBucketAggregationParser parser = + new NoBucketAggregationParser( + new SingleValueParser("maxField"), + new StatsParser(ExtendedStats::getStdDeviation, "esField")); + assertThat( + parse(parser, response), contains(entry("esField", 93.71390409320287, "maxField", 360D))); } @Test void top_hits_aggregation_should_pass() { - String response = "{\n" - + " \"composite#composite_buckets\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"take\"\n" - + " },\n" - + " \"doc_count\": 2,\n" - + " \"top_hits#take\": {\n" - + " \"hits\": {\n" - + " \"total\": { \"value\": 2, \"relation\": \"eq\" },\n" - + " \"max_score\": 1.0,\n" - + " \"hits\": [\n" - + " {\n" - + " \"_index\": \"accounts\",\n" - + " \"_id\": \"1\",\n" - + " \"_score\": 1.0,\n" - + " \"_source\": {\n" - + " \"gender\": \"m\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"_index\": \"accounts\",\n" - + " \"_id\": \"2\",\n" - + " \"_score\": 1.0,\n" - + " \"_source\": {\n" - + " \"gender\": \"f\"\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; + String response = + "{\n" + + " \"composite#composite_buckets\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"take\"\n" + + " },\n" + + " \"doc_count\": 2,\n" + + " \"top_hits#take\": {\n" + + " \"hits\": {\n" + + " \"total\": { \"value\": 2, \"relation\": \"eq\" },\n" + + " \"max_score\": 1.0,\n" + + " \"hits\": [\n" + + " {\n" + + " \"_index\": \"accounts\",\n" + + " \"_id\": \"1\",\n" + + " \"_score\": 1.0,\n" + + " \"_source\": {\n" + + " \"gender\": \"m\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"_index\": \"accounts\",\n" + + " \"_id\": \"2\",\n" + + " \"_score\": 1.0,\n" + + " \"_source\": {\n" + + " 
\"gender\": \"f\"\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; OpenSearchAggregationResponseParser parser = new CompositeAggregationParser(new TopHitsParser("take")); - assertThat(parse(parser, response), + assertThat( + parse(parser, response), contains(ImmutableMap.of("type", "take", "take", ImmutableList.of("m", "f")))); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java index c3ae5d139d..eb759233a8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import static org.junit.jupiter.api.Assertions.assertFalse; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java index ac0d46938a..90268502c2 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -20,45 +19,49 @@ class ErrorMessageTest { @Test public void testToString() { ErrorMessage errorMessage = - new ErrorMessage(new IllegalStateException("illegal state"), - SERVICE_UNAVAILABLE.getStatus()); - assertEquals("{\n" - + " \"error\": {\n" - + " \"reason\": \"There was internal problem at backend\",\n" - + " \"details\": \"illegal state\",\n" - + " \"type\": 
\"IllegalStateException\"\n" - + " },\n" - + " \"status\": 503\n" - + "}", errorMessage.toString()); + new ErrorMessage( + new IllegalStateException("illegal state"), SERVICE_UNAVAILABLE.getStatus()); + assertEquals( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"illegal state\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}", + errorMessage.toString()); } @Test public void testBadRequestToString() { ErrorMessage errorMessage = - new ErrorMessage(new IllegalStateException(), - BAD_REQUEST.getStatus()); - assertEquals("{\n" - + " \"error\": {\n" - + " \"reason\": \"Invalid Query\",\n" - + " \"details\": \"\",\n" - + " \"type\": \"IllegalStateException\"\n" - + " },\n" - + " \"status\": 400\n" - + "}", errorMessage.toString()); + new ErrorMessage(new IllegalStateException(), BAD_REQUEST.getStatus()); + assertEquals( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid Query\",\n" + + " \"details\": \"\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}", + errorMessage.toString()); } @Test public void testToStringWithEmptyErrorMessage() { ErrorMessage errorMessage = - new ErrorMessage(new IllegalStateException(), - SERVICE_UNAVAILABLE.getStatus()); - assertEquals("{\n" - + " \"error\": {\n" - + " \"reason\": \"There was internal problem at backend\",\n" - + " \"details\": \"\",\n" - + " \"type\": \"IllegalStateException\"\n" - + " },\n" - + " \"status\": 503\n" - + "}", errorMessage.toString()); + new ErrorMessage(new IllegalStateException(), SERVICE_UNAVAILABLE.getStatus()); + assertEquals( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}", + errorMessage.toString()); } } diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java index 3dcb38a558..f07b5dfdd3 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,14 +20,11 @@ @ExtendWith(MockitoExtension.class) class OpenSearchErrorMessageTest { - @Mock - private OpenSearchException openSearchException; + @Mock private OpenSearchException openSearchException; - @Mock - private SearchPhaseExecutionException searchPhaseExecutionException; + @Mock private SearchPhaseExecutionException searchPhaseExecutionException; - @Mock - private ShardSearchFailure shardSearchFailure; + @Mock private ShardSearchFailure shardSearchFailure; @Test public void fetchReason() { @@ -45,7 +41,8 @@ public void fetchDetailsWithOpenSearchException() { OpenSearchErrorMessage errorMessage = new OpenSearchErrorMessage(openSearchException, SERVICE_UNAVAILABLE.getStatus()); - assertEquals("detail error\n" + assertEquals( + "detail error\n" + "For more details, please send request for " + "Json format to see the raw response from OpenSearch engine.", errorMessage.fetchDetails()); @@ -59,9 +56,9 @@ public void fetchDetailsWithSearchPhaseExecutionException() { when(shardSearchFailure.getCause()).thenReturn(new IllegalStateException("illegal state")); OpenSearchErrorMessage errorMessage = - new OpenSearchErrorMessage(searchPhaseExecutionException, - SERVICE_UNAVAILABLE.getStatus()); - assertEquals("Shard[1]: java.lang.IllegalStateException: illegal state\n" + new OpenSearchErrorMessage(searchPhaseExecutionException, 
SERVICE_UNAVAILABLE.getStatus()); + assertEquals( + "Shard[1]: java.lang.IllegalStateException: illegal state\n" + "\n" + "For more details, please send request for Json format to see the " + "raw response from OpenSearch engine.", diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java index 1e44345576..85d0a4e94f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -23,13 +22,11 @@ @ExtendWith(MockitoExtension.class) public class OpenSearchDefaultImplementorTest { - @Mock - OpenSearchClient client; + @Mock OpenSearchClient client; @Test public void visitMachineLearning() { - LogicalMLCommons node = Mockito.mock(LogicalMLCommons.class, - Answers.RETURNS_DEEP_STUBS); + LogicalMLCommons node = Mockito.mock(LogicalMLCommons.class, Answers.RETURNS_DEEP_STUBS); Mockito.when(node.getChild().get(0)).thenReturn(Mockito.mock(LogicalPlan.class)); OpenSearchIndex.OpenSearchDefaultImplementor implementor = new OpenSearchIndex.OpenSearchDefaultImplementor(client); @@ -38,8 +35,7 @@ public void visitMachineLearning() { @Test public void visitAD() { - LogicalAD node = Mockito.mock(LogicalAD.class, - Answers.RETURNS_DEEP_STUBS); + LogicalAD node = Mockito.mock(LogicalAD.class, Answers.RETURNS_DEEP_STUBS); Mockito.when(node.getChild().get(0)).thenReturn(Mockito.mock(LogicalPlan.class)); OpenSearchIndex.OpenSearchDefaultImplementor implementor = new OpenSearchIndex.OpenSearchDefaultImplementor(client); @@ -48,11 +44,10 @@ public void visitAD() { @Test public void visitML() { - LogicalML node 
= Mockito.mock(LogicalML.class, - Answers.RETURNS_DEEP_STUBS); + LogicalML node = Mockito.mock(LogicalML.class, Answers.RETURNS_DEEP_STUBS); Mockito.when(node.getChild().get(0)).thenReturn(Mockito.mock(LogicalPlan.class)); OpenSearchIndex.OpenSearchDefaultImplementor implementor = - new OpenSearchIndex.OpenSearchDefaultImplementor(client); + new OpenSearchIndex.OpenSearchDefaultImplementor(client); assertNotNull(implementor.visitML(node, null)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java index 3d497c2f5b..63710e57aa 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import static java.util.Collections.emptyMap; @@ -34,8 +33,7 @@ @ExtendWith(MockitoExtension.class) class ExpressionScriptEngineTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private ScriptEngine scriptEngine; @@ -55,19 +53,20 @@ void should_return_custom_script_language_name() { void can_initialize_filter_script_factory_by_compiled_script() { when(serializer.deserialize("test code")).thenReturn(expression); - assertThat(scriptEngine.getSupportedContexts(), + assertThat( + scriptEngine.getSupportedContexts(), contains(FilterScript.CONTEXT, AggregationScript.CONTEXT)); - Object actualFactory = scriptEngine.compile( - "test", "test code", FilterScript.CONTEXT, emptyMap()); + Object actualFactory = + scriptEngine.compile("test", "test code", FilterScript.CONTEXT, emptyMap()); assertEquals(new ExpressionFilterScriptFactory(expression), actualFactory); } @Test void 
should_throw_exception_for_unsupported_script_context() { ScriptContext unknownCtx = mock(ScriptContext.class); - assertThrows(IllegalStateException.class, () -> - scriptEngine.compile("test", "test code", unknownCtx, emptyMap())); + assertThrows( + IllegalStateException.class, + () -> scriptEngine.compile("test", "test code", unknownCtx, emptyMap())); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java index c76567c1e9..6485dce124 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static org.hamcrest.MatcherAssert.assertThat; @@ -61,8 +60,7 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) class AggregationQueryBuilderTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private AggregationQueryBuilder queryBuilder; @@ -73,31 +71,32 @@ void set_up() { @Test void should_build_composite_aggregation_for_field_reference() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - 
+ "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), @@ -106,388 +105,415 @@ void should_build_composite_aggregation_for_field_reference() { @Test void should_build_composite_aggregation_for_field_reference_with_order() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"last\",%n" - + " \"order\" : \"desc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"last\",%n" + + " \"order\" : \"desc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), 
INTEGER))), Arrays.asList(named("name", ref("name", STRING))), - sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC) - )); + sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC))); } @Test void should_build_type_mapping_for_field_reference() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), + buildTypeMapping( + Arrays.asList( + named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), Arrays.asList(named("name", ref("name", STRING)))), containsInAnyOrder( map("avg(age)", OpenSearchDataType.of(INTEGER)), - map("name", OpenSearchDataType.of(STRING)) - )); + map("name", OpenSearchDataType.of(STRING)))); } @Test void should_build_type_mapping_for_datetime_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(datetime)", + buildTypeMapping( + Arrays.asList( + named( + "avg(datetime)", new AvgAggregator(Arrays.asList(ref("datetime", DATETIME)), DATETIME))), Arrays.asList(named("datetime", ref("datetime", DATETIME)))), containsInAnyOrder( map("avg(datetime)", OpenSearchDateType.of(DATETIME)), - map("datetime", OpenSearchDateType.of(DATETIME)) - )); + map("datetime", OpenSearchDateType.of(DATETIME)))); } @Test void should_build_type_mapping_for_timestamp_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(timestamp)", + buildTypeMapping( + Arrays.asList( + named( + "avg(timestamp)", new AvgAggregator(Arrays.asList(ref("timestamp", TIMESTAMP)), TIMESTAMP))), Arrays.asList(named("timestamp", ref("timestamp", TIMESTAMP)))), containsInAnyOrder( map("avg(timestamp)", OpenSearchDateType.of()), - map("timestamp", OpenSearchDateType.of()) - )); + map("timestamp", OpenSearchDateType.of()))); } @Test void should_build_type_mapping_for_date_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(date)", - new AvgAggregator(Arrays.asList(ref("date", DATE)), DATE))), + buildTypeMapping( + Arrays.asList( + named("avg(date)", new 
AvgAggregator(Arrays.asList(ref("date", DATE)), DATE))), Arrays.asList(named("date", ref("date", DATE)))), containsInAnyOrder( map("avg(date)", OpenSearchDateType.of(DATE)), - map("date", OpenSearchDateType.of(DATE)) - )); + map("date", OpenSearchDateType.of(DATE)))); } @Test void should_build_type_mapping_for_time_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(time)", - new AvgAggregator(Arrays.asList(ref("time", TIME)), TIME))), + buildTypeMapping( + Arrays.asList( + named("avg(time)", new AvgAggregator(Arrays.asList(ref("time", TIME)), TIME))), Arrays.asList(named("time", ref("time", TIME)))), containsInAnyOrder( map("avg(time)", OpenSearchDateType.of(TIME)), - map("time", OpenSearchDateType.of(TIME)) - )); + map("time", OpenSearchDateType.of(TIME)))); } @Test void should_build_composite_aggregation_for_field_reference_of_keyword() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name.keyword\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name.keyword\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( 
named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), - Arrays.asList(named("name", ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))))))); + Arrays.asList( + named( + "name", + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Keyword)))))))); } @Test void should_build_type_mapping_for_field_reference_of_keyword() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), + buildTypeMapping( + Arrays.asList( + named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), Arrays.asList(named("name", ref("name", STRING)))), containsInAnyOrder( map("avg(age)", OpenSearchDataType.of(INTEGER)), - map("name", OpenSearchDataType.of(STRING)) - )); + map("name", OpenSearchDataType.of(STRING)))); } @Test void should_build_composite_aggregation_for_expression() { - doAnswer(invocation -> { - Expression expr = invocation.getArgument(0); - return expr.toString(); - }).when(serializer).serialize(any()); - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"age\" : {%n" - + " \"terms\" : {%n" - + " \"script\" : {%n" - + " \"source\" : \"asin(age)\",%n" - + " \"lang\" : \"opensearch_query_expression\"%n" - + " },%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(balance)\" : {%n" - + " \"avg\" : {%n" - + " \"script\" : {%n" - + " \"source\" : \"abs(balance)\",%n" - + " \"lang\" : \"opensearch_query_expression\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + doAnswer( + invocation -> { + Expression expr = invocation.getArgument(0); + return expr.toString(); + 
}) + .when(serializer) + .serialize(any()); + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"age\" : {%n" + + " \"terms\" : {%n" + + " \"script\" : {%n" + + " \"source\" : \"asin(age)\",%n" + + " \"lang\" : \"opensearch_query_expression\"%n" + + " },%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(balance)\" : {%n" + + " \"avg\" : {%n" + + " \"script\" : {%n" + + " \"source\" : \"abs(balance)\",%n" + + " \"lang\" : \"opensearch_query_expression\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), Arrays.asList(named("age", DSL.asin(ref("age", INTEGER)))))); } @Test void should_build_composite_aggregation_follow_with_order_by_position() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"last\",%n" - + " \"order\" : \"desc\"%n" - + " }%n" - + " }%n" - + " }, {%n" - + " \"age\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(balance)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"balance\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " 
\"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"last\",%n" + + " \"order\" : \"desc\"%n" + + " }%n" + + " }%n" + + " }, {%n" + + " \"age\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(balance)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"balance\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( agg(named("avg(balance)", avg(ref("balance", INTEGER), INTEGER))), group(named("age", ref("age", INTEGER)), named("name", ref("name", STRING))), - sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC, - ref("age", INTEGER), Sort.SortOption.DEFAULT_ASC) - )); + sort( + ref("name", STRING), + Sort.SortOption.DEFAULT_DESC, + ref("age", INTEGER), + Sort.SortOption.DEFAULT_ASC))); } @Test void should_build_type_mapping_for_expression() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), + buildTypeMapping( + Arrays.asList( + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), Arrays.asList(named("age", DSL.asin(ref("age", INTEGER))))), containsInAnyOrder( map("avg(balance)", OpenSearchDataType.of(INTEGER)), - map("age", OpenSearchDataType.of(DOUBLE)) - )); + map("age", OpenSearchDataType.of(DOUBLE)))); } @Test void should_build_aggregation_without_bucket() { - assertEquals(format( - "{%n" - + " \"avg(balance)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"balance\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"avg(balance)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"balance\"%n" + + " }%n" + 
+ " }%n" + + "}"), buildQuery( Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(ref("balance", INTEGER)), INTEGER))), + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(ref("balance", INTEGER)), INTEGER))), Collections.emptyList())); } @Test void should_build_filter_aggregation() { - assertEquals(format( - "{%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 20,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 20,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Arrays.asList(named("avg(age) filter(where age > 34)", - new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) - .condition(DSL.greater(ref("age", INTEGER), literal(20))))), + Arrays.asList( + named( + "avg(age) filter(where age > 34)", + new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) + .condition(DSL.greater(ref("age", INTEGER), literal(20))))), Collections.emptyList())); } @Test void should_build_filter_aggregation_group_by() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"gender\" : {%n" - + " \"terms\" : 
{%n" - + " \"field\" : \"gender\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 20,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"gender\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"gender\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 20,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Arrays.asList(named("avg(age) filter(where age > 34)", - new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) - .condition(DSL.greater(ref("age", INTEGER), literal(20))))), + Arrays.asList( + named( + "avg(age) filter(where age > 34)", + new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) + .condition(DSL.greater(ref("age", 
INTEGER), literal(20))))), Arrays.asList(named(ref("gender", OpenSearchDataType.of(STRING)))))); } @Test void should_build_type_mapping_without_bucket() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(ref("balance", INTEGER)), INTEGER))), + buildTypeMapping( + Arrays.asList( + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(ref("balance", INTEGER)), INTEGER))), Collections.emptyList()), - containsInAnyOrder( - map("avg(balance)", OpenSearchDataType.of(INTEGER)) - )); + containsInAnyOrder(map("avg(balance)", OpenSearchDataType.of(INTEGER)))); } @Test void should_build_histogram() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" - + " \"histogram\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"interval\" : 10.0%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" + + " \"histogram\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"interval\" : 10.0%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), 
INTEGER))), @@ -496,37 +522,38 @@ void should_build_histogram() { @Test void should_build_histogram_two_metrics() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" - + " \"histogram\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"interval\" : 10.0%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " },%n" - + " \"avg(b)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"b\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" + + " \"histogram\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"interval\" : 10.0%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " },%n" + + " \"avg(b)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"b\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER)), @@ -536,32 +563,33 @@ void should_build_histogram_two_metrics() { @Test void fixed_interval_time_span() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=timestamp, value=1, unit=H)\" : {%n" - + " \"date_histogram\" : {%n" 
- + " \"field\" : \"timestamp\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"fixed_interval\" : \"1h\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=timestamp, value=1, unit=H)\" : {%n" + + " \"date_histogram\" : {%n" + + " \"field\" : \"timestamp\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"fixed_interval\" : \"1h\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), @@ -570,32 +598,33 @@ void fixed_interval_time_span() { @Test void calendar_interval_time_span() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=date, value=1, unit=W)\" : {%n" - + " \"date_histogram\" : {%n" - + " \"field\" : \"date\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"calendar_interval\" : \"1w\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " 
\"sources\" : [ {%n" + + " \"SpanExpression(field=date, value=1, unit=W)\" : {%n" + + " \"date_histogram\" : {%n" + + " \"field\" : \"date\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"calendar_interval\" : \"1w\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), @@ -604,32 +633,33 @@ void calendar_interval_time_span() { @Test void general_span() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=age, value=1, unit=NONE)\" : {%n" - + " \"histogram\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"interval\" : 1.0%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=age, value=1, unit=NONE)\" : {%n" + + " \"histogram\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"interval\" : 1.0%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new 
CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), @@ -638,15 +668,20 @@ void general_span() { @Test void invalid_unit() { - assertThrows(IllegalStateException.class, () -> buildQuery( - Arrays.asList( - named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), - Arrays.asList(named(span(ref("age", INTEGER), literal(1), "invalid_unit"))))); + assertThrows( + IllegalStateException.class, + () -> + buildQuery( + Arrays.asList( + named( + "count(a)", + new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), + Arrays.asList(named(span(ref("age", INTEGER), literal(1), "invalid_unit"))))); } @SneakyThrows - private String buildQuery(List namedAggregatorList, - List groupByList) { + private String buildQuery( + List namedAggregatorList, List groupByList) { return buildQuery(namedAggregatorList, groupByList, null); } @@ -667,8 +702,7 @@ private String buildQuery( } private Set> buildTypeMapping( - List namedAggregatorList, - List groupByList) { + List namedAggregatorList, List groupByList) { return queryBuilder.buildTypeMapping(namedAggregatorList, groupByList).entrySet(); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java index 38107934a0..618a9ca77a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -31,14 +30,11 @@ @ExtendWith(MockitoExtension.class) class ExpressionAggregationScriptFactoryTest { - @Mock - 
private SearchLookup searchLookup; + @Mock private SearchLookup searchLookup; - @Mock - private LeafSearchLookup leafSearchLookup; + @Mock private LeafSearchLookup leafSearchLookup; - @Mock - private LeafReaderContext leafReaderContext; + @Mock private LeafReaderContext leafReaderContext; private final Expression expression = DSL.literal(true); @@ -63,7 +59,6 @@ void can_initialize_expression_filter_script() throws IOException { assertEquals( new ExpressionAggregationScript(expression, searchLookup, leafReaderContext, params), - actualScript - ); + actualScript); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java index b98bc538ab..520e301301 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static java.time.temporal.ChronoUnit.MILLIS; @@ -46,21 +45,17 @@ @ExtendWith(MockitoExtension.class) class ExpressionAggregationScriptTest { - @Mock - private SearchLookup lookup; + @Mock private SearchLookup lookup; - @Mock - private LeafSearchLookup leafLookup; + @Mock private LeafSearchLookup leafLookup; - @Mock - private LeafReaderContext context; + @Mock private LeafReaderContext context; @Test void can_execute_expression_with_integer_field() { assertThat() .docValues("age", 30L) // DocValue only supports long - .evaluate( - DSL.abs(ref("age", INTEGER))) + .evaluate(DSL.abs(ref("age", INTEGER))) .shouldMatch(30); } @@ -68,8 +63,7 @@ void can_execute_expression_with_integer_field() { void 
can_execute_expression_with_integer_field_with_boolean_result() { assertThat() .docValues("age", 30L) // DocValue only supports long - .evaluate( - DSL.greater(ref("age", INTEGER), literal(20))) + .evaluate(DSL.greater(ref("age", INTEGER), literal(20))) .shouldMatch(true); } @@ -78,34 +72,36 @@ void can_execute_expression_with_text_keyword_field() { assertThat() .docValues("name.keyword", "John") .evaluate( - DSL.equal(ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + DSL.equal( + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John"))) .shouldMatch(true); } @Test void can_execute_expression_with_null_field() { - assertThat() - .docValues("age", null) - .evaluate(ref("age", INTEGER)) - .shouldMatch(null); + assertThat().docValues("age", null).evaluate(ref("age", INTEGER)).shouldMatch(null); } @Test void can_execute_expression_with_missing_field() { - assertThat() - .docValues("age", 30) - .evaluate(ref("name", STRING)) - .shouldMatch(null); + assertThat().docValues("age", 30).evaluate(ref("name", STRING)).shouldMatch(null); } @Test void can_execute_parse_expression() { assertThat() .docValues("age_string", "age: 30") - .evaluate(DSL.regex(DSL.ref("age_string", STRING), DSL.literal("age: (?\\d+)"), - DSL.literal("age"))) + .evaluate( + DSL.regex( + DSL.ref("age_string", STRING), + DSL.literal("age: (?\\d+)"), + DSL.literal("age"))) .shouldMatch("30"); } @@ -113,28 +109,23 @@ void can_execute_parse_expression() { void can_execute_expression_interpret_dates_for_aggregation() { assertThat() .docValues("date", "1961-04-12") - .evaluate( - DSL.date(ref("date", STRING))) - .shouldMatch(new ExprDateValue(LocalDate.of(1961, 4, 12)) - .timestampValue().toEpochMilli()); + .evaluate(DSL.date(ref("date", STRING))) + .shouldMatch(new ExprDateValue(LocalDate.of(1961, 4, 12)).timestampValue().toEpochMilli()); } @Test void 
can_execute_expression_interpret_datetimes_for_aggregation() { assertThat() .docValues("datetime", "1984-03-17 22:16:42") - .evaluate( - DSL.datetime(ref("datetime", STRING))) - .shouldMatch(new ExprDatetimeValue("1984-03-17 22:16:42") - .timestampValue().toEpochMilli()); + .evaluate(DSL.datetime(ref("datetime", STRING))) + .shouldMatch(new ExprDatetimeValue("1984-03-17 22:16:42").timestampValue().toEpochMilli()); } @Test void can_execute_expression_interpret_times_for_aggregation() { assertThat() .docValues("time", "22:13:42") - .evaluate( - DSL.time(ref("time", STRING))) + .evaluate(DSL.time(ref("time", STRING))) .shouldMatch(MILLIS.between(LocalTime.MIN, LocalTime.of(22, 13, 42))); } @@ -142,10 +133,8 @@ void can_execute_expression_interpret_times_for_aggregation() { void can_execute_expression_interpret_timestamps_for_aggregation() { assertThat() .docValues("timestamp", "1984-03-17 22:16:42") - .evaluate( - DSL.timestamp(ref("timestamp", STRING))) - .shouldMatch(new ExprTimestampValue("1984-03-17 22:16:42") - .timestampValue().toEpochMilli()); + .evaluate(DSL.timestamp(ref("timestamp", STRING))) + .shouldMatch(new ExprTimestampValue("1984-03-17 22:16:42").timestampValue().toEpochMilli()); } @Test @@ -172,20 +161,20 @@ ExprScriptAssertion docValues() { } ExprScriptAssertion docValues(String name, Object value) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of(name, new FakeScriptDocValues<>(value))); + LeafDocLookup leafDocLookup = + mockLeafDocLookup(ImmutableMap.of(name, new FakeScriptDocValues<>(value))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); return this; } - ExprScriptAssertion docValues(String name1, Object value1, - String name2, Object value2) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of( - name1, new FakeScriptDocValues<>(value1), - name2, new FakeScriptDocValues<>(value2))); + ExprScriptAssertion docValues(String name1, Object 
value1, String name2, Object value2) { + LeafDocLookup leafDocLookup = + mockLeafDocLookup( + ImmutableMap.of( + name1, new FakeScriptDocValues<>(value1), + name2, new FakeScriptDocValues<>(value2))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java index bff04604c1..2ab8a24d68 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -28,10 +27,12 @@ class GroupSortOrderTest { private final AggregationQueryBuilder.GroupSortOrder groupSortOrder = new AggregationQueryBuilder.GroupSortOrder( - sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC, - ref("age", INTEGER), Sort.SortOption.DEFAULT_ASC)); - @Mock - private ReferenceExpression ref; + sort( + ref("name", STRING), + Sort.SortOption.DEFAULT_DESC, + ref("age", INTEGER), + Sort.SortOption.DEFAULT_ASC)); + @Mock private ReferenceExpression ref; @Test void both_expression_in_sort_list() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java index 208904d9c3..d11d7da2fe 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -48,8 +47,7 @@ @ExtendWith(MockitoExtension.class) class BucketAggregationBuilderTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private BucketAggregationBuilder aggregationBuilder; @@ -69,9 +67,7 @@ void should_build_bucket_with_field() { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named("age", ref("age", INTEGER)))))); + buildQuery(Arrays.asList(asc(named("age", ref("age", INTEGER)))))); } @Test @@ -90,9 +86,7 @@ void should_build_bucket_with_literal() { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named(literal))))); + buildQuery(Arrays.asList(asc(named(literal))))); } @Test @@ -108,8 +102,16 @@ void should_build_bucket_with_keyword_field() { + "}", buildQuery( Arrays.asList( - asc(named("name", ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))))))); + asc( + named( + "name", + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Keyword))))))))); } @Test @@ -129,13 +131,13 @@ void should_build_bucket_with_parse_expression() { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named("name", parseExpression))))); + buildQuery(Arrays.asList(asc(named("name", parseExpression))))); } @ParameterizedTest(name = "{0}") - @EnumSource(value = ExprCoreType.class, names = {"TIMESTAMP", "TIME", "DATE", "DATETIME"}) + @EnumSource( + value = ExprCoreType.class, + names = {"TIMESTAMP", "TIME", "DATE", "DATETIME"}) void terms_bucket_for_datetime_types_uses_long(ExprType dataType) { 
assertEquals( "{\n" @@ -147,9 +149,7 @@ void terms_bucket_for_datetime_types_uses_long(ExprType dataType) { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named("date", ref("date", dataType)))))); + buildQuery(Arrays.asList(asc(named("date", ref("date", dataType)))))); } @SneakyThrows diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java index 94f152f913..7f302c9c53 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -49,11 +48,9 @@ @ExtendWith(MockitoExtension.class) class MetricAggregationBuilderTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; - @Mock - private NamedAggregator aggregator; + @Mock private NamedAggregator aggregator; private MetricAggregationBuilder aggregationBuilder; @@ -64,299 +61,332 @@ void set_up() { @Test void should_build_avg_aggregation() { - assertEquals(format( - "{%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("avg(age)", - new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void 
should_build_sum_aggregation() { - assertEquals(format( - "{%n" - + " \"sum(age)\" : {%n" - + " \"sum\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"sum(age)\" : {%n" + + " \"sum\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("sum(age)", - new SumAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "sum(age)", new SumAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_count_aggregation() { - assertEquals(format( - "{%n" - + " \"count(age)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(age)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("count(age)", + named( + "count(age)", new CountAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_count_star_aggregation() { - assertEquals(format( - "{%n" - + " \"count(*)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"_index\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(*)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"_index\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("count(*)", - new CountAggregator(Arrays.asList(literal("*")), INTEGER))))); + named("count(*)", new CountAggregator(Arrays.asList(literal("*")), INTEGER))))); } @Test void should_build_count_other_literal_aggregation() { - assertEquals(format( - "{%n" - + " \"count(1)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"_index\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(1)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"_index\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("count(1)", - new 
CountAggregator(Arrays.asList(literal(1)), INTEGER))))); + named("count(1)", new CountAggregator(Arrays.asList(literal(1)), INTEGER))))); } @Test void should_build_min_aggregation() { - assertEquals(format( - "{%n" - + " \"min(age)\" : {%n" - + " \"min\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"min(age)\" : {%n" + + " \"min\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("min(age)", - new MinAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "min(age)", new MinAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_max_aggregation() { - assertEquals(format( - "{%n" - + " \"max(age)\" : {%n" - + " \"max\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"max(age)\" : {%n" + + " \"max\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("max(age)", - new MaxAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "max(age)", new MaxAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_varPop_aggregation() { - assertEquals(format( - "{%n" - + " \"var_pop(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"var_pop(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("var_pop(age)", + named( + "var_pop(age)", variancePopulation(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_varSamp_aggregation() { - assertEquals(format( - "{%n" - + " \"var_samp(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + 
assertEquals( + format( + "{%n" + + " \"var_samp(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("var_samp(age)", + named( + "var_samp(age)", varianceSample(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_stddevPop_aggregation() { - assertEquals(format( - "{%n" - + " \"stddev_pop(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"stddev_pop(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("stddev_pop(age)", + named( + "stddev_pop(age)", stddevPopulation(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_stddevSamp_aggregation() { - assertEquals(format( - "{%n" - + " \"stddev_samp(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"stddev_samp(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("stddev_samp(age)", + named( + "stddev_samp(age)", stddevSample(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_cardinality_aggregation() { - assertEquals(format( - "{%n" - + " \"count(distinct name)\" : {%n" - + " \"cardinality\" : {%n" - + " \"field\" : \"name\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(distinct name)\" : {%n" + + " \"cardinality\" : {%n" + + " \"field\" : \"name\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Collections.singletonList(named("count(distinct name)", new CountAggregator( - Collections.singletonList(ref("name", STRING)), 
INTEGER).distinct(true))))); + Collections.singletonList( + named( + "count(distinct name)", + new CountAggregator(Collections.singletonList(ref("name", STRING)), INTEGER) + .distinct(true))))); } @Test void should_build_filtered_cardinality_aggregation() { - assertEquals(format( - "{%n" - + " \"count(distinct name) filter(where age > 30)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 30,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(distinct name) filter(where age > 30)\" : {%n" - + " \"cardinality\" : {%n" - + " \"field\" : \"name\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), - buildQuery(Collections.singletonList(named( - "count(distinct name) filter(where age > 30)", - new CountAggregator(Collections.singletonList(ref("name", STRING)), INTEGER) - .condition(DSL.greater(ref("age", INTEGER), literal(30))) - .distinct(true))))); + assertEquals( + format( + "{%n" + + " \"count(distinct name) filter(where age > 30)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 30,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(distinct name) filter(where age > 30)\" : {%n" + + " \"cardinality\" : {%n" + + " \"field\" : \"name\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), + buildQuery( + Collections.singletonList( + named( + "count(distinct name) filter(where age > 30)", + new CountAggregator(Collections.singletonList(ref("name", STRING)), INTEGER) + .condition(DSL.greater(ref("age", INTEGER), literal(30))) + .distinct(true))))); } @Test void should_build_top_hits_aggregation() { - assertEquals(format( - "{%n" - + " \"take(name, 10)\" : {%n" - + " \"top_hits\" : 
{%n" - + " \"from\" : 0,%n" - + " \"size\" : 10,%n" - + " \"version\" : false,%n" - + " \"seq_no_primary_term\" : false,%n" - + " \"explain\" : false,%n" - + " \"_source\" : {%n" - + " \"includes\" : [ \"name\" ],%n" - + " \"excludes\" : [ ]%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"take(name, 10)\" : {%n" + + " \"top_hits\" : {%n" + + " \"from\" : 0,%n" + + " \"size\" : 10,%n" + + " \"version\" : false,%n" + + " \"seq_no_primary_term\" : false,%n" + + " \"explain\" : false,%n" + + " \"_source\" : {%n" + + " \"includes\" : [ \"name\" ],%n" + + " \"excludes\" : [ ]%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Collections.singletonList(named("take(name, 10)", new TakeAggregator( - ImmutableList.of(ref("name", STRING), literal(10)), ARRAY))))); + Collections.singletonList( + named( + "take(name, 10)", + new TakeAggregator( + ImmutableList.of(ref("name", STRING), literal(10)), ARRAY))))); } @Test void should_build_filtered_top_hits_aggregation() { - assertEquals(format( - "{%n" - + " \"take(name, 10) filter(where age > 30)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 30,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"take(name, 10) filter(where age > 30)\" : {%n" - + " \"top_hits\" : {%n" - + " \"from\" : 0,%n" - + " \"size\" : 10,%n" - + " \"version\" : false,%n" - + " \"seq_no_primary_term\" : false,%n" - + " \"explain\" : false,%n" - + " \"_source\" : {%n" - + " \"includes\" : [ \"name\" ],%n" - + " \"excludes\" : [ ]%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), - buildQuery(Collections.singletonList(named( - "take(name, 10) filter(where age > 30)", - new TakeAggregator( - ImmutableList.of(ref("name", STRING), literal(10)), ARRAY) - .condition(DSL.greater(ref("age", INTEGER), 
literal(30))))))); + assertEquals( + format( + "{%n" + + " \"take(name, 10) filter(where age > 30)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 30,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"take(name, 10) filter(where age > 30)\" : {%n" + + " \"top_hits\" : {%n" + + " \"from\" : 0,%n" + + " \"size\" : 10,%n" + + " \"version\" : false,%n" + + " \"seq_no_primary_term\" : false,%n" + + " \"explain\" : false,%n" + + " \"_source\" : {%n" + + " \"includes\" : [ \"name\" ],%n" + + " \"excludes\" : [ ]%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), + buildQuery( + Collections.singletonList( + named( + "take(name, 10) filter(where age > 30)", + new TakeAggregator(ImmutableList.of(ref("name", STRING), literal(10)), ARRAY) + .condition(DSL.greater(ref("age", INTEGER), literal(30))))))); } @Test void should_throw_exception_for_unsupported_distinct_aggregator() { - assertThrows(IllegalStateException.class, - () -> buildQuery(Collections.singletonList(named("avg(distinct age)", new AvgAggregator( - Collections.singletonList(ref("name", STRING)), STRING).distinct(true)))), + assertThrows( + IllegalStateException.class, + () -> + buildQuery( + Collections.singletonList( + named( + "avg(distinct age)", + new AvgAggregator(Collections.singletonList(ref("name", STRING)), STRING) + .distinct(true)))), "unsupported distinct aggregator avg"); } @@ -366,7 +396,8 @@ void should_throw_exception_for_unsupported_aggregator() { when(aggregator.getArguments()).thenReturn(Arrays.asList(ref("age", INTEGER))); IllegalStateException exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> buildQuery(Arrays.asList(named("unsupported_agg(age)", aggregator)))); assertEquals("unsupported aggregator unsupported_agg", 
exception.getMessage()); } @@ -374,19 +405,23 @@ void should_throw_exception_for_unsupported_aggregator() { @Test void should_throw_exception_for_unsupported_exception() { IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> buildQuery(Arrays.asList( - named("count(age)", - new CountAggregator(Arrays.asList(named("age", ref("age", INTEGER))), INTEGER))))); - assertEquals( - "metric aggregation doesn't support expression age", - exception.getMessage()); + assertThrows( + IllegalStateException.class, + () -> + buildQuery( + Arrays.asList( + named( + "count(age)", + new CountAggregator( + Arrays.asList(named("age", ref("age", INTEGER))), INTEGER))))); + assertEquals("metric aggregation doesn't support expression age", exception.getMessage()); } @SneakyThrows private String buildQuery(List namedAggregatorList) { ObjectMapper objectMapper = new ObjectMapper(); - return objectMapper.readTree( - aggregationBuilder.build(namedAggregatorList).getLeft().toString()) + return objectMapper + .readTree(aggregationBuilder.build(namedAggregatorList).getLeft().toString()) .toPrettyString(); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java index 3c927c9a0b..d2d349c14b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -30,14 +29,11 @@ @ExtendWith(MockitoExtension.class) class ExpressionFilterScriptFactoryTest { - @Mock - private SearchLookup searchLookup; + @Mock private 
SearchLookup searchLookup; - @Mock - private LeafSearchLookup leafSearchLookup; + @Mock private LeafSearchLookup leafSearchLookup; - @Mock - private LeafReaderContext leafReaderContext; + @Mock private LeafReaderContext leafReaderContext; private final Expression expression = DSL.literal(true); @@ -59,8 +55,6 @@ void can_initialize_expression_filter_script() throws IOException { assertEquals( new ExpressionFilterScript(expression, searchLookup, leafReaderContext, params), - actualFilterScript - ); + actualFilterScript); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java index 61a3e9d35f..cca51c8f4a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static java.util.Collections.emptyList; @@ -55,37 +54,27 @@ @ExtendWith(MockitoExtension.class) class ExpressionFilterScriptTest { - @Mock - private SearchLookup lookup; + @Mock private SearchLookup lookup; - @Mock - private LeafSearchLookup leafLookup; + @Mock private LeafSearchLookup leafLookup; - @Mock - private LeafReaderContext context; + @Mock private LeafReaderContext context; @Test void should_match_if_true_literal() { - assertThat() - .docValues() - .filterBy(literal(true)) - .shouldMatch(); + assertThat().docValues().filterBy(literal(true)).shouldMatch(); } @Test void should_not_match_if_false_literal() { - assertThat() - .docValues() - .filterBy(literal(false)) - .shouldNotMatch(); + assertThat().docValues().filterBy(literal(false)).shouldNotMatch(); } @Test void can_execute_expression_with_integer_field() { 
assertThat() .docValues("age", 30L) // DocValue only supports long - .filterBy( - DSL.greater(ref("age", INTEGER), literal(20))) + .filterBy(DSL.greater(ref("age", INTEGER), literal(20))) .shouldMatch(); } @@ -94,8 +83,13 @@ void can_execute_expression_with_text_keyword_field() { assertThat() .docValues("name.keyword", "John") .filterBy( - DSL.equal(ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + DSL.equal( + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John"))) .shouldMatch(); } @@ -151,34 +145,31 @@ void can_execute_expression_with_time_field() { @Test void can_execute_expression_with_missing_field() { - assertThat() - .docValues("age", 30) - .filterBy(ref("name", STRING)) - .shouldNotMatch(); + assertThat().docValues("age", 30).filterBy(ref("name", STRING)).shouldNotMatch(); } @Test void can_execute_expression_with_empty_doc_value() { - assertThat() - .docValues("name", emptyList()) - .filterBy(ref("name", STRING)) - .shouldNotMatch(); + assertThat().docValues("name", emptyList()).filterBy(ref("name", STRING)).shouldNotMatch(); } @Test void can_execute_parse_expression() { assertThat() .docValues("age_string", "age: 30") - .filterBy(DSL.equal( - DSL.regex(DSL.ref("age_string", STRING), literal("age: (?\\d+)"), literal("age")), - literal("30"))) + .filterBy( + DSL.equal( + DSL.regex( + DSL.ref("age_string", STRING), literal("age: (?\\d+)"), literal("age")), + literal("30"))) .shouldMatch(); } @Test void cannot_execute_non_predicate_expression() { - assertThrow(IllegalStateException.class, - "Expression has wrong result type instead of boolean: expression [10], result [10]") + assertThrow( + IllegalStateException.class, + "Expression has wrong result type instead of boolean: expression [10], result [10]") .docValues() .filterBy(literal(10)); } @@ -187,8 +178,7 @@ private ExprScriptAssertion 
assertThat() { return new ExprScriptAssertion(lookup, leafLookup, context); } - private ExprScriptAssertion assertThrow(Class clazz, - String message) { + private ExprScriptAssertion assertThrow(Class clazz, String message) { return new ExprScriptAssertion(lookup, leafLookup, context) { @Override ExprScriptAssertion filterBy(Expression expr) { @@ -211,20 +201,20 @@ ExprScriptAssertion docValues() { } ExprScriptAssertion docValues(String name, Object value) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of(name, new FakeScriptDocValues<>(value))); + LeafDocLookup leafDocLookup = + mockLeafDocLookup(ImmutableMap.of(name, new FakeScriptDocValues<>(value))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); return this; } - ExprScriptAssertion docValues(String name1, Object value1, - String name2, Object value2) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of( - name1, new FakeScriptDocValues<>(value1), - name2, new FakeScriptDocValues<>(value2))); + ExprScriptAssertion docValues(String name1, Object value1, String name2, Object value2) { + LeafDocLookup leafDocLookup = + mockLeafDocLookup( + ImmutableMap.of( + name1, new FakeScriptDocValues<>(value1), + name2, new FakeScriptDocValues<>(value2))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); @@ -276,5 +266,4 @@ public int size() { return values.size(); } } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java index eb07076257..1fc2d5ee29 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java @@ -3,7 
+3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -63,22 +62,42 @@ class FilterQueryBuilderTest { private static Stream numericCastSource() { - return Stream.of(literal((byte) 1), literal((short) -1), literal( - 1), literal(21L), literal(3.14F), literal(3.1415D), literal(true), literal("1")); + return Stream.of( + literal((byte) 1), + literal((short) -1), + literal(1), + literal(21L), + literal(3.14F), + literal(3.1415D), + literal(true), + literal("1")); } private static Stream booleanTrueCastSource() { - return Stream.of(literal((byte) 1), literal((short) -1), literal( - 1), literal(42L), literal(3.14F), literal(3.1415D), literal(true), literal("true")); + return Stream.of( + literal((byte) 1), + literal((short) -1), + literal(1), + literal(42L), + literal(3.14F), + literal(3.1415D), + literal(true), + literal("true")); } private static Stream booleanFalseCastSource() { - return Stream.of(literal((byte) 0), literal((short) 0), literal( - 0), literal(0L), literal(0.0F), literal(0.0D), literal(false), literal("false")); + return Stream.of( + literal((byte) 0), + literal((short) 0), + literal(0), + literal(0L), + literal(0.0F), + literal(0.0D), + literal(false), + literal("false")); } - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private FilterQueryBuilder filterQueryBuilder; @@ -98,34 +117,42 @@ void should_build_term_query_for_equality_expression() { + " }\n" + " }\n" + "}", - buildQuery( - DSL.equal( - ref("name", STRING), literal("John")))); + buildQuery(DSL.equal(ref("name", STRING), literal("John")))); } @Test void should_build_range_query_for_comparison_expression() { Expression[] params = {ref("age", INTEGER), literal(30)}; - Map ranges = ImmutableMap.of( - DSL.less(params), new Object[]{null, 30, true, false}, - DSL.greater(params), new Object[]{30, null, false, true}, - 
DSL.lte(params), new Object[]{null, 30, true, true}, - DSL.gte(params), new Object[]{30, null, true, true}); - - ranges.forEach((expr, range) -> - assertJsonEquals( - "{\n" - + " \"range\" : {\n" - + " \"age\" : {\n" - + " \"from\" : " + range[0] + ",\n" - + " \"to\" : " + range[1] + ",\n" - + " \"include_lower\" : " + range[2] + ",\n" - + " \"include_upper\" : " + range[3] + ",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(expr))); + Map ranges = + ImmutableMap.of( + DSL.less(params), new Object[] {null, 30, true, false}, + DSL.greater(params), new Object[] {30, null, false, true}, + DSL.lte(params), new Object[] {null, 30, true, true}, + DSL.gte(params), new Object[] {30, null, true, true}); + + ranges.forEach( + (expr, range) -> + assertJsonEquals( + "{\n" + + " \"range\" : {\n" + + " \"age\" : {\n" + + " \"from\" : " + + range[0] + + ",\n" + + " \"to\" : " + + range[1] + + ",\n" + + " \"include_lower\" : " + + range[2] + + ",\n" + + " \"include_upper\" : " + + range[3] + + ",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery(expr))); } @Test @@ -140,9 +167,7 @@ void should_build_wildcard_query_for_like_expression() { + " }\n" + " }\n" + "}", - buildQuery( - DSL.like( - ref("name", STRING), literal("%John_")))); + buildQuery(DSL.like(ref("name", STRING), literal("%John_")))); } @Test @@ -158,8 +183,7 @@ void should_build_script_query_for_unsupported_lucene_query() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.isnotnull(ref("age", INTEGER)))); + buildQuery(DSL.isnotnull(ref("age", INTEGER)))); } @Test @@ -175,9 +199,7 @@ void should_build_script_query_for_function_expression() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.equal( - DSL.abs(ref("age", INTEGER)), literal(30)))); + buildQuery(DSL.equal(DSL.abs(ref("age", INTEGER)), literal(30)))); } @Test @@ -193,26 +215,23 @@ void should_build_script_query_for_comparison_between_fields() { + " \"boost\" : 1.0\n" + " }\n" + "}", - 
buildQuery( - DSL.equal( - ref("age1", INTEGER), ref("age2", INTEGER)))); + buildQuery(DSL.equal(ref("age1", INTEGER), ref("age2", INTEGER)))); } @Test void should_build_bool_query_for_and_or_expression() { - String[] names = { "filter", "should" }; + String[] names = {"filter", "should"}; FunctionExpression expr1 = DSL.equal(ref("name", STRING), literal("John")); FunctionExpression expr2 = DSL.equal(ref("age", INTEGER), literal(30)); - Expression[] exprs = { - DSL.and(expr1, expr2), - DSL.or(expr1, expr2) - }; + Expression[] exprs = {DSL.and(expr1, expr2), DSL.or(expr1, expr2)}; for (int i = 0; i < names.length; i++) { assertJsonEquals( "{\n" + " \"bool\" : {\n" - + " \"" + names[i] + "\" : [\n" + + " \"" + + names[i] + + "\" : [\n" + " {\n" + " \"term\" : {\n" + " \"name\" : {\n" @@ -257,10 +276,7 @@ void should_build_bool_query_for_not_expression() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.not( - DSL.equal( - ref("age", INTEGER), literal(30))))); + buildQuery(DSL.not(DSL.equal(ref("age", INTEGER), literal(30))))); } @Test @@ -276,8 +292,12 @@ void should_use_keyword_for_multi_field_in_equality_expression() { + "}", buildQuery( DSL.equal( - ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John")))); } @@ -295,8 +315,12 @@ void should_use_keyword_for_multi_field_in_like_expression() { + "}", buildQuery( DSL.like( - ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John%")))); } @@ -320,13 +344,9 @@ void should_build_term_query_predicate_expression_with_nested_function() { + " }\n" + "}", buildQuery( - DSL.equal(DSL.nested( - 
DSL.ref("message.info", STRING), - DSL.ref("message", STRING)), - literal("string_value") - ) - ) - ); + DSL.equal( + DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), + literal("string_value")))); } @Test @@ -351,78 +371,67 @@ void should_build_range_query_predicate_expression_with_nested_function() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.greater(DSL.nested( - DSL.ref("lottery.number.id", INTEGER)), literal(1234) - ) - ) - ); + buildQuery(DSL.greater(DSL.nested(DSL.ref("lottery.number.id", INTEGER)), literal(1234)))); } // TODO remove this test when alternate syntax is implemented for nested // function in WHERE clause: nested(path, condition) @Test void ensure_alternate_syntax_falls_back_to_legacy_engine() { - assertThrows(SyntaxCheckException.class, () -> - buildQuery( - DSL.nested( - DSL.ref("message", STRING), - DSL.equal(DSL.literal("message.info"), literal("a")) - ) - ) - ); + assertThrows( + SyntaxCheckException.class, + () -> + buildQuery( + DSL.nested( + DSL.ref("message", STRING), + DSL.equal(DSL.literal("message.info"), literal("a"))))); } @Test void nested_filter_wrong_right_side_type_in_predicate_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING)), - DSL.ref("string_value", STRING) - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), + DSL.ref("string_value", STRING)))); } @Test void nested_filter_wrong_first_param_type_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.namedArgument("field", literal("message"))), - literal("string_value") - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested(DSL.namedArgument("field", literal("message"))), + 
literal("string_value")))); } @Test void nested_filter_wrong_second_param_type_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.ref("message.info", STRING), - DSL.literal(2)), - literal("string_value") - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested(DSL.ref("message.info", STRING), DSL.literal(2)), + literal("string_value")))); } @Test void nested_filter_too_many_params_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING), - DSL.ref("message", STRING)), - literal("string_value") - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested( + DSL.ref("message.info", STRING), + DSL.ref("message", STRING), + DSL.ref("message", STRING)), + literal("string_value")))); } @Test @@ -445,8 +454,8 @@ void should_build_match_query_with_default_parameters() { + "}", buildQuery( DSL.match( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @@ -474,8 +483,8 @@ void should_build_match_query_with_custom_parameters() { + "}", buildQuery( DSL.match( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("operator", literal("AND")), DSL.namedArgument("analyzer", literal("keyword")), @@ -493,60 +502,65 @@ void should_build_match_query_with_custom_parameters() { @Test void match_invalid_parameter() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", - new 
ReferenceExpression("message", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertTrue(msg.startsWith("Parameter invalid_parameter is invalid for match function.")); } @Test void match_disallow_duplicate_parameter() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", literal("message")), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("AnalYzer", literal("english"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", literal("message")), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("AnalYzer", literal("english"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("Parameter 'analyzer' can only be specified once.", msg); } @Test void match_disallow_duplicate_query() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", literal("message")), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("QUERY", literal("something"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", literal("message")), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("QUERY", literal("something"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); 
assertEquals("Parameter 'query' can only be specified once.", msg); } @Test void match_disallow_duplicate_field() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", literal("message")), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("Field", literal("something"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", literal("message")), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("Field", literal("something"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("Parameter 'field' can only be specified once.", msg); } @Test void match_missing_field() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("'field' parameter is missing.", msg); } @Test void match_missing_query() { - FunctionExpression expr = DSL.match( + FunctionExpression expr = + DSL.match( DSL.namedArgument("field", literal("field1")), DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); @@ -556,7 +570,7 @@ void match_missing_query() { @Test void should_build_match_phrase_query_with_default_parameters() { assertJsonEquals( - "{\n" + "{\n" + " \"match_phrase\" : {\n" + " \"message\" : {\n" + " \"query\" : \"search query\",\n" @@ -568,14 +582,15 @@ void should_build_match_phrase_query_with_default_parameters() { + "}", buildQuery( DSL.match_phrase( - DSL.namedArgument("field", - new ReferenceExpression("message", 
OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_multi_match_query_with_default_parameters_single_field() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [\n" @@ -592,16 +607,21 @@ void should_build_multi_match_query_with_default_parameters_single_field() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_multi_match_query_with_default_parameters_all_fields() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [\n" @@ -618,16 +638,21 @@ void should_build_multi_match_query_with_default_parameters_all_fields() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "*", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("*", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_multi_match_query_with_default_parameters_no_fields() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + 
" \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [],\n" @@ -642,17 +667,20 @@ void should_build_multi_match_query_with_default_parameters_no_fields() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of())))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal(new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of())))), + DSL.namedArgument("query", literal("search query"))))); } // Note: we can't test `multi_match` and `simple_query_string` without weight(s) @Test void should_build_multi_match_query_with_default_parameters_multiple_fields() { - var expected = "{\n" + var expected = + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -667,23 +695,31 @@ void should_build_multi_match_query_with_default_parameters_multiple_fields() { + " \"boost\" : 1.0,\n" + " }\n" + "}"; - var actual = buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")))); + var actual = + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) 
+ || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_multi_match_query_with_custom_parameters() { - var expected = "{\n" + var expected = + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -704,10 +740,13 @@ void should_build_multi_match_query_with_custom_parameters() { + " \"boost\" : 2.0\n" + " }\n" + "}"; - var actual = buildQuery( + var actual = + buildQuery( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), + DSL.namedArgument( + "fields", + DSL.literal( + ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("analyzer", literal("keyword")), DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), @@ -727,28 +766,36 @@ void should_build_multi_match_query_with_custom_parameters() { var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) + || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void multi_match_invalid_parameter() { - FunctionExpression expr = DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - 
assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for match function."); } @Test void should_build_match_phrase_query_with_custom_parameters() { assertJsonEquals( - "{\n" + "{\n" + " \"match_phrase\" : {\n" + " \"message\" : {\n" + " \"query\" : \"search query\",\n" @@ -761,8 +808,8 @@ void should_build_match_phrase_query_with_custom_parameters() { + "}", buildQuery( DSL.match_phrase( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("boost", literal("1.2")), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("analyzer", literal("keyword")), @@ -772,150 +819,171 @@ void should_build_match_phrase_query_with_custom_parameters() { @Test void wildcard_query_invalid_parameter() { - FunctionExpression expr = DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query*")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.wildcard_query( + DSL.namedArgument("field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query*")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + 
SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for wildcard_query function."); } @Test void wildcard_query_convert_sql_wildcard_to_lucene() { // Test conversion of % wildcard to * - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query*\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query%"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query?\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query_"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query*\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query%"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query?\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query_"))))); } @Test void wildcard_query_escape_wildcards_characters() { - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query%\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new 
ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\%"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query_\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\_"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query\\\\*\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\*"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query\\\\?\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\?"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query%\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\%"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query_\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\_"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " 
\"field\" : {\n" + + " \"wildcard\" : \"search query\\\\*\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\*"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query\\\\?\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\?"))))); } @Test void should_build_wildcard_query_with_default_parameters() { - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query*\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query*"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query*\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query*"))))); } @Test void should_build_wildcard_query_query_with_custom_parameters() { - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query*\",\n" - + " \"boost\" : 0.6,\n" - + " \"case_insensitive\" : true,\n" - + " \"rewrite\" : \"constant_score_boolean\"\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query*")), - 
DSL.namedArgument("boost", literal("0.6")), - DSL.namedArgument("case_insensitive", literal("true")), - DSL.namedArgument("rewrite", literal("constant_score_boolean"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query*\",\n" + + " \"boost\" : 0.6,\n" + + " \"case_insensitive\" : true,\n" + + " \"rewrite\" : \"constant_score_boolean\"\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query*")), + DSL.namedArgument("boost", literal("0.6")), + DSL.namedArgument("case_insensitive", literal("true")), + DSL.namedArgument("rewrite", literal("constant_score_boolean"))))); } @Test void query_invalid_parameter() { - FunctionExpression expr = DSL.query( - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), - "Parameter invalid_parameter is invalid for query function."); + FunctionExpression expr = + DSL.query(DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), + "Parameter invalid_parameter is invalid for query function."); } @Test void query_invalid_fields_parameter_exception_message() { - FunctionExpression expr = DSL.query( - DSL.namedArgument("fields", literal("field1")), - DSL.namedArgument("query", literal("search query"))); + FunctionExpression expr = + DSL.query( + DSL.namedArgument("fields", literal("field1")), + DSL.namedArgument("query", literal("search query"))); var exception = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)); assertEquals("Parameter fields is invalid for query function.", exception.getMessage()); @@ -923,7 +991,8 @@ void query_invalid_fields_parameter_exception_message() { @Test void should_build_query_query_with_default_parameters() { - 
var expected = "{\n" + var expected = + "{\n" + " \"query_string\" : {\n" + " \"query\" : \"field1:query_value\",\n" + " \"fields\" : [],\n" @@ -942,13 +1011,14 @@ void should_build_query_query_with_default_parameters() { + " }\n" + "}"; - assertJsonEquals(expected, buildQuery(DSL.query( - DSL.namedArgument("query", literal("field1:query_value"))))); + assertJsonEquals( + expected, buildQuery(DSL.query(DSL.namedArgument("query", literal("field1:query_value"))))); } @Test void should_build_query_query_with_custom_parameters() { - var expected = "{\n" + var expected = + "{\n" + " \"query_string\" : {\n" + " \"query\" : \"field1:query_value\",\n" + " \"fields\" : [],\n" @@ -971,125 +1041,147 @@ void should_build_query_query_with_custom_parameters() { + " \"boost\" : 2.0,\n" + " }\n" + "}"; - var actual = buildQuery( + var actual = + buildQuery( DSL.query( - DSL.namedArgument("query", literal("field1:query_value")), - DSL.namedArgument("analyze_wildcard", literal("true")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), - DSL.namedArgument("default_operator", literal("AND")), - DSL.namedArgument("fuzzy_max_expansions", literal("10")), - DSL.namedArgument("fuzzy_prefix_length", literal("2")), - DSL.namedArgument("fuzzy_transpositions", literal("false")), - DSL.namedArgument("lenient", literal("false")), - DSL.namedArgument("minimum_should_match", literal("3")), - DSL.namedArgument("tie_breaker", literal("1.3")), - DSL.namedArgument("type", literal("cross_fields")), - DSL.namedArgument("boost", literal("2.0")))); + DSL.namedArgument("query", literal("field1:query_value")), + DSL.namedArgument("analyze_wildcard", literal("true")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), + DSL.namedArgument("default_operator", literal("AND")), + DSL.namedArgument("fuzzy_max_expansions", literal("10")), + 
DSL.namedArgument("fuzzy_prefix_length", literal("2")), + DSL.namedArgument("fuzzy_transpositions", literal("false")), + DSL.namedArgument("lenient", literal("false")), + DSL.namedArgument("minimum_should_match", literal("3")), + DSL.namedArgument("tie_breaker", literal("1.3")), + DSL.namedArgument("type", literal("cross_fields")), + DSL.namedArgument("boost", literal("2.0")))); assertJsonEquals(expected, actual); } @Test void query_string_invalid_parameter() { - FunctionExpression expr = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for match function."); } @Test void should_build_query_string_query_with_default_parameters_multiple_fields() { - var expected = "{\n" - + " \"query_string\" : {\n" - + " \"query\" : \"query_value\",\n" - + " \"fields\" : [%s],\n" - + " \"type\" : \"best_fields\",\n" - + " \"default_operator\" : \"or\",\n" - + " \"max_determinized_states\" : 10000,\n" - + " \"enable_position_increments\" : true,\n" - + " \"fuzziness\" : \"AUTO\",\n" - + " \"fuzzy_prefix_length\" : 0,\n" - + " \"fuzzy_max_expansions\" : 50,\n" - + " \"phrase_slop\" : 0,\n" - + " \"escape\" : false,\n" - + " 
\"auto_generate_synonyms_phrase_query\" : true,\n" - + " \"fuzzy_transpositions\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; - var actual = buildQuery(DSL.query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("query_value")))); + var expected = + "{\n" + + " \"query_string\" : {\n" + + " \"query\" : \"query_value\",\n" + + " \"fields\" : [%s],\n" + + " \"type\" : \"best_fields\",\n" + + " \"default_operator\" : \"or\",\n" + + " \"max_determinized_states\" : 10000,\n" + + " \"enable_position_increments\" : true,\n" + + " \"fuzziness\" : \"AUTO\",\n" + + " \"fuzzy_prefix_length\" : 0,\n" + + " \"fuzzy_max_expansions\" : 50,\n" + + " \"phrase_slop\" : 0,\n" + + " \"escape\" : false,\n" + + " \"auto_generate_synonyms_phrase_query\" : true,\n" + + " \"fuzzy_transpositions\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + var actual = + buildQuery( + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("query_value")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_query_string_query_with_custom_parameters() { - var expected = "{\n" - + " \"query_string\" : {\n" - + " \"query\" : \"query_value\",\n" - + " \"fields\" : [%s],\n" - + " \"type\" : \"cross_fields\",\n" 
- + " \"tie_breaker\" : 1.3,\n" - + " \"default_operator\" : \"and\",\n" - + " \"analyzer\" : \"keyword\",\n" - + " \"max_determinized_states\" : 10000,\n" - + " \"enable_position_increments\" : true,\n" - + " \"fuzziness\" : \"AUTO\",\n" - + " \"fuzzy_prefix_length\" : 2,\n" - + " \"fuzzy_max_expansions\" : 10,\n" - + " \"phrase_slop\" : 0,\n" - + " \"analyze_wildcard\" : true,\n" - + " \"minimum_should_match\" : \"3\",\n" - + " \"lenient\" : false,\n" - + " \"escape\" : false,\n" - + " \"auto_generate_synonyms_phrase_query\" : false,\n" - + " \"fuzzy_transpositions\" : false,\n" - + " \"boost\" : 2.0,\n" - + " }\n" - + "}"; - var actual = buildQuery( - DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), - DSL.namedArgument("query", literal("query_value")), - DSL.namedArgument("analyze_wildcard", literal("true")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), - DSL.namedArgument("default_operator", literal("AND")), - DSL.namedArgument("fuzzy_max_expansions", literal("10")), - DSL.namedArgument("fuzzy_prefix_length", literal("2")), - DSL.namedArgument("fuzzy_transpositions", literal("false")), - DSL.namedArgument("lenient", literal("false")), - DSL.namedArgument("minimum_should_match", literal("3")), - DSL.namedArgument("tie_breaker", literal("1.3")), - DSL.namedArgument("type", literal("cross_fields")), - DSL.namedArgument("boost", literal("2.0")))); + var expected = + "{\n" + + " \"query_string\" : {\n" + + " \"query\" : \"query_value\",\n" + + " \"fields\" : [%s],\n" + + " \"type\" : \"cross_fields\",\n" + + " \"tie_breaker\" : 1.3,\n" + + " \"default_operator\" : \"and\",\n" + + " \"analyzer\" : \"keyword\",\n" + + " \"max_determinized_states\" : 10000,\n" + + " \"enable_position_increments\" : true,\n" + + " \"fuzziness\" : \"AUTO\",\n" + + " \"fuzzy_prefix_length\" : 2,\n" + + " 
\"fuzzy_max_expansions\" : 10,\n" + + " \"phrase_slop\" : 0,\n" + + " \"analyze_wildcard\" : true,\n" + + " \"minimum_should_match\" : \"3\",\n" + + " \"lenient\" : false,\n" + + " \"escape\" : false,\n" + + " \"auto_generate_synonyms_phrase_query\" : false,\n" + + " \"fuzzy_transpositions\" : false,\n" + + " \"boost\" : 2.0,\n" + + " }\n" + + "}"; + var actual = + buildQuery( + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), + DSL.namedArgument("query", literal("query_value")), + DSL.namedArgument("analyze_wildcard", literal("true")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), + DSL.namedArgument("default_operator", literal("AND")), + DSL.namedArgument("fuzzy_max_expansions", literal("10")), + DSL.namedArgument("fuzzy_prefix_length", literal("2")), + DSL.namedArgument("fuzzy_transpositions", literal("false")), + DSL.namedArgument("lenient", literal("false")), + DSL.namedArgument("minimum_should_match", literal("3")), + DSL.namedArgument("tie_breaker", literal("1.3")), + DSL.namedArgument("type", literal("cross_fields")), + DSL.namedArgument("boost", literal("2.0")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_query_string_query_with_default_parameters_single_field() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"query_string\" : {\n" + " \"query\" : \"query_value\",\n" + " \"fields\" : [\n" @@ -1109,11 +1201,15 @@ void 
should_build_query_string_query_with_default_parameters_single_field() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("query_value"))))); + buildQuery( + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("query_value"))))); } @Test @@ -1122,7 +1218,8 @@ void should_build_query_string_query_with_default_parameters_single_field() { // 2) `flags` are printed by OpenSearch as an integer // 3) `minimum_should_match` printed as a string void should_build_simple_query_string_query_with_default_parameters_single_field() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"simple_query_string\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [\n" @@ -1138,16 +1235,21 @@ void should_build_simple_query_string_query_with_default_parameters_single_field + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery(DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.simple_query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_simple_query_string_query_with_default_parameters_multiple_fields() { - var expected = "{\n" + var expected = + "{\n" + " \"simple_query_string\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -1161,23 +1263,31 @@ void 
should_build_simple_query_string_query_with_default_parameters_multiple_fie + " \"boost\" : 1.0\n" + " }\n" + "}"; - var actual = buildQuery(DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")))); + var actual = + buildQuery( + DSL.simple_query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) + || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_simple_query_string_query_with_custom_parameters() { - var expected = "{\n" + var expected = + "{\n" + " \"simple_query_string\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -1194,10 +1304,13 @@ void should_build_simple_query_string_query_with_custom_parameters() { + " \"boost\" : 2.0\n" + " }\n" + "}"; - var actual = buildQuery( + var actual = + buildQuery( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), + DSL.namedArgument( + "fields", + DSL.literal( + ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("analyze_wildcard", 
literal("true")), DSL.namedArgument("analyzer", literal("keyword")), @@ -1213,95 +1326,105 @@ void should_build_simple_query_string_query_with_custom_parameters() { var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) + || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void simple_query_string_invalid_parameter() { - FunctionExpression expr = DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.simple_query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for match function."); } @Test void match_phrase_invalid_parameter() { - FunctionExpression expr = DSL.match_phrase( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + FunctionExpression expr = 
+ DSL.match_phrase( + DSL.namedArgument("field", new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertTrue(msg.startsWith("Parameter invalid_parameter is invalid for match_phrase function.")); } @Test void relevancy_func_invalid_arg_values() { - final var field = DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())); - final var fields = DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))); + final var field = + DSL.namedArgument("field", new ReferenceExpression("message", OpenSearchTextType.of())); + final var fields = + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))); final var query = DSL.namedArgument("query", literal("search query")); - var slopTest = DSL.match_phrase(field, query, - DSL.namedArgument("slop", literal("1.5"))); + var slopTest = DSL.match_phrase(field, query, DSL.namedArgument("slop", literal("1.5"))); var msg = assertThrows(RuntimeException.class, () -> buildQuery(slopTest)).getMessage(); assertEquals("Invalid slop value: '1.5'. Accepts only integer values.", msg); - var ztqTest = DSL.match_phrase(field, query, - DSL.namedArgument("zero_terms_query", literal("meow"))); + var ztqTest = + DSL.match_phrase(field, query, DSL.namedArgument("zero_terms_query", literal("meow"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(ztqTest)).getMessage(); assertEquals( "Invalid zero_terms_query value: 'meow'. 
Available values are: NONE, ALL, NULL.", msg); - var boostTest = DSL.match(field, query, - DSL.namedArgument("boost", literal("pewpew"))); + var boostTest = DSL.match(field, query, DSL.namedArgument("boost", literal("pewpew"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(boostTest)).getMessage(); assertEquals( "Invalid boost value: 'pewpew'. Accepts only floating point values greater than 0.", msg); - var boolTest = DSL.query_string(fields, query, - DSL.namedArgument("escape", literal("42"))); + var boolTest = DSL.query_string(fields, query, DSL.namedArgument("escape", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(boolTest)).getMessage(); assertEquals( "Invalid escape value: '42'. Accepts only boolean values: 'true' or 'false'.", msg); - var typeTest = DSL.multi_match(fields, query, - DSL.namedArgument("type", literal("42"))); + var typeTest = DSL.multi_match(fields, query, DSL.namedArgument("type", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(typeTest)).getMessage(); assertTrue(msg.startsWith("Invalid type value: '42'. Available values are:")); - var operatorTest = DSL.simple_query_string(fields, query, - DSL.namedArgument("default_operator", literal("42"))); + var operatorTest = + DSL.simple_query_string( + fields, query, DSL.namedArgument("default_operator", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(operatorTest)).getMessage(); assertTrue(msg.startsWith("Invalid default_operator value: '42'. Available values are:")); - var flagsTest = DSL.simple_query_string(fields, query, - DSL.namedArgument("flags", literal("42"))); + var flagsTest = + DSL.simple_query_string(fields, query, DSL.namedArgument("flags", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(flagsTest)).getMessage(); assertTrue(msg.startsWith("Invalid flags value: '42'. 
Available values are:")); - var fuzzinessTest = DSL.match_bool_prefix(field, query, - DSL.namedArgument("fuzziness", literal("AUTO:"))); + var fuzzinessTest = + DSL.match_bool_prefix(field, query, DSL.namedArgument("fuzziness", literal("AUTO:"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(fuzzinessTest)).getMessage(); assertTrue(msg.startsWith("Invalid fuzziness value: 'AUTO:'. Available values are:")); - var rewriteTest = DSL.match_bool_prefix(field, query, - DSL.namedArgument("fuzzy_rewrite", literal("42"))); + var rewriteTest = + DSL.match_bool_prefix(field, query, DSL.namedArgument("fuzzy_rewrite", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(rewriteTest)).getMessage(); assertTrue(msg.startsWith("Invalid fuzzy_rewrite value: '42'. Available values are:")); - var timezoneTest = DSL.query_string(fields, query, - DSL.namedArgument("time_zone", literal("42"))); + var timezoneTest = + DSL.query_string(fields, query, DSL.namedArgument("time_zone", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(timezoneTest)).getMessage(); assertTrue(msg.startsWith("Invalid time_zone value: '42'.")); } @@ -1323,30 +1446,39 @@ void should_build_match_bool_prefix_query_with_default_parameters() { + "}", buildQuery( DSL.match_bool_prefix( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @Test void multi_match_missing_fields_even_with_struct() { - FunctionExpression expr = DSL.multi_match( - DSL.namedArgument("something-but-not-fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "pewpew", ExprValueUtils.integerValue(42)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword"))); + FunctionExpression expr = + DSL.multi_match( + 
DSL.namedArgument( + "something-but-not-fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("pewpew", ExprValueUtils.integerValue(42)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("'fields' parameter is missing.", msg); } @Test void multi_match_missing_query_even_with_struct() { - FunctionExpression expr = DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), + FunctionExpression expr = + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("'query' parameter is missing", msg); @@ -1368,8 +1500,8 @@ void should_build_match_phrase_prefix_query_with_default_parameters() { + "}", buildQuery( DSL.match_phrase_prefix( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @@ -1390,8 +1522,8 @@ void should_build_match_phrase_prefix_query_with_non_default_parameters() { + "}", buildQuery( DSL.match_phrase_prefix( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("boost", literal("1.2")), 
DSL.namedArgument("max_expansions", literal("42")), @@ -1400,30 +1532,31 @@ void should_build_match_phrase_prefix_query_with_non_default_parameters() { @Test void cast_to_string_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"string_value\" : {\n" - + " \"value\" : \"1\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"string_value\" : {\n" + + " \"value\" : \"1\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("string_value", STRING), DSL.castString(literal(1))))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("string_value", STRING), DSL.castString(literal("1"))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("string_value", STRING), DSL.castString(literal(1))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("string_value", STRING), DSL.castString(literal("1"))))); } private Float castToFloat(Object o) { if (o instanceof Number) { - return ((Number)o).floatValue(); + return ((Number) o).floatValue(); } if (o instanceof String) { return Float.parseFloat((String) o); } if (o instanceof Boolean) { - return ((Boolean)o) ? 1F : 0F; + return ((Boolean) o) ? 1F : 0F; } // unreachable code throw new IllegalArgumentException(); @@ -1431,13 +1564,13 @@ private Float castToFloat(Object o) { private Integer castToInteger(Object o) { if (o instanceof Number) { - return ((Number)o).intValue(); + return ((Number) o).intValue(); } if (o instanceof String) { return Integer.parseInt((String) o); } if (o instanceof Boolean) { - return ((Boolean)o) ? 1 : 0; + return ((Boolean) o) ? 
1 : 0; } // unreachable code throw new IllegalArgumentException(); @@ -1446,75 +1579,85 @@ private Integer castToInteger(Object o) { @ParameterizedTest(name = "castByte({0})") @MethodSource({"numericCastSource"}) void cast_to_byte_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"byte_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"byte_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), buildQuery(DSL.equal(ref("byte_value", BYTE), DSL.castByte(expr)))); } @ParameterizedTest(name = "castShort({0})") @MethodSource({"numericCastSource"}) void cast_to_short_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"short_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"short_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), buildQuery(DSL.equal(ref("short_value", SHORT), DSL.castShort(expr)))); } @ParameterizedTest(name = "castInt({0})") @MethodSource({"numericCastSource"}) void cast_to_int_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"integer_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"integer_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), 
buildQuery(DSL.equal(ref("integer_value", INTEGER), DSL.castInt(expr)))); } @ParameterizedTest(name = "castLong({0})") @MethodSource({"numericCastSource"}) void cast_to_long_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"long_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"long_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), buildQuery(DSL.equal(ref("long_value", LONG), DSL.castLong(expr)))); } @ParameterizedTest(name = "castFloat({0})") @MethodSource({"numericCastSource"}) void cast_to_float_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"float_value\" : {\n" - + " \"value\" : %f,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToFloat(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"float_value\" : {\n" + + " \"value\" : %f,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToFloat(expr.valueOf().value())), buildQuery(DSL.equal(ref("float_value", FLOAT), DSL.castFloat(expr)))); } @@ -1523,32 +1666,35 @@ void cast_to_float_in_filter(LiteralExpression expr) { void cast_to_double_in_filter(LiteralExpression expr) { // double values affected by floating point imprecision, so we can't compare them in json // (Double)(Float)3.14 -> 3.14000010490417 - assertEquals(castToFloat(expr.valueOf().value()), - DSL.castDouble(expr).valueOf().doubleValue(), 0.00001); + assertEquals( + castToFloat(expr.valueOf().value()), DSL.castDouble(expr).valueOf().doubleValue(), 0.00001); - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"double_value\" : {\n" - + " \"value\" : %2.20f,\n" - + " \"boost\" : 1.0\n" - + " }\n" 
- + " }\n" - + "}", DSL.castDouble(expr).valueOf().doubleValue()), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"double_value\" : {\n" + + " \"value\" : %2.20f,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + DSL.castDouble(expr).valueOf().doubleValue()), buildQuery(DSL.equal(ref("double_value", DOUBLE), DSL.castDouble(expr)))); } @ParameterizedTest(name = "castBooleanTrue({0})") @MethodSource({"booleanTrueCastSource"}) void cast_to_boolean_true_in_filter(LiteralExpression expr) { - String json = "{\n" - + " \"term\" : {\n" - + " \"boolean_value\" : {\n" - + " \"value\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"boolean_value\" : {\n" + + " \"value\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; assertJsonEquals( json, buildQuery(DSL.equal(ref("boolean_value", BOOLEAN), DSL.castBoolean(expr)))); @@ -1557,14 +1703,15 @@ void cast_to_boolean_true_in_filter(LiteralExpression expr) { @ParameterizedTest(name = "castBooleanFalse({0})") @MethodSource({"booleanFalseCastSource"}) void cast_to_boolean_false_in_filter(LiteralExpression expr) { - String json = "{\n" - + " \"term\" : {\n" - + " \"boolean_value\" : {\n" - + " \"value\" : false,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"boolean_value\" : {\n" + + " \"value\" : false,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; assertJsonEquals( json, buildQuery(DSL.equal(ref("boolean_value", BOOLEAN), DSL.castBoolean(expr)))); @@ -1573,118 +1720,153 @@ void cast_to_boolean_false_in_filter(LiteralExpression expr) { @Test void cast_from_boolean() { Expression booleanExpr = literal(false); - String json = "{\n" - + " \"term\" : {\n" - + " \"my_value\" : {\n" - + " \"value\" : 0,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", BYTE), 
DSL.castByte(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", SHORT), DSL.castShort(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", INTEGER), DSL.castInt(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", LONG), DSL.castLong(booleanExpr)))); - - json = "{\n" - + " \"term\" : {\n" - + " \"my_value\" : {\n" - + " \"value\" : 0.0,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", FLOAT), DSL.castFloat(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", DOUBLE), DSL.castDouble(booleanExpr)))); - - json = "{\n" - + " \"term\" : {\n" - + " \"my_value\" : {\n" - + " \"value\" : \"false\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", STRING), DSL.castString(booleanExpr)))); + String json = + "{\n" + + " \"term\" : {\n" + + " \"my_value\" : {\n" + + " \"value\" : 0,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; + assertJsonEquals(json, buildQuery(DSL.equal(ref("my_value", BYTE), DSL.castByte(booleanExpr)))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", SHORT), DSL.castShort(booleanExpr)))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", INTEGER), DSL.castInt(booleanExpr)))); + assertJsonEquals(json, buildQuery(DSL.equal(ref("my_value", LONG), DSL.castLong(booleanExpr)))); + + json = + "{\n" + + " \"term\" : {\n" + + " \"my_value\" : {\n" + + " \"value\" : 0.0,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", FLOAT), DSL.castFloat(booleanExpr)))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", DOUBLE), DSL.castDouble(booleanExpr)))); + + json = + "{\n" + + " \"term\" : {\n" + + " \"my_value\" : {\n" + + " \"value\" : \"false\",\n" + + " \"boost\" : 1.0\n" + 
+ " }\n" + + " }\n" + + "}"; + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", STRING), DSL.castString(booleanExpr)))); } @Test void cast_to_date_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"date_value\" : {\n" - + " \"value\" : \"2021-11-08\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"date_value\" : {\n" + + " \"value\" : \"2021-11-08\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal( - ref("date_value", DATE), DSL.castDate(literal("2021-11-08"))))); - assertJsonEquals(json, buildQuery(DSL.equal( - ref("date_value", DATE), DSL.castDate(literal(new ExprDateValue("2021-11-08")))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref( - "date_value", DATE), DSL.castDate(literal(new ExprDatetimeValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("date_value", DATE), DSL.castDate(literal("2021-11-08"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("date_value", DATE), DSL.castDate(literal(new ExprDateValue("2021-11-08")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("date_value", DATE), + DSL.castDate(literal(new ExprDatetimeValue("2021-11-08 17:00:00")))))); } @Test void cast_to_time_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"time_value\" : {\n" - + " \"value\" : \"17:00:00\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"time_value\" : {\n" + + " \"value\" : \"17:00:00\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal( - ref("time_value", TIME), DSL.castTime(literal("17:00:00"))))); - assertJsonEquals(json, buildQuery(DSL.equal( - ref("time_value", TIME), DSL.castTime(literal(new ExprTimeValue("17:00:00")))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref("time_value", TIME), DSL - 
.castTime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("time_value", TIME), DSL.castTime(literal("17:00:00"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("time_value", TIME), DSL.castTime(literal(new ExprTimeValue("17:00:00")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("time_value", TIME), + DSL.castTime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test void cast_to_datetime_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"datetime_value\" : {\n" - + " \"value\" : \"2021-11-08 17:00:00\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"datetime_value\" : {\n" + + " \"value\" : \"2021-11-08 17:00:00\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal(ref("datetime_value", DATETIME), DSL - .castDatetime(literal("2021-11-08 17:00:00"))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref("datetime_value", DATETIME), DSL - .castDatetime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("datetime_value", DATETIME), + DSL.castDatetime(literal("2021-11-08 17:00:00"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("datetime_value", DATETIME), + DSL.castDatetime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test void cast_to_timestamp_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"timestamp_value\" : {\n" - + " \"value\" : 1636390800000,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"timestamp_value\" : {\n" + + " \"value\" : 1636390800000,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal(ref("timestamp_value", TIMESTAMP), DSL - .castTimestamp(literal("2021-11-08 
17:00:00"))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref("timestamp_value", TIMESTAMP), DSL - .castTimestamp(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("timestamp_value", TIMESTAMP), + DSL.castTimestamp(literal("2021-11-08 17:00:00"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("timestamp_value", TIMESTAMP), + DSL.castTimestamp(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test @@ -1701,8 +1883,10 @@ void cast_in_range_query() { + " }\n" + " }\n" + "}", - buildQuery(DSL.greater(ref("timestamp_value", TIMESTAMP), DSL - .castTimestamp(literal("2021-11-08 17:00:00"))))); + buildQuery( + DSL.greater( + ref("timestamp_value", TIMESTAMP), + DSL.castTimestamp(literal("2021-11-08 17:00:00"))))); } @Test @@ -1718,9 +1902,9 @@ void non_literal_in_cast_should_build_script() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery(DSL.equal(ref("string_value", STRING), DSL.castString(DSL - .add(literal(1), literal(0))))) - ); + buildQuery( + DSL.equal( + ref("string_value", STRING), DSL.castString(DSL.add(literal(1), literal(0)))))); } @Test @@ -1736,13 +1920,13 @@ void non_cast_nested_function_should_build_script() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery(DSL.equal(ref("integer_value", INTEGER), DSL.abs(DSL - .add(literal(1), literal(0))))) - ); + buildQuery( + DSL.equal(ref("integer_value", INTEGER), DSL.abs(DSL.add(literal(1), literal(0)))))); } private static void assertJsonEquals(String expected, String actual) { - assertTrue(new JSONObject(expected).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(expected).similar(new JSONObject(actual)), StringUtils.format("Expected: %s, actual: %s", expected, actual)); } @@ -1751,10 +1935,12 @@ private String buildQuery(Expression expr) { } private void mockToStringSerializer() { - doAnswer(invocation -> { - Expression expr = invocation.getArgument(0); - return expr.toString(); - 
}).when(serializer).serialize(any()); + doAnswer( + invocation -> { + Expression expr = invocation.getArgument(0); + return expr.toString(); + }) + .when(serializer) + .serialize(any()); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java index 37b8326ef4..df3a730bad 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -20,13 +19,12 @@ class LuceneQueryTest { @Test void should_not_support_single_argument_by_default() { - assertFalse(new LuceneQuery(){}.canSupport(DSL.abs(DSL.ref("age", INTEGER)))); + assertFalse(new LuceneQuery() {}.canSupport(DSL.abs(DSL.ref("age", INTEGER)))); } @Test void should_throw_exception_if_not_implemented() { - assertThrows(UnsupportedOperationException.class, () -> - new LuceneQuery(){}.doBuild(null, null, null)); + assertThrows( + UnsupportedOperationException.class, () -> new LuceneQuery() {}.doBuild(null, null, null)); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java index 6906619065..7465bfc5a4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java @@ -35,8 +35,8 @@ public class MatchBoolPrefixQueryTest { private 
final FunctionName matchBoolPrefix = FunctionName.of("match_bool_prefix"); static Stream> generateValidData() { - NamedArgumentExpression field = DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())); + NamedArgumentExpression field = + DSL.namedArgument("field", new ReferenceExpression("field_value", OpenSearchTextType.of())); NamedArgumentExpression query = DSL.namedArgument("query", DSL.literal("query_value")); return List.of( DSL.namedArgument("fuzziness", DSL.literal("AUTO")), @@ -48,8 +48,9 @@ static Stream> generateValidData() { DSL.namedArgument("boost", DSL.literal("1")), DSL.namedArgument("analyzer", DSL.literal("simple")), DSL.namedArgument("operator", DSL.literal("Or")), - DSL.namedArgument("operator", DSL.literal("and")) - ).stream().map(arg -> List.of(field, query, arg)); + DSL.namedArgument("operator", DSL.literal("and"))) + .stream() + .map(arg -> List.of(field, query, arg)); } @ParameterizedTest @@ -60,35 +61,40 @@ public void test_valid_arguments(List validArgs) { @Test public void test_valid_when_two_arguments() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - DSL.namedArgument("query", "query_value")); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument("query", "query_value")); Assertions.assertNotNull(matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(DSL.namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, 
() -> matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_argument() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - DSL.namedArgument("query", "query_value"), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument("query", "query_value"), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @@ -99,14 +105,16 @@ public MatchExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java index 0defee0008..a3cf54bc5f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.opensearch.storage.script.filter.lucene; - import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.List; @@ -27,7 +26,7 @@ import org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance.MatchPhrasePrefixQuery; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) -public class MatchPhrasePrefixQueryTest { +public class MatchPhrasePrefixQueryTest { private final MatchPhrasePrefixQuery matchPhrasePrefixQuery = new MatchPhrasePrefixQuery(); private final FunctionName matchPhrasePrefix = FunctionName.of("match_phrase_prefix"); @@ -35,90 +34,89 @@ public class MatchPhrasePrefixQueryTest { @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_invalid_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + 
DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_analyzer_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void build_succeeds_with_two_arguments() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_slop_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); + 
List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter_lower_case() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "all")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_boost_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("boost", "0.1") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("boost", "0.1")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java index 20ecb869ba..66c4c00059 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.opensearch.storage.script.filter.lucene; - import static 
org.junit.jupiter.api.Assertions.assertThrows; import java.util.List; @@ -37,256 +36,259 @@ public class MatchPhraseQueryTest { @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_invalid_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_analyzer_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); Assertions.assertNotNull(matchPhraseQuery.build(new 
MatchPhraseExpression(arguments))); } @Test public void build_succeeds_with_two_arguments() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_slop_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter_lower_case() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + 
DSL.namedArgument("zero_terms_query", "all")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_no_arguments_match_phrase_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + assertThrows( + SyntaxCheckException.class, + () -> + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_one_argument_match_phrase_syntax() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); - + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, + () -> + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_invalid_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } 
@Test public void test_analyzer_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void build_succeeds_with_two_arguments_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_slop_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new 
MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_zero_terms_query_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_zero_terms_query_parameter_lower_case_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "all")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_no_arguments_matchphrase_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + assertThrows( + SyntaxCheckException.class, + () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void 
test_SyntaxCheckException_when_one_argument_matchphrase_syntax() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); - + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, + () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_SyntaxCheckException_when_invalid_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_analyzer_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); + Assertions.assertNotNull( + 
matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void build_succeeds_with_two_arguments_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); + Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_slop_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); + Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_zero_terms_query_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); + 
Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_zero_terms_query_parameter_lower_case_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "all")); + Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } private class MatchPhraseExpression extends FunctionExpression { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java index ddabb3820e..28b7878d63 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java @@ -35,95 +35,81 @@ public class MatchQueryTest { private final FunctionName matchName = FunctionName.of("match"); private final FunctionName matchQueryName = FunctionName.of("matchquery"); private final FunctionName matchQueryWithUnderscoreName = FunctionName.of("match_query"); - private final FunctionName[] functionNames = - {matchName,matchQueryName, matchQueryWithUnderscoreName}; + private final FunctionName[] functionNames = { + matchName, matchQueryName, matchQueryWithUnderscoreName + }; static Stream> generateValidData() { return Stream.of( List.of( - DSL.namedArgument("field", - new 
ReferenceExpression("field_value", OpenSearchTextType.of())), - DSL.namedArgument("query", DSL.literal("query_value")) - ), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument("query", DSL.literal("query_value"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("analyzer", DSL.literal("standard")) - ), + DSL.namedArgument("analyzer", DSL.literal("standard"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("fuzziness", DSL.literal("AUTO")) - ), + DSL.namedArgument("fuzziness", DSL.literal("AUTO"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("max_expansions", DSL.literal("50")) - ), + DSL.namedArgument("max_expansions", DSL.literal("50"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new 
ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("prefix_length", DSL.literal("0")) - ), + DSL.namedArgument("prefix_length", DSL.literal("0"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")) - ), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("lenient", DSL.literal("false")) - ), + DSL.namedArgument("lenient", DSL.literal("false"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("operator", DSL.literal("OR")) - ), + DSL.namedArgument("operator", DSL.literal("OR"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), 
DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("minimum_should_match", DSL.literal("3")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("3"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("zero_terms_query", DSL.literal("NONE")) - ), + DSL.namedArgument("zero_terms_query", DSL.literal("NONE"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("zero_terms_query", DSL.literal("none")) - ), + DSL.namedArgument("zero_terms_query", DSL.literal("none"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("boost", DSL.literal("1")) - ) - ); + DSL.namedArgument("boost", DSL.literal("1")))); } @ParameterizedTest @@ -135,99 +121,108 @@ public void test_valid_parameters(List validArgs) { @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build(new MatchExpression(arguments))); + assertThrows( + SyntaxCheckException.class, () -> matchQuery.build(new MatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build(new MatchExpression(arguments))); + assertThrows( + 
SyntaxCheckException.class, () -> matchQuery.build(new MatchExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - namedArgument("query", "query_value"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchQuery.build(new MatchExpression(arguments))); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + namedArgument("query", "query_value"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchQuery.build(new MatchExpression(arguments))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_matchquery_syntax(List validArgs) { - Assertions.assertNotNull(matchQuery.build( - new MatchExpression(validArgs, MatchQueryTest.this.matchQueryName))); + Assertions.assertNotNull( + matchQuery.build(new MatchExpression(validArgs, MatchQueryTest.this.matchQueryName))); } @Test public void test_SyntaxCheckException_when_no_arguments_matchquery_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); + assertThrows( + SyntaxCheckException.class, + () -> matchQuery.build(new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); } @Test public void test_SyntaxCheckException_when_one_argument_matchquery_syntax() { List arguments = List.of(namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); + assertThrows( + SyntaxCheckException.class, + () -> matchQuery.build(new MatchExpression(arguments, 
MatchQueryTest.this.matchQueryName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_matchquery_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - namedArgument("query", "query_value"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + namedArgument("query", "query_value"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> matchQuery.build(new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_match_query_syntax(List validArgs) { - Assertions.assertNotNull(matchQuery.build( - new MatchExpression(validArgs, MatchQueryTest.this.matchQueryWithUnderscoreName))); + Assertions.assertNotNull( + matchQuery.build( + new MatchExpression(validArgs, MatchQueryTest.this.matchQueryWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_no_arguments_match_query_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); + assertThrows( + SyntaxCheckException.class, + () -> + matchQuery.build( + new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_one_argument_match_query_syntax() { List arguments = List.of(namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, 
MatchQueryTest.this.matchQueryWithUnderscoreName))); + assertThrows( + SyntaxCheckException.class, + () -> + matchQuery.build( + new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_match_query_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - namedArgument("query", "query_value"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + namedArgument("query", "query_value"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> + matchQuery.build( + new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); } - private NamedArgumentExpression namedArgument(String name, String value) { return DSL.namedArgument(name, DSL.literal(value)); } @@ -244,14 +239,16 @@ public MatchExpression(List arguments, FunctionName funcName) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression 
interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java index 93b0cdbc93..7fcc4a6430 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -39,186 +38,181 @@ class MultiMatchTest { private final FunctionName multiMatchName = FunctionName.of("multimatch"); private final FunctionName snakeCaseMultiMatchName = FunctionName.of("multi_match"); private final FunctionName multiMatchQueryName = FunctionName.of("multimatchquery"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { return Stream.of( - List.of( - DSL.namedArgument("fields", fields_value), - DSL.namedArgument("query", query_value) - ), + List.of(DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value)), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyzer", DSL.literal("simple")) - ), + DSL.namedArgument("analyzer", DSL.literal("simple"))), List.of( DSL.namedArgument("fields", 
fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("boost", DSL.literal("1.3")) - ), + DSL.namedArgument("boost", DSL.literal("1.3"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("cutoff_frequency", DSL.literal("4.2")) - ), + DSL.namedArgument("cutoff_frequency", DSL.literal("4.2"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzziness", DSL.literal("AUTO:2,4")) - ), + DSL.namedArgument("fuzziness", DSL.literal("AUTO:2,4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("lenient", DSL.literal("true")) - ), + DSL.namedArgument("lenient", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("max_expansions", DSL.literal("7")) - ), + DSL.namedArgument("max_expansions", DSL.literal("7"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("minimum_should_match", DSL.literal("4")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("operator", DSL.literal("AND")) - ), + DSL.namedArgument("operator", DSL.literal("AND"))), List.of( DSL.namedArgument("fields", fields_value), 
DSL.namedArgument("query", query_value), - DSL.namedArgument("prefix_length", DSL.literal("7")) - ), + DSL.namedArgument("prefix_length", DSL.literal("7"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")) - ), + DSL.namedArgument("tie_breaker", DSL.literal("0.3"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("type", DSL.literal("cross_fields")) - ), + DSL.namedArgument("type", DSL.literal("cross_fields"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("zero_terms_query", DSL.literal("ALL")) - ), + DSL.namedArgument("zero_terms_query", DSL.literal("ALL"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("zero_terms_query", DSL.literal("all")) - ) - ); + DSL.namedArgument("zero_terms_query", DSL.literal("all")))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_multiMatch(List validArgs) { - Assertions.assertNotNull(multiMatchQuery.build( - new MultiMatchExpression(validArgs))); + Assertions.assertNotNull(multiMatchQuery.build(new MultiMatchExpression(validArgs))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_multi_match(List validArgs) { - Assertions.assertNotNull(multiMatchQuery.build( - new MultiMatchExpression(validArgs, snakeCaseMultiMatchName))); + Assertions.assertNotNull( + multiMatchQuery.build(new MultiMatchExpression(validArgs, snakeCaseMultiMatchName))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_multiMatchQuery(List validArgs) { - Assertions.assertNotNull(multiMatchQuery.build( - new MultiMatchExpression(validArgs, multiMatchQueryName))); + Assertions.assertNotNull( + multiMatchQuery.build(new 
MultiMatchExpression(validArgs, multiMatchQueryName))); } @Test public void test_SyntaxCheckException_when_no_arguments_multiMatch() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_no_arguments_multi_match() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchName))); } @Test public void test_SyntaxCheckException_when_no_arguments_multiMatchQuery() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchQueryName))); } @Test public void test_SyntaxCheckException_when_one_argument_multiMatch() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument_multi_match() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, snakeCaseMultiMatchName))); } @Test public void test_SyntaxCheckException_when_one_argument_multiMatchQuery() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchQueryName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_multiMatch() { - List arguments = List.of( - namedArgument("fields", fields_value), - 
namedArgument("query", query_value), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter_multi_match() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, snakeCaseMultiMatchName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_multiMatchQuery() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchQueryName))); } @@ -235,17 +229,18 @@ public MultiMatchExpression(List arguments, FunctionName funcName) { super(funcName, arguments); } - @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only 
to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java index 01ec85d64d..9518136ff0 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java @@ -27,14 +27,17 @@ class MultiFieldQueryTest { MultiFieldQuery query; private final String testQueryName = "test_query"; - private final Map> actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map> actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach public void setUp() { - query = mock(MultiFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + MultiFieldQuery.class, + Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); when(query.getQueryName()).thenReturn(testQueryName); } @@ -44,17 +47,26 @@ void createQueryBuilderTest() { String sampleField = "fieldA"; float sampleValue = 34f; - var fieldSpec = ImmutableMap.builder().put(sampleField, - ExprValueUtils.floatValue(sampleValue)).build(); + var fieldSpec = + ImmutableMap.builder() + .put(sampleField, 
ExprValueUtils.floatValue(sampleValue)) + .build(); - query.createQueryBuilder(List.of(DSL.namedArgument("fields", - new LiteralExpression(ExprTupleValue.fromExprValueMap(fieldSpec))), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "fields", new LiteralExpression(ExprTupleValue.fromExprValueMap(fieldSpec))), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(argThat( - (ArgumentMatcher>) map -> map.size() == 1 - && map.containsKey(sampleField) && map.containsValue(sampleValue)), - eq(sampleQuery)); + verify(query) + .createBuilder( + argThat( + (ArgumentMatcher>) + map -> + map.size() == 1 + && map.containsKey(sampleField) + && map.containsValue(sampleValue)), + eq(sampleQuery)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java index c4e4f1242a..17b775fa0b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java @@ -23,14 +23,17 @@ class NoFieldQueryTest { NoFieldQuery query; private final String testQueryName = "test_query"; - private final Map actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach void setUp() { - query = mock(NoFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + NoFieldQuery.class, + Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); 
when(query.getQueryName()).thenReturn(testQueryName); } @@ -38,9 +41,10 @@ void setUp() { void createQueryBuilderTest() { String sampleQuery = "field:query"; - query.createQueryBuilder(List.of( - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); verify(query).createBuilder(eq(sampleQuery)); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java index 72a319dbfe..b70595c74b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.serialization; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -57,23 +56,25 @@ public void can_serialize_and_deserialize_functions() { @Test public void cannot_serialize_illegal_expression() { - Expression illegalExpr = new Expression() { - private final Object object = new Object(); // non-serializable - @Override - public ExprValue valueOf(Environment valueEnv) { - return null; - } + Expression illegalExpr = + new Expression() { + private final Object object = new Object(); // non-serializable + + @Override + public ExprValue valueOf(Environment valueEnv) { + return null; + } - @Override - public ExprType type() { - return null; - } + @Override + public ExprType type() { + return null; + } - @Override - public T accept(ExpressionNodeVisitor visitor, C context) { - return null; - } - }; + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + 
return null; + } + }; assertThrows(IllegalStateException.class, () -> serializer.serialize(illegalExpr)); } @@ -81,5 +82,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public void cannot_deserialize_illegal_expression_code() { assertThrows(IllegalStateException.class, () -> serializer.deserialize("hello world")); } - } From b977f1edb7f5d5b208e16c9c69a245ff93e125bd Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Fri, 18 Aug 2023 14:05:48 -0700 Subject: [PATCH 30/42] [Spotless] Applying Google Code Format for opensearch directory (pt 2/2) #17 (#1978) * spotless apply for OpenSearch P2. Signed-off-by: Mitchell Gale * Spotlesss apply run Signed-off-by: Mitchell Gale * Addressed PR comments Signed-off-by: Mitchell Gale * Apply suggestions from code review Co-authored-by: Guian Gumpac Signed-off-by: Mitchell Gale * spotless apply Signed-off-by: Mitchell Gale * fixed json formatting in test. Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Guian Gumpac --- build.gradle | 4 +- .../client/OpenSearchNodeClient.java | 59 +- .../client/OpenSearchRestClient.java | 68 +- .../data/type/OpenSearchGeoPointType.java | 5 +- .../data/type/OpenSearchIpType.java | 5 +- .../data/type/OpenSearchTextType.java | 10 +- .../sql/opensearch/data/utils/Content.java | 6 +- .../data/utils/OpenSearchJsonContent.java | 26 +- .../data/value/OpenSearchExprIpValue.java | 3 +- .../data/value/OpenSearchExprTextValue.java | 5 +- .../value/OpenSearchExprValueFactory.java | 187 +++--- .../executor/OpenSearchQueryManager.java | 4 +- .../protector/ResourceMonitorPlan.java | 28 +- .../monitor/OpenSearchMemoryHealthy.java | 21 +- .../monitor/OpenSearchResourceMonitor.java | 18 +- .../request/OpenSearchQueryRequest.java | 74 +-- .../opensearch/request/OpenSearchRequest.java | 25 +- .../request/OpenSearchRequestBuilder.java | 141 ++-- .../request/OpenSearchScrollRequest.java | 54 +- .../system/OpenSearchSystemRequest.java | 5 +- 
.../response/OpenSearchResponse.java | 174 +++-- .../response/agg/SingleValueParser.java | 7 +- .../opensearch/response/agg/StatsParser.java | 4 +- .../response/agg/TopHitsParser.java | 11 +- .../sql/opensearch/response/agg/Utils.java | 1 + .../opensearch/security/SecurityAccess.java | 5 +- .../setting/OpenSearchSettings.java | 305 +++++---- .../opensearch/storage/OpenSearchIndex.java | 97 ++- .../storage/OpenSearchStorageEngine.java | 8 +- .../storage/scan/OpenSearchIndexScan.java | 34 +- ...OpenSearchIndexScanAggregationBuilder.java | 12 +- .../scan/OpenSearchIndexScanBuilder.java | 30 +- .../scan/OpenSearchIndexScanQueryBuilder.java | 45 +- .../storage/scan/PushDownQueryBuilder.java | 4 +- .../storage/script/StringUtils.java | 2 +- .../script/filter/lucene/LuceneQuery.java | 6 +- .../script/filter/lucene/RangeQuery.java | 16 +- .../script/filter/lucene/TermQuery.java | 5 +- .../filter/lucene/relevance/QueryQuery.java | 8 +- .../lucene/relevance/QueryStringQuery.java | 12 +- .../lucene/relevance/RelevanceQuery.java | 55 +- .../relevance/SimpleQueryStringQuery.java | 8 +- .../lucene/relevance/SingleFieldQuery.java | 20 +- .../lucene/relevance/WildcardQuery.java | 9 +- .../storage/script/sort/SortQueryBuilder.java | 37 +- .../storage/system/OpenSearchSystemIndex.java | 22 +- .../system/OpenSearchSystemIndexScan.java | 13 +- .../system/OpenSearchSystemIndexSchema.java | 88 ++- .../client/OpenSearchNodeClientTest.java | 175 +++-- .../client/OpenSearchRestClientTest.java | 203 +++--- .../OpenSearchExprGeoPointValueTest.java | 1 - .../data/value/OpenSearchExprIpValueTest.java | 1 - .../value/OpenSearchExprTextValueTest.java | 76 ++- .../value/OpenSearchExprValueFactoryTest.java | 616 +++++++++--------- .../executor/OpenSearchQueryManagerTest.java | 35 +- .../executor/ResourceMonitorPlanTest.java | 13 +- .../monitor/OpenSearchMemoryHealthyTest.java | 16 +- .../OpenSearchResourceMonitorTest.java | 20 +- .../request/OpenSearchQueryRequestTest.java | 102 ++- 
.../request/OpenSearchRequestBuilderTest.java | 247 +++---- .../request/OpenSearchScrollRequestTest.java | 148 ++--- ...enSearchAggregationResponseParserTest.java | 14 +- .../response/OpenSearchResponseTest.java | 121 ++-- .../setting/OpenSearchSettingsTest.java | 38 +- .../storage/OpenSearchIndexTest.java | 133 ++-- .../storage/OpenSearchStorageEngineTest.java | 25 +- ...SearchIndexScanAggregationBuilderTest.java | 7 +- .../OpenSearchIndexScanOptimizationTest.java | 586 ++++++----------- .../OpenSearchIndexScanPaginationTest.java | 46 +- .../storage/scan/OpenSearchIndexScanTest.java | 193 +++--- .../scan/PushDownQueryBuilderTest.java | 19 +- .../script/filter/lucene/QueryStringTest.java | 102 +-- .../script/filter/lucene/QueryTest.java | 107 +-- .../script/filter/lucene/RangeQueryTest.java | 10 +- .../filter/lucene/SimpleQueryStringTest.java | 112 ++-- .../filter/lucene/WildcardQueryTest.java | 43 +- .../relevance/RelevanceQueryBuildTest.java | 31 +- .../relevance/SingleFieldQueryTest.java | 47 +- .../script/sort/SortQueryBuilderTest.java | 55 +- .../system/OpenSearchSystemIndexScanTest.java | 4 +- .../system/OpenSearchSystemIndexTest.java | 29 +- .../sql/opensearch/utils/Utils.java | 11 +- 82 files changed, 2396 insertions(+), 2776 deletions(-) diff --git a/build.gradle b/build.gradle index 2bdc4865bb..2ab7abc42a 100644 --- a/build.gradle +++ b/build.gradle @@ -93,7 +93,9 @@ spotless { 'spark/**/*.java', 'plugin/**/*.java', 'ppl/**/*.java', - 'integ-test/**/*java' + 'integ-test/**/*java', + 'core/**/*.java', + 'opensearch/**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java index c6d44e2c23..993e092534 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java +++ 
b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -40,9 +39,7 @@ public class OpenSearchNodeClient implements OpenSearchClient { /** Node client provided by OpenSearch container. */ private final NodeClient client; - /** - * Constructor of OpenSearchNodeClient. - */ + /** Constructor of OpenSearchNodeClient. */ public OpenSearchNodeClient(NodeClient client) { this.client = client; } @@ -50,8 +47,8 @@ public OpenSearchNodeClient(NodeClient client) { @Override public boolean exists(String indexName) { try { - IndicesExistsResponse checkExistResponse = client.admin().indices() - .exists(new IndicesExistsRequest(indexName)).actionGet(); + IndicesExistsResponse checkExistResponse = + client.admin().indices().exists(new IndicesExistsRequest(indexName)).actionGet(); return checkExistResponse.isExists(); } catch (Exception e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exists", e); @@ -83,13 +80,12 @@ public void createIndex(String indexName, Map mappings) { @Override public Map getIndexMappings(String... 
indexExpression) { try { - GetMappingsResponse mappingsResponse = client.admin().indices() - .prepareGetMappings(indexExpression) - .setLocal(true) - .get(); - return mappingsResponse.mappings().entrySet().stream().collect(Collectors.toUnmodifiableMap( - Map.Entry::getKey, - cursor -> new IndexMapping(cursor.getValue()))); + GetMappingsResponse mappingsResponse = + client.admin().indices().prepareGetMappings(indexExpression).setLocal(true).get(); + return mappingsResponse.mappings().entrySet().stream() + .collect( + Collectors.toUnmodifiableMap( + Map.Entry::getKey, cursor -> new IndexMapping(cursor.getValue()))); } catch (IndexNotFoundException e) { // Re-throw directly to be treated as client error finally throw e; @@ -127,15 +123,11 @@ public Map getIndexMaxResultWindows(String... indexExpression) } } - /** - * TODO: Scroll doesn't work for aggregation. Support aggregation later. - */ + /** TODO: Scroll doesn't work for aggregation. Support aggregation later. */ @Override public OpenSearchResponse search(OpenSearchRequest request) { return request.search( - req -> client.search(req).actionGet(), - req -> client.searchScroll(req).actionGet() - ); + req -> client.search(req).actionGet(), req -> client.searchScroll(req).actionGet()); } /** @@ -145,13 +137,12 @@ public OpenSearchResponse search(OpenSearchRequest request) { */ @Override public List indices() { - final GetIndexResponse indexResponse = client.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get(); + final GetIndexResponse indexResponse = + client.admin().indices().prepareGetIndex().setLocal(true).get(); final Stream aliasStream = ImmutableList.copyOf(indexResponse.aliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); @@ -164,20 +155,20 @@ public List indices() { */ @Override public Map meta() 
{ - return ImmutableMap.of(META_CLUSTER_NAME, - client.settings().get("cluster.name", "opensearch")); + return ImmutableMap.of(META_CLUSTER_NAME, client.settings().get("cluster.name", "opensearch")); } @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - client.prepareClearScroll().addScrollId(scrollId).get(); - } catch (Exception e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + client.prepareClearScroll().addScrollId(scrollId).get(); + } catch (Exception e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java index c27c4bbc30..b6106982a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -49,8 +48,7 @@ public class OpenSearchRestClient implements OpenSearchClient { @Override public boolean exists(String indexName) { try { - return client.indices().exists( - new GetIndexRequest(indexName), RequestOptions.DEFAULT); + return client.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exist", e); } @@ -59,8 +57,9 @@ public boolean exists(String indexName) { @Override public void createIndex(String indexName, Map mappings) { try { - client.indices().create( - new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); + 
client + .indices() + .create(new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to create index [" + indexName + "]", e); } @@ -80,27 +79,29 @@ public Map getIndexMappings(String... indexExpression) { @Override public Map getIndexMaxResultWindows(String... indexExpression) { - GetSettingsRequest request = new GetSettingsRequest() - .indices(indexExpression).includeDefaults(true); + GetSettingsRequest request = + new GetSettingsRequest().indices(indexExpression).includeDefaults(true); try { GetSettingsResponse response = client.indices().getSettings(request, RequestOptions.DEFAULT); Map settings = response.getIndexToSettings(); Map defaultSettings = response.getIndexToDefaultSettings(); Map result = new HashMap<>(); - defaultSettings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); - - settings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); + defaultSettings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); + + settings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); return result; } catch (IOException e) { @@ -126,8 +127,7 @@ public OpenSearchResponse search(OpenSearchRequest request) { throw new IllegalStateException( "Failed to perform scroll operation with request " + req, e); } - } - ); + }); } /** @@ -142,7 +142,8 @@ public List indices() { client.indices().get(new GetIndexRequest(), RequestOptions.DEFAULT); final Stream aliasStream = 
ImmutableList.copyOf(indexResponse.getAliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); } catch (IOException e) { @@ -173,16 +174,17 @@ public Map meta() { @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - ClearScrollRequest clearRequest = new ClearScrollRequest(); - clearRequest.addScrollId(scrollId); - client.clearScroll(clearRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + ClearScrollRequest clearRequest = new ClearScrollRequest(); + clearRequest.addScrollId(scrollId); + client.clearScroll(clearRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java index c2428a59a8..75137973c5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of a geo_point value. See - * doc + * The type of a geo_point value. 
See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchGeoPointType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java index fccafc6caf..22581ec28c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of an ip value. See - * doc + * The type of an ip value. See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchIpType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java index 67b7296834..e7e453ca3f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java @@ -15,8 +15,8 @@ import org.opensearch.sql.data.type.ExprType; /** - * The type of a text value. See - * doc + * The type of text value. See doc */ public class OpenSearchTextType extends OpenSearchDataType { @@ -24,8 +24,7 @@ public class OpenSearchTextType extends OpenSearchDataType { // text could have fields // a read-only collection - @EqualsAndHashCode.Exclude - Map fields = ImmutableMap.of(); + @EqualsAndHashCode.Exclude Map fields = ImmutableMap.of(); private OpenSearchTextType() { super(MappingType.Text); @@ -34,6 +33,7 @@ private OpenSearchTextType() { /** * Constructs a Text Type using the passed in fields argument. 
+ * * @param fields The fields to be used to construct the text type. * @return A new OpenSeachTextTypeObject */ @@ -67,7 +67,7 @@ protected OpenSearchDataType cloneEmpty() { } /** - * Text field doesn't have doc value (exception thrown even when you call "get") + * Text field doesn't have doc value (exception thrown even when you call "get")
* Limitation: assume inner field name is always "keyword". */ public static String convertTextToKeyword(String fieldName, ExprType fieldType) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java index 0c3d2aec45..0fbd2d4f98 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java @@ -10,12 +10,12 @@ import org.apache.commons.lang3.tuple.Pair; /** - * * Regardless the underling data format, the {@link Content} define the data in abstract manner. * which could be parsed by ElasticsearchExprValueFactory. There are two major use cases: + * *
    - *
  1. Represent the JSON data retrieve from OpenSearch search response.
  2. - *
  3. Represent the Object data extract from the OpenSearch aggregation response.
  4. + *
  5. Represent the JSON data retrieve from OpenSearch search response. + *
  6. Represent the Object data extract from the OpenSearch aggregation response. *
*/ public interface Content { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java index 61da7c3b74..bdb15428e1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import com.fasterxml.jackson.databind.JsonNode; @@ -14,9 +13,7 @@ import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.tuple.Pair; -/** - * The Implementation of Content to represent {@link JsonNode}. - */ +/** The Implementation of Content to represent {@link JsonNode}. */ @RequiredArgsConstructor public class OpenSearchJsonContent implements Content { @@ -68,8 +65,7 @@ public Iterator> map() { final JsonNode mapValue = value(); mapValue .fieldNames() - .forEachRemaining( - field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); + .forEachRemaining(field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); return map.entrySet().iterator(); } @@ -133,33 +129,27 @@ public Pair geoValue() { lat = extractDoubleValue(value.get("lat")); } catch (Exception exception) { throw new IllegalStateException( - "latitude must be number value, but got value: " + value.get( - "lat")); + "latitude must be number value, but got value: " + value.get("lat")); } try { lon = extractDoubleValue(value.get("lon")); } catch (Exception exception) { throw new IllegalStateException( - "longitude must be number value, but got value: " + value.get( - "lon")); + "longitude must be number value, but got value: " + value.get("lon")); } return Pair.of(lat, lon); } else { - throw new IllegalStateException("geo point must in format of {\"lat\": number, \"lon\": " - + "number}"); + throw 
new IllegalStateException( + "geo point must in format of {\"lat\": number, \"lon\": number}"); } } - /** - * Getter for value. If value is array the whole array is returned. - */ + /** Getter for value. If value is array the whole array is returned. */ private JsonNode value() { return value; } - /** - * Get doubleValue from JsonNode if possible. - */ + /** Get doubleValue from JsonNode if possible. */ private Double extractDoubleValue(JsonNode node) { if (node.isTextual()) { return Double.valueOf(node.textValue()); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java index a17deb7e45..30b3784bfc 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import java.util.Objects; @@ -14,7 +13,7 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchIpType; /** - * OpenSearch IP ExprValue. + * OpenSearch IP ExprValue
* Todo, add this to avoid the unknown value type exception, the implementation will be changed. */ @RequiredArgsConstructor diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java index d093588168..fb696d6b04 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Expression Text Value, it is a extension of the ExprValue by OpenSearch. - */ +/** Expression Text Value, it is a extension of the ExprValue by OpenSearch. */ public class OpenSearchExprTextValue extends ExprStringValue { public OpenSearchExprTextValue(String value) { super(value); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java index 4e3e1ec5c0..22c2ece4a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -71,18 +70,15 @@ import org.opensearch.sql.opensearch.data.utils.OpenSearchJsonContent; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * Construct ExprValue from OpenSearch response. 
- */ +/** Construct ExprValue from OpenSearch response. */ public class OpenSearchExprValueFactory { - /** - * The Mapping of Field and ExprType. - */ + /** The Mapping of Field and ExprType. */ private final Map typeMapping; /** - * Extend existing mapping by new data without overwrite. - * Called from aggregation only {@see AggregationQueryBuilder#buildTypeMapping}. + * Extend existing mapping by new data without overwrite. Called from aggregation only {@see + * AggregationQueryBuilder#buildTypeMapping}. + * * @param typeMapping A data type mapping produced by aggregation. */ public void extendTypeMapping(Map typeMapping) { @@ -95,9 +91,7 @@ public void extendTypeMapping(Map typeMapping) { } } - @Getter - @Setter - private OpenSearchAggregationResponseParser parser; + @Getter @Setter private OpenSearchAggregationResponseParser parser; private static final String TOP_PATH = ""; @@ -105,48 +99,62 @@ public void extendTypeMapping(Map typeMapping) { private static final Map> typeActionMap = new ImmutableMap.Builder>() - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), (c, dt) -> new ExprIntegerValue(c.intValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), (c, dt) -> new ExprLongValue(c.longValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), (c, dt) -> new ExprShortValue(c.shortValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), (c, dt) -> new ExprByteValue(c.byteValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), (c, dt) -> new ExprFloatValue(c.floatValue())) - 
.put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), (c, dt) -> new ExprDoubleValue(c.doubleValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), (c, dt) -> new OpenSearchExprTextValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), (c, dt) -> new ExprStringValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), (c, dt) -> ExprBooleanValue.of(c.booleanValue())) - //Handles the creation of DATE, TIME & DATETIME + // Handles the creation of DATE, TIME & DATETIME .put(OpenSearchDateType.of(TIME), OpenSearchExprValueFactory::createOpenSearchDateType) .put(OpenSearchDateType.of(DATE), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(TIMESTAMP), - OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(DATETIME), + .put( + OpenSearchDateType.of(TIMESTAMP), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), + .put( + OpenSearchDateType.of(DATETIME), OpenSearchExprValueFactory::createOpenSearchDateType) + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), (c, dt) -> new OpenSearchExprIpValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), - (c, dt) -> new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), - c.geoValue().getRight())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), + (c, dt) -> + new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), c.geoValue().getRight())) + .put( + 
OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), (c, dt) -> new OpenSearchExprBinaryValue(c.stringValue())) .build(); - /** - * Constructor of OpenSearchExprValueFactory. - */ + /** Constructor of OpenSearchExprValueFactory. */ public OpenSearchExprValueFactory(Map typeMapping) { this.typeMapping = OpenSearchDataType.traverseAndFlatten(typeMapping); } /** + * + * + *
    * The struct construction has the following assumption:
    *  1. The field has OpenSearch Object data type.
    *     See 
@@ -155,19 +163,23 @@ public OpenSearchExprValueFactory(Map typeMapping) {
    *     { "employ",       "STRUCT"  }
    *     { "employ.id",    "INTEGER" }
    *     { "employ.state", "STRING"  }
+   *  
*/ public ExprValue construct(String jsonString, boolean supportArrays) { try { - return parse(new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), TOP_PATH, - Optional.of(STRUCT), supportArrays); + return parse( + new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), + TOP_PATH, + Optional.of(STRUCT), + supportArrays); } catch (JsonProcessingException e) { throw new IllegalStateException(String.format("invalid json: %s.", jsonString), e); } } /** - * Construct ExprValue from field and its value object. Throw exception if trying - * to construct from field of unsupported type. + * Construct ExprValue from field and its value object. Throw exception if trying to construct + * from field of unsupported type.
* Todo, add IP, GeoPoint support after we have function implementation around it. * * @param field field name @@ -179,11 +191,7 @@ public ExprValue construct(String field, Object value, boolean supportArrays) { } private ExprValue parse( - Content content, - String field, - Optional fieldType, - boolean supportArrays - ) { + Content content, String field, Optional fieldType, boolean supportArrays) { if (content.isNull() || !fieldType.isPresent()) { return ExprNullValue.of(); } @@ -207,16 +215,16 @@ private ExprValue parse( } /** - * In OpenSearch, it is possible field doesn't have type definition in mapping. - * but has empty value. For example, {"empty_field": []}. + * In OpenSearch, it is possible field doesn't have type definition in mapping. but has empty + * value. For example, {"empty_field": []}. */ private Optional type(String field) { return Optional.ofNullable(typeMapping.get(field)); } /** - * Parse value with the first matching formatter into {@link ExprValue} - * with corresponding {@link ExprCoreType}. + * Parse value with the first matching formatter into {@link ExprValue} with corresponding {@link + * ExprCoreType}. 
* * @param value - time as string * @param dataType - field data type @@ -232,12 +240,12 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da TemporalAccessor accessor = formatter.parse(value); ZonedDateTime zonedDateTime = DateFormatters.from(accessor); switch (returnFormat) { - case TIME: return new ExprTimeValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); - case DATE: return new ExprDateValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); - default: return new ExprTimestampValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); + case TIME: + return new ExprTimeValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); + case DATE: + return new ExprDateValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); + default: + return new ExprTimestampValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); } } catch (IllegalArgumentException ignored) { // nothing to do, try another format @@ -247,19 +255,22 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da // if no formatters are available, try the default formatter try { switch (returnFormat) { - case TIME: return new ExprTimeValue( - DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); - case DATE: return new ExprDateValue( - DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); - default: return new ExprTimestampValue( - DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); + case TIME: + return new ExprTimeValue( + DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); + case DATE: + return new ExprDateValue( + DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); + default: + return new ExprTimestampValue( + DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); } } catch (DateTimeParseException ignored) { // ignored } - throw new 
IllegalArgumentException(String.format( - "Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); + throw new IllegalArgumentException( + String.format("Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); } private static ExprValue createOpenSearchDateType(Content value, ExprType type) { @@ -270,8 +281,8 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) var numFormatters = dt.getNumericNamedFormatters(); if (numFormatters.size() > 0 || !dt.hasFormats()) { long epochMillis = 0; - if (numFormatters.contains(DateFormatter.forPattern( - FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { + if (numFormatters.contains( + DateFormatter.forPattern(FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { // no CamelCase for `EPOCH_*` formats epochMillis = value.longValue() * 1000; } else /* EPOCH_MILLIS */ { @@ -279,9 +290,12 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) } Instant instant = Instant.ofEpochMilli(epochMillis); switch ((ExprCoreType) returnFormat) { - case TIME: return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); - case DATE: return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); - default: return new ExprTimestampValue(instant); + case TIME: + return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); + case DATE: + return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); + default: + return new ExprTimestampValue(instant); } } else { // custom format @@ -297,6 +311,7 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) /** * Parse struct content. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param supportArrays Parsing the whole array if array is type nested. 
@@ -304,15 +319,23 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) */ private ExprValue parseStruct(Content content, String prefix, boolean supportArrays) { LinkedHashMap result = new LinkedHashMap<>(); - content.map().forEachRemaining(entry -> result.put(entry.getKey(), - parse(entry.getValue(), - makeField(prefix, entry.getKey()), - type(makeField(prefix, entry.getKey())), supportArrays))); + content + .map() + .forEachRemaining( + entry -> + result.put( + entry.getKey(), + parse( + entry.getValue(), + makeField(prefix, entry.getKey()), + type(makeField(prefix, entry.getKey())), + supportArrays))); return new ExprTupleValue(result); } /** * Parse array content. Can also parse nested which isn't necessarily an array. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param type Type of content parsing. @@ -320,32 +343,31 @@ private ExprValue parseStruct(Content content, String prefix, boolean supportArr * @return Value parsed from content. */ private ExprValue parseArray( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { List result = new ArrayList<>(); // ARRAY is mapped to nested but can take the json structure of an Object. if (content.objectValue() instanceof ObjectNode) { result.add(parseStruct(content, prefix, supportArrays)); // non-object type arrays are only supported when parsing inner_hits of OS response. 
- } else if ( - !(type instanceof OpenSearchDataType + } else if (!(type instanceof OpenSearchDataType && ((OpenSearchDataType) type).getExprType().equals(ARRAY)) && !supportArrays) { return parseInnerArrayValue(content.array().next(), prefix, type, supportArrays); } else { - content.array().forEachRemaining(v -> { - result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); - }); + content + .array() + .forEachRemaining( + v -> { + result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); + }); } return new ExprCollectionValue(result); } /** * Parse inner array value. Can be object type and recurse continues. + * * @param content Array index being parsed. * @param prefix Prefix for value. * @param type Type of inner array value. @@ -353,11 +375,7 @@ private ExprValue parseArray( * @return Inner array value. */ private ExprValue parseInnerArrayValue( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { if (type instanceof OpenSearchIpType || type instanceof OpenSearchBinaryType || type instanceof OpenSearchDateType @@ -382,6 +400,7 @@ private ExprValue parseInnerArrayValue( /** * Make complete path string for field. + * * @param path Path of field. * @param field Field to append to path. * @return Field appended to path level. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java index 9c6fcdb825..dbe91dc398 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java @@ -18,9 +18,7 @@ import org.opensearch.sql.executor.execution.AbstractPlan; import org.opensearch.threadpool.ThreadPool; -/** - * QueryManager implemented in OpenSearch cluster. 
- */ +/** QueryManager implemented in OpenSearch cluster. */ @RequiredArgsConstructor public class OpenSearchQueryManager implements QueryManager { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java index 4c02affc5e..e3bc48ba72 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import java.io.IOException; @@ -19,36 +18,23 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * A PhysicalPlan which will run the delegate plan in resource protection manner. - */ +/** A PhysicalPlan which will run the delegate plan in resource protection manner. */ @ToString @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class ResourceMonitorPlan extends PhysicalPlan implements SerializablePlan { - /** - * How many method calls to delegate's next() to perform resource check once. - */ + /** How many method calls to delegate's next() to perform resource check once. */ public static final long NUMBER_OF_NEXT_CALL_TO_CHECK = 1000; - /** - * Delegated PhysicalPlan. - */ + /** Delegated PhysicalPlan. */ private final PhysicalPlan delegate; - /** - * ResourceMonitor. - */ - @ToString.Exclude - private final ResourceMonitor monitor; - - /** - * Count how many calls to delegate's next() already. - */ - @EqualsAndHashCode.Exclude - private long nextCallCount = 0L; + /** ResourceMonitor. */ + @ToString.Exclude private final ResourceMonitor monitor; + /** Count how many calls to delegate's next() already. 
*/ + @EqualsAndHashCode.Exclude private long nextCallCount = 0L; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java index c0a4aeb0b7..4b7b6c5dcb 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import com.google.common.annotations.VisibleForTesting; @@ -11,9 +10,7 @@ import lombok.NoArgsConstructor; import lombok.extern.log4j.Log4j2; -/** - * OpenSearch Memory Monitor. - */ +/** OpenSearch Memory Monitor. */ @Log4j2 public class OpenSearchMemoryHealthy { private final RandomFail randomFail; @@ -25,16 +22,12 @@ public OpenSearchMemoryHealthy() { } @VisibleForTesting - public OpenSearchMemoryHealthy( - RandomFail randomFail, - MemoryUsage memoryUsage) { + public OpenSearchMemoryHealthy(RandomFail randomFail, MemoryUsage memoryUsage) { this.randomFail = randomFail; this.memoryUsage = memoryUsage; } - /** - * Is Memory Healthy. Calculate based on the current heap memory usage. - */ + /** Is Memory Healthy. Calculate based on the current heap memory usage. 
*/ public boolean isMemoryHealthy(long limitBytes) { final long memoryUsage = this.memoryUsage.usage(); log.debug("Memory usage:{}, limit:{}", memoryUsage, limitBytes); @@ -66,12 +59,8 @@ public long usage() { } @NoArgsConstructor - public static class MemoryUsageExceedFastFailureException extends RuntimeException { - - } + public static class MemoryUsageExceedFastFailureException extends RuntimeException {} @NoArgsConstructor - public static class MemoryUsageExceedException extends RuntimeException { - - } + public static class MemoryUsageExceedException extends RuntimeException {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java index 5ed82c7a5d..3990fef7b7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import io.github.resilience4j.core.IntervalFunction; @@ -17,7 +16,7 @@ /** * {@link ResourceMonitor} implementation on Elasticsearch. When the heap memory usage exceeds - * certain threshold, the monitor is not healthy. + * certain threshold, the monitor is not healthy.
* Todo, add metrics. */ @Log4j2 @@ -26,20 +25,15 @@ public class OpenSearchResourceMonitor extends ResourceMonitor { private final Retry retry; private final OpenSearchMemoryHealthy memoryMonitor; - /** - * Constructor of ElasticsearchCircuitBreaker. - */ - public OpenSearchResourceMonitor( - Settings settings, - OpenSearchMemoryHealthy memoryMonitor) { + /** Constructor. */ + public OpenSearchResourceMonitor(Settings settings, OpenSearchMemoryHealthy memoryMonitor) { this.settings = settings; RetryConfig config = RetryConfig.custom() .maxAttempts(3) .intervalFunction(IntervalFunction.ofExponentialRandomBackoff(1000)) .retryExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) - .ignoreExceptions( - OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) + .ignoreExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) .build(); retry = Retry.of("mem", config); this.memoryMonitor = memoryMonitor; @@ -55,9 +49,7 @@ public boolean isHealthy() { try { ByteSizeValue limit = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); Supplier booleanSupplier = - Retry.decorateSupplier(retry, - () -> memoryMonitor - .isMemoryHealthy(limit.getBytes())); + Retry.decorateSupplier(retry, () -> memoryMonitor.isMemoryHealthy(limit.getBytes())); return booleanSupplier.get(); } catch (Exception e) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java index 919596eee2..6447a3ff65 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -33,49 +32,31 @@ @ToString public class OpenSearchQueryRequest 
implements OpenSearchRequest { - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * OpenSearchExprValueFactory. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + /** List of includes expected in the response. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; - /** - * List of includes expected in the response. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; - - /** - * Indicate the search already done. - */ + /** Indicate the search already done. */ private boolean searchDone = false; - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(String indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + String indexName, int size, OpenSearchExprValueFactory factory, List includes) { this(new IndexName(indexName), size, factory, includes); } - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(IndexName indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + IndexName indexName, int size, OpenSearchExprValueFactory factory, List includes) { this.indexName = indexName; this.sourceBuilder = new SearchSourceBuilder(); sourceBuilder.from(0); @@ -85,11 +66,12 @@ public OpenSearchQueryRequest(IndexName indexName, int size, this.includes = includes; } - /** - * Constructor of OpenSearchQueryRequest. 
- */ - public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + IndexName indexName, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory factory, + List includes) { this.indexName = indexName; this.sourceBuilder = sourceBuilder; this.exprValueFactory = factory; @@ -97,22 +79,24 @@ public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBui } @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { if (searchDone) { return new OpenSearchResponse(SearchHits.empty(), exprValueFactory, includes); } else { searchDone = true; return new OpenSearchResponse( - searchAction.apply(new SearchRequest() - .indices(indexName.getIndexNames()) - .source(sourceBuilder)), exprValueFactory, includes); + searchAction.apply( + new SearchRequest().indices(indexName.getIndexNames()).source(sourceBuilder)), + exprValueFactory, + includes); } } @Override public void clean(Consumer cleanAction) { - //do nothing. + // do nothing. 
} @Override @@ -122,7 +106,7 @@ public boolean hasAnotherBatch() { @Override public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("OpenSearchQueryRequest serialization " - + "is not implemented."); + throw new UnsupportedOperationException( + "OpenSearchQueryRequest serialization is not implemented."); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java index 5c9d0033c1..f775d55296 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -20,14 +19,10 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.OpenSearchResponse; -/** - * OpenSearch search request. - */ +/** OpenSearch search request. */ public interface OpenSearchRequest extends Writeable { - /** - * Default query timeout in minutes. - */ + /** Default query timeout in minutes. */ TimeValue DEFAULT_QUERY_TIMEOUT = TimeValue.timeValueMinutes(1L); /** @@ -37,8 +32,9 @@ public interface OpenSearchRequest extends Writeable { * @param scrollAction scroll search action. * @return OpenSearchResponse. */ - OpenSearchResponse search(Function searchAction, - Function scrollAction); + OpenSearchResponse search( + Function searchAction, + Function scrollAction); /** * Apply the cleanAction on request. @@ -49,21 +45,20 @@ OpenSearchResponse search(Function searchAction, /** * Get the OpenSearchExprValueFactory. + * * @return OpenSearchExprValueFactory. */ OpenSearchExprValueFactory getExprValueFactory(); /** * Check if there is more data to get from OpenSearch. 
- * @return True if calling {@ref OpenSearchClient.search} with this request will - * return non-empty response. + * + * @return True if calling {@ref OpenSearchClient.search} with this request will return non-empty + * response. */ boolean hasAnotherBatch(); - /** - * OpenSearch Index Name. - * Indices are separated by ",". - */ + /** OpenSearch Index Name. Indices are separated by ",". */ @EqualsAndHashCode class IndexName implements Writeable { private static final String COMMA = ","; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java index 80259f15d3..1df3dcb183 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static java.util.stream.Collectors.mapping; @@ -47,47 +46,36 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * OpenSearch search request builder. - */ +/** OpenSearch search request builder. */ @EqualsAndHashCode @Getter @ToString public class OpenSearchRequestBuilder { - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * Query size of the request -- how many rows will be returned. - */ + /** Query size of the request -- how many rows will be returned. */ private int requestedTotalSize; - /** - * Size of each page request to return. - */ + /** Size of each page request to return. */ private Integer pageSize = null; - /** - * OpenSearchExprValueFactory. 
- */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + private int startFrom = 0; - /** - * Constructor. - */ - public OpenSearchRequestBuilder(int requestedTotalSize, - OpenSearchExprValueFactory exprValueFactory) { + /** Constructor. */ + public OpenSearchRequestBuilder( + int requestedTotalSize, OpenSearchExprValueFactory exprValueFactory) { this.requestedTotalSize = requestedTotalSize; - this.sourceBuilder = new SearchSourceBuilder() - .from(startFrom) - .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) - .trackScores(false); + this.sourceBuilder = + new SearchSourceBuilder() + .from(startFrom) + .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) + .trackScores(false); this.exprValueFactory = exprValueFactory; } @@ -96,13 +84,11 @@ public OpenSearchRequestBuilder(int requestedTotalSize, * * @return query request or scroll request */ - public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, - int maxResultWindow, TimeValue scrollTimeout) { + public OpenSearchRequest build( + OpenSearchRequest.IndexName indexName, int maxResultWindow, TimeValue scrollTimeout) { int size = requestedTotalSize; FetchSourceContext fetchSource = this.sourceBuilder.fetchSource(); - List includes = fetchSource != null - ? Arrays.asList(fetchSource.includes()) - : List.of(); + List includes = fetchSource != null ? 
Arrays.asList(fetchSource.includes()) : List.of(); if (pageSize == null) { if (startFrom + size > maxResultWindow) { sourceBuilder.size(maxResultWindow - startFrom); @@ -118,12 +104,11 @@ public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, throw new UnsupportedOperationException("Non-zero offset is not supported with pagination"); } sourceBuilder.size(pageSize); - return new OpenSearchScrollRequest(indexName, scrollTimeout, - sourceBuilder, exprValueFactory, includes); + return new OpenSearchScrollRequest( + indexName, scrollTimeout, sourceBuilder, exprValueFactory, includes); } } - boolean isBoolFilterQuery(QueryBuilder current) { return (current instanceof BoolQueryBuilder); } @@ -131,7 +116,7 @@ boolean isBoolFilterQuery(QueryBuilder current) { /** * Push down query to DSL request. * - * @param query query request + * @param query query request */ public void pushDownFilter(QueryBuilder query) { QueryBuilder current = sourceBuilder.query(); @@ -142,9 +127,7 @@ public void pushDownFilter(QueryBuilder query) { if (isBoolFilterQuery(current)) { ((BoolQueryBuilder) current).filter(query); } else { - sourceBuilder.query(QueryBuilders.boolQuery() - .filter(current) - .filter(query)); + sourceBuilder.query(QueryBuilders.boolQuery().filter(current).filter(query)); } } @@ -181,9 +164,7 @@ public void pushDownSort(List> sortBuilders) { } } - /** - * Pushdown size (limit) and from (offset) to DSL request. - */ + /** Pushdown size (limit) and from (offset) to DSL request. */ public void pushDownLimit(Integer limit, Integer offset) { requestedTotalSize = limit; startFrom = offset; @@ -200,6 +181,7 @@ public void pushDownPageSize(int pageSize) { /** * Add highlight to DSL requests. 
+ * * @param field name of the field to highlight */ public void pushDownHighlight(String field, Map arguments) { @@ -208,32 +190,34 @@ public void pushDownHighlight(String field, Map arguments) { // OS does not allow duplicates of highlight fields if (sourceBuilder.highlighter().fields().stream() .anyMatch(f -> f.name().equals(unquotedField))) { - throw new SemanticCheckException(String.format( - "Duplicate field %s in highlight", field)); + throw new SemanticCheckException(String.format("Duplicate field %s in highlight", field)); } sourceBuilder.highlighter().field(unquotedField); } else { - HighlightBuilder highlightBuilder = - new HighlightBuilder().field(unquotedField); + HighlightBuilder highlightBuilder = new HighlightBuilder().field(unquotedField); sourceBuilder.highlighter(highlightBuilder); } // lastFieldIndex denotes previously set highlighter with field parameter int lastFieldIndex = sourceBuilder.highlighter().fields().size() - 1; if (arguments.containsKey("pre_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .preTags(arguments.get("pre_tags").toString()); } if (arguments.containsKey("post_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .postTags(arguments.get("post_tags").toString()); } } - /** - * Push down project list to DSL requests. - */ + /** Push down project list to DSL requests. */ public void pushDownProjects(Set projects) { sourceBuilder.fetchSource( projects.stream().map(ReferenceExpression::getAttr).distinct().toArray(String[]::new), @@ -254,21 +238,22 @@ private boolean isSortByDocOnly() { /** * Push down nested to sourceBuilder. + * * @param nestedArgs : Nested arguments to push down. 
*/ public void pushDownNested(List> nestedArgs) { initBoolQueryFilter(); List nestedQueries = extractNestedQueries(query()); - groupFieldNamesByPath(nestedArgs).forEach( - (path, fieldNames) -> - buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path)) - ); + groupFieldNamesByPath(nestedArgs) + .forEach( + (path, fieldNames) -> + buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path))); } /** - * InnerHit must be added to the NestedQueryBuilder. We need to extract - * the nested queries currently in the query if there is already a filter - * push down with nested query. + * InnerHit must be added to the NestedQueryBuilder. We need to extract the nested queries + * currently in the query if there is already a filter push down with nested query. + * * @param query : current query. * @return : grouped nested queries currently in query. */ @@ -289,9 +274,7 @@ public int getMaxResponseSize() { return pageSize == null ? requestedTotalSize : pageSize; } - /** - * Initialize bool query for push down. - */ + /** Initialize bool query for push down. */ private void initBoolQueryFilter() { if (sourceBuilder.query() == null) { sourceBuilder.query(QueryBuilders.boolQuery()); @@ -304,44 +287,42 @@ private void initBoolQueryFilter() { /** * Map all field names in nested queries that use same path. + * * @param fields : Fields for nested queries. * @return : Map of path and associated field names. */ private Map> groupFieldNamesByPath( List> fields) { // TODO filter out reverse nested when supported - .filter(not(isReverseNested())) - return fields.stream().collect( - Collectors.groupingBy( - m -> m.get("path").toString(), - mapping( - m -> m.get("field").toString(), - toList() - ) - ) - ); + return fields.stream() + .collect( + Collectors.groupingBy( + m -> m.get("path").toString(), mapping(m -> m.get("field").toString(), toList()))); } /** * Build inner hits portion to nested query. 
+ * * @param paths : Set of all paths used in nested queries. * @param query : Current pushDown query. */ private void buildInnerHit(List paths, NestedQueryBuilder query) { - query.innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, paths.toArray(new String[0]), null) - )); + query.innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, paths.toArray(new String[0]), null))); } /** - * We need to group nested queries with same path for adding new fields with same path of - * inner hits. If we try to add additional inner hits with same path we get an OS error. + * We need to group nested queries with same path for adding new fields with same path of inner + * hits. If we try to add additional inner hits with same path we get an OS error. + * * @param nestedQueries Current list of nested queries in query. * @param path path comparing with current nested queries. * @return Query with same path or new empty nested query. */ private NestedQueryBuilder findNestedQueryWithSamePath( - List nestedQueries, String path - ) { + List nestedQueries, String path) { return nestedQueries.stream() .filter(query -> isSamePath(path, query)) .findAny() @@ -350,6 +331,7 @@ private NestedQueryBuilder findNestedQueryWithSamePath( /** * Check if is nested query is of the same path value. + * * @param path Value of path to compare with nested query. * @param query nested query builder to compare with path. * @return true if nested query has same path. @@ -358,9 +340,7 @@ private boolean isSamePath(String path, NestedQueryBuilder query) { return nestedQuery(path, query.query(), query.scoreMode()).equals(query); } - /** - * Create a nested query with match all filter to place inner hits. - */ + /** Create a nested query with match all filter to place inner hits. 
*/ private Supplier createEmptyNestedQuery(String path) { return () -> { NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); @@ -371,6 +351,7 @@ private Supplier createEmptyNestedQuery(String path) { /** * Return current query. + * * @return : Current source builder query. */ private BoolQueryBuilder query() { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java index 34e8fcd096..c9490f0767 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -41,62 +40,56 @@ public class OpenSearchScrollRequest implements OpenSearchRequest { /** * Search request used to initiate paged (scrolled) search. Not needed to get subsequent pages. */ - @EqualsAndHashCode.Exclude - private final transient SearchRequest initialSearchRequest; + @EqualsAndHashCode.Exclude private final transient SearchRequest initialSearchRequest; + /** Scroll context timeout. */ private final TimeValue scrollTimeout; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; /** Index name. */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; /** * Scroll id which is set after first request issued. Because OpenSearchClient is shared by * multiple threads so this state has to be maintained here. 
*/ - @Setter - @Getter - private String scrollId = NO_SCROLL_ID; + @Setter @Getter private String scrollId = NO_SCROLL_ID; public static final String NO_SCROLL_ID = ""; - @EqualsAndHashCode.Exclude - private boolean needClean = true; + @EqualsAndHashCode.Exclude private boolean needClean = true; - @Getter - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; + @Getter @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; /** Constructor. */ - public OpenSearchScrollRequest(IndexName indexName, - TimeValue scrollTimeout, - SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + public OpenSearchScrollRequest( + IndexName indexName, + TimeValue scrollTimeout, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.indexName = indexName; this.scrollTimeout = scrollTimeout; this.exprValueFactory = exprValueFactory; - this.initialSearchRequest = new SearchRequest() - .indices(indexName.getIndexNames()) - .scroll(scrollTimeout) - .source(sourceBuilder); + this.initialSearchRequest = + new SearchRequest() + .indices(indexName.getIndexNames()) + .scroll(scrollTimeout) + .source(sourceBuilder); this.includes = includes; } - - /** Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. + /** + * Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. */ @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { SearchResponse openSearchResponse; if (isScroll()) { openSearchResponse = scrollAction.apply(scrollRequest()); @@ -172,6 +165,7 @@ public void writeTo(StreamOutput out) throws IOException { /** * Constructs OpenSearchScrollRequest from serialized representation. + * * @param in stream to read data from. 
* @param engine OpenSearchSqlEngine to get node-specific context. * @throws IOException thrown if reading from input {@code in} fails. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java index a2fbf79624..2969c7639b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import java.util.List; import org.opensearch.sql.data.model.ExprValue; -/** - * OpenSearch system request query against the system index. - */ +/** OpenSearch system request query against the system index. */ public interface OpenSearchSystemRequest { /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java index 03abfbf6c1..e43777a740 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static org.opensearch.sql.opensearch.storage.OpenSearchIndex.METADATAFIELD_TYPE_MAP; @@ -35,52 +34,37 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; -/** - * OpenSearch search response. - */ +/** OpenSearch search response. */ @EqualsAndHashCode @ToString public class OpenSearchResponse implements Iterable { - /** - * Search query result (non-aggregation). - */ + /** Search query result (non-aggregation). 
*/ private final SearchHits hits; - /** - * Search aggregation result. - */ + /** Search aggregation result. */ private final Aggregations aggregations; - /** - * List of requested include fields. - */ + /** List of requested include fields. */ private final List includes; - /** - * OpenSearchExprValueFactory used to build ExprValue from search result. - */ - @EqualsAndHashCode.Exclude - private final OpenSearchExprValueFactory exprValueFactory; + /** OpenSearchExprValueFactory used to build ExprValue from search result. */ + @EqualsAndHashCode.Exclude private final OpenSearchExprValueFactory exprValueFactory; - /** - * Constructor of OpenSearchResponse. - */ - public OpenSearchResponse(SearchResponse searchResponse, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse. */ + public OpenSearchResponse( + SearchResponse searchResponse, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.hits = searchResponse.getHits(); this.aggregations = searchResponse.getAggregations(); this.exprValueFactory = exprValueFactory; this.includes = includes; } - /** - * Constructor of OpenSearchResponse with SearchHits. - */ - public OpenSearchResponse(SearchHits hits, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse with SearchHits. 
*/ + public OpenSearchResponse( + SearchHits hits, OpenSearchExprValueFactory exprValueFactory, List includes) { this.hits = hits; this.aggregations = null; this.exprValueFactory = exprValueFactory; @@ -111,48 +95,52 @@ public Iterator iterator() { return handleAggregationResponse(); } else { return Arrays.stream(hits.getHits()) - .map(hit -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - addParsedHitsToBuilder(builder, hit); - addMetaDataFieldsToBuilder(builder, hit); - addHighlightsToBuilder(builder, hit); - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + .map( + hit -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + addParsedHitsToBuilder(builder, hit); + addMetaDataFieldsToBuilder(builder, hit); + addHighlightsToBuilder(builder, hit); + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } /** - * Parse response for all hits to add to builder. Inner_hits supports arrays of objects - * with nested type. + * Parse response for all hits to add to builder. Inner_hits supports arrays of objects with + * nested type. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addParsedHitsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { builder.putAll( - exprValueFactory.construct( - hit.getSourceAsString(), - !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty()) - ).tupleValue()); + exprValueFactory + .construct( + hit.getSourceAsString(), + !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty())) + .tupleValue()); } /** * If highlight fields are present in response add the fields to the builder. + * * @param builder builder to build values from response. * @param hit Search hit from response. 
*/ private void addHighlightsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { if (!hit.getHighlightFields().isEmpty()) { var hlBuilder = ImmutableMap.builder(); for (var es : hit.getHighlightFields().entrySet()) { - hlBuilder.put(es.getKey(), ExprValueUtils.collectionValue( - Arrays.stream(es.getValue().fragments()).map( - Text::toString).collect(Collectors.toList()))); + hlBuilder.put( + es.getKey(), + ExprValueUtils.collectionValue( + Arrays.stream(es.getValue().fragments()) + .map(Text::toString) + .collect(Collectors.toList()))); } builder.put("_highlight", ExprTupleValue.fromExprValueMap(hlBuilder.build())); } @@ -160,58 +148,56 @@ private void addHighlightsToBuilder( /** * Add metadata fields to builder from response. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addMetaDataFieldsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { - List metaDataFieldSet = includes.stream() - .filter(METADATAFIELD_TYPE_MAP::containsKey) - .collect(Collectors.toList()); - ExprFloatValue maxScore = Float.isNaN(hits.getMaxScore()) - ? 
null : new ExprFloatValue(hits.getMaxScore()); - - metaDataFieldSet.forEach(metaDataField -> { - if (metaDataField.equals(METADATA_FIELD_INDEX)) { - builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); - } else if (metaDataField.equals(METADATA_FIELD_ID)) { - builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); - } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { - if (!Float.isNaN(hit.getScore())) { - builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); - } - } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { - if (maxScore != null) { - builder.put(METADATA_FIELD_MAXSCORE, maxScore); - } - } else if (metaDataField.equals(METADATA_FIELD_SORT)) { - builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); - } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ - builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); - } - }); + ImmutableMap.Builder builder, SearchHit hit) { + List metaDataFieldSet = + includes.stream().filter(METADATAFIELD_TYPE_MAP::containsKey).collect(Collectors.toList()); + ExprFloatValue maxScore = + Float.isNaN(hits.getMaxScore()) ? 
null : new ExprFloatValue(hits.getMaxScore()); + + metaDataFieldSet.forEach( + metaDataField -> { + if (metaDataField.equals(METADATA_FIELD_INDEX)) { + builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); + } else if (metaDataField.equals(METADATA_FIELD_ID)) { + builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); + } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { + if (!Float.isNaN(hit.getScore())) { + builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); + } + } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { + if (maxScore != null) { + builder.put(METADATA_FIELD_MAXSCORE, maxScore); + } + } else if (metaDataField.equals(METADATA_FIELD_SORT)) { + builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); + } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ + builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); + } + }); } /** * Handle an aggregation response. + * * @return Parsed and built return values from response. 
*/ private Iterator handleAggregationResponse() { - return exprValueFactory.getParser().parse(aggregations).stream().map(entry -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (Map.Entry value : entry.entrySet()) { - builder.put( - value.getKey(), - exprValueFactory.construct( - value.getKey(), - value.getValue(), - false - ) - ); - } - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + return exprValueFactory.getParser().parse(aggregations).stream() + .map( + entry -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Map.Entry value : entry.entrySet()) { + builder.put( + value.getKey(), + exprValueFactory.construct(value.getKey(), value.getValue(), false)); + } + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java index 384e07ad8f..1492fedfc2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java @@ -23,9 +23,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation; -/** - * {@link NumericMetricsAggregation.SingleValue} metric parser. - */ +/** {@link NumericMetricsAggregation.SingleValue} metric parser. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class SingleValueParser implements MetricParser { @@ -35,7 +33,6 @@ public class SingleValueParser implements MetricParser { @Override public Map parse(Aggregation agg) { return Collections.singletonMap( - agg.getName(), - handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); + agg.getName(), handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java index c80b75de05..82a2f8648f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java @@ -24,9 +24,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.ExtendedStats; -/** - * {@link ExtendedStats} metric parser. - */ +/** {@link ExtendedStats} metric parser. */ @EqualsAndHashCode @RequiredArgsConstructor public class StatsParser implements MetricParser { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java index a98e1b4ce3..b29b44f033 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.agg; import java.util.Arrays; @@ -16,21 +15,19 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.TopHits; -/** - * {@link TopHits} metric parser. - */ +/** {@link TopHits} metric parser. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class TopHitsParser implements MetricParser { - @Getter - private final String name; + @Getter private final String name; @Override public Map parse(Aggregation agg) { return Collections.singletonMap( agg.getName(), Arrays.stream(((TopHits) agg).getHits().getHits()) - .flatMap(h -> h.getSourceAsMap().values().stream()).collect(Collectors.toList())); + .flatMap(h -> h.getSourceAsMap().values().stream()) + .collect(Collectors.toList())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java index 953f4d19b4..9ce46c6de6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java @@ -19,6 +19,7 @@ public class Utils { /** * Utils to handle Nan/Infinite Value. + * * @return null if is Nan or is +-Infinity. */ public static Object handleNanInfValue(double value) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java index 0c1b2e58b1..95c52ea275 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.security; import java.security.AccessController; @@ -17,9 +16,7 @@ */ public class SecurityAccess { - /** - * Execute the operation in privileged mode. - */ + /** Execute the operation in privileged mode. 
*/ public static T doPrivileged(final PrivilegedExceptionAction operation) { SpecialPermission.check(); try { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java index 0810312974..133903dabe 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static org.opensearch.common.settings.Settings.EMPTY; @@ -27,129 +26,172 @@ import org.opensearch.sql.common.setting.LegacySettings; import org.opensearch.sql.common.setting.Settings; -/** - * Setting implementation on OpenSearch. - */ +/** Setting implementation on OpenSearch. */ @Log4j2 public class OpenSearchSettings extends Settings { - /** - * Default settings. - */ + /** Default settings. */ private final Map> defaultSettings; - /** - * Latest setting value for each registered key. Thread-safe is required. - */ + + /** Latest setting value for each registered key. Thread-safe is required. 
*/ @VisibleForTesting private final Map latestSettings = new ConcurrentHashMap<>(); - public static final Setting SQL_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.SQL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_SLOWLOG_SETTING = Setting.intSetting( - Key.SQL_SLOWLOG.getKeyValue(), - LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = Setting.positiveTimeSetting( - Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), - LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_DELETE_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_DELETE_ENABLED.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting PPL_ENABLED_SETTING = Setting.boolSetting( - Key.PPL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.PPL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_MEMORY_LIMIT_SETTING = new Setting<>( - Key.QUERY_MEMORY_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, - (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio( - s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_SIZE_LIMIT_SETTING = Setting.intSetting( - Key.QUERY_SIZE_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_WINDOW_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_WINDOW.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, - 2L, - Setting.Property.NodeScope, - 
Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_INTERVAL_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_INTERVAL.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, - 1L, - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting SQL_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.SQL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_SLOWLOG_SETTING = + Setting.intSetting( + Key.SQL_SLOWLOG.getKeyValue(), + LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = + Setting.positiveTimeSetting( + Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), + LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_DELETE_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_DELETE_ENABLED.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting PPL_ENABLED_SETTING = + Setting.boolSetting( + Key.PPL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.PPL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_MEMORY_LIMIT_SETTING = + new Setting<>( + Key.QUERY_MEMORY_LIMIT.getKeyValue(), + LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, + (s) -> + MemorySizeValue.parseBytesSizeValueOrHeapRatio( + s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_SIZE_LIMIT_SETTING = + Setting.intSetting( + Key.QUERY_SIZE_LIMIT.getKeyValue(), + LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public 
static final Setting METRICS_ROLLING_WINDOW_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_WINDOW.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting METRICS_ROLLING_INTERVAL_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_INTERVAL.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, + 1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); // we are keeping this to not break upgrades if the config is already present. // This will be completely removed in 3.0. - public static final Setting DATASOURCE_CONFIG = SecureSetting.secureFile( - "plugins.query.federation.datasources.config", - null, - Setting.Property.Deprecated); - - public static final Setting DATASOURCE_MASTER_SECRET_KEY = Setting.simpleString( - ENCYRPTION_MASTER_KEY.getKeyValue(), - Setting.Property.NodeScope, - Setting.Property.Final, - Setting.Property.Filtered); - - public static final Setting DATASOURCE_URI_ALLOW_HOSTS = Setting.simpleString( - Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), - ".*", - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting DATASOURCE_CONFIG = + SecureSetting.secureFile( + "plugins.query.federation.datasources.config", null, Setting.Property.Deprecated); - /** - * Construct OpenSearchSetting. - * The OpenSearchSetting must be singleton. - */ + public static final Setting DATASOURCE_MASTER_SECRET_KEY = + Setting.simpleString( + ENCYRPTION_MASTER_KEY.getKeyValue(), + Setting.Property.NodeScope, + Setting.Property.Final, + Setting.Property.Filtered); + + public static final Setting DATASOURCE_URI_ALLOW_HOSTS = + Setting.simpleString( + Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), + ".*", + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + /** Construct OpenSearchSetting. The OpenSearchSetting must be singleton. 
*/ @SuppressWarnings("unchecked") public OpenSearchSettings(ClusterSettings clusterSettings) { ImmutableMap.Builder> settingBuilder = new ImmutableMap.Builder<>(); - register(settingBuilder, clusterSettings, Key.SQL_ENABLED, - SQL_ENABLED_SETTING, new Updater(Key.SQL_ENABLED)); - register(settingBuilder, clusterSettings, Key.SQL_SLOWLOG, - SQL_SLOWLOG_SETTING, new Updater(Key.SQL_SLOWLOG)); - register(settingBuilder, clusterSettings, Key.SQL_CURSOR_KEEP_ALIVE, - SQL_CURSOR_KEEP_ALIVE_SETTING, new Updater(Key.SQL_CURSOR_KEEP_ALIVE)); - register(settingBuilder, clusterSettings, Key.SQL_DELETE_ENABLED, - SQL_DELETE_ENABLED_SETTING, new Updater(Key.SQL_DELETE_ENABLED)); - register(settingBuilder, clusterSettings, Key.PPL_ENABLED, - PPL_ENABLED_SETTING, new Updater(Key.PPL_ENABLED)); - register(settingBuilder, clusterSettings, Key.QUERY_MEMORY_LIMIT, - QUERY_MEMORY_LIMIT_SETTING, new Updater(Key.QUERY_MEMORY_LIMIT)); - register(settingBuilder, clusterSettings, Key.QUERY_SIZE_LIMIT, - QUERY_SIZE_LIMIT_SETTING, new Updater(Key.QUERY_SIZE_LIMIT)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_WINDOW, - METRICS_ROLLING_WINDOW_SETTING, new Updater(Key.METRICS_ROLLING_WINDOW)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_INTERVAL, - METRICS_ROLLING_INTERVAL_SETTING, new Updater(Key.METRICS_ROLLING_INTERVAL)); - register(settingBuilder, clusterSettings, Key.DATASOURCES_URI_ALLOWHOSTS, - DATASOURCE_URI_ALLOW_HOSTS, new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); - registerNonDynamicSettings(settingBuilder, clusterSettings, Key.CLUSTER_NAME, - ClusterName.CLUSTER_NAME_SETTING); + register( + settingBuilder, + clusterSettings, + Key.SQL_ENABLED, + SQL_ENABLED_SETTING, + new Updater(Key.SQL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.SQL_SLOWLOG, + SQL_SLOWLOG_SETTING, + new Updater(Key.SQL_SLOWLOG)); + register( + settingBuilder, + clusterSettings, + Key.SQL_CURSOR_KEEP_ALIVE, + SQL_CURSOR_KEEP_ALIVE_SETTING, + new 
Updater(Key.SQL_CURSOR_KEEP_ALIVE)); + register( + settingBuilder, + clusterSettings, + Key.SQL_DELETE_ENABLED, + SQL_DELETE_ENABLED_SETTING, + new Updater(Key.SQL_DELETE_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.PPL_ENABLED, + PPL_ENABLED_SETTING, + new Updater(Key.PPL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_MEMORY_LIMIT, + QUERY_MEMORY_LIMIT_SETTING, + new Updater(Key.QUERY_MEMORY_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_SIZE_LIMIT, + QUERY_SIZE_LIMIT_SETTING, + new Updater(Key.QUERY_SIZE_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_WINDOW, + METRICS_ROLLING_WINDOW_SETTING, + new Updater(Key.METRICS_ROLLING_WINDOW)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_INTERVAL, + METRICS_ROLLING_INTERVAL_SETTING, + new Updater(Key.METRICS_ROLLING_INTERVAL)); + register( + settingBuilder, + clusterSettings, + Key.DATASOURCES_URI_ALLOWHOSTS, + DATASOURCE_URI_ALLOW_HOSTS, + new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); + registerNonDynamicSettings( + settingBuilder, clusterSettings, Key.CLUSTER_NAME, ClusterName.CLUSTER_NAME_SETTING); defaultSettings = settingBuilder.build(); } @@ -159,36 +201,33 @@ public T getSettingValue(Settings.Key key) { return (T) latestSettings.getOrDefault(key, defaultSettings.get(key).getDefault(EMPTY)); } - /** - * Register the pair of {key, setting}. - */ - private void register(ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, - Setting setting, - Consumer updater) { + /** Register the pair of {key, setting}. 
*/ + private void register( + ImmutableMap.Builder> settingBuilder, + ClusterSettings clusterSettings, + Settings.Key key, + Setting setting, + Consumer updater) { if (clusterSettings.get(setting) != null) { latestSettings.put(key, clusterSettings.get(setting)); } settingBuilder.put(key, setting); - clusterSettings - .addSettingsUpdateConsumer(setting, updater); + clusterSettings.addSettingsUpdateConsumer(setting, updater); } - /** - * Register Non Dynamic Settings without consumer. - */ + /** Register Non Dynamic Settings without consumer. */ private void registerNonDynamicSettings( ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, + ClusterSettings clusterSettings, + Settings.Key key, Setting setting) { settingBuilder.put(key, setting); latestSettings.put(key, clusterSettings.get(setting)); } - /** - * Add the inner class only for UT coverage purpose. - * Lambda could be much elegant solution. But which is hard to test. + * Add the inner class only for UT coverage purpose. Lambda could be much elegant solution. But + * which is hard to test. */ @VisibleForTesting @RequiredArgsConstructor @@ -202,9 +241,7 @@ public void accept(Object newValue) { } } - /** - * Used by Plugin to init Setting. - */ + /** Used by Plugin to init Setting. */ public static List> pluginSettings() { return new ImmutableList.Builder>() .add(SQL_ENABLED_SETTING) @@ -220,9 +257,7 @@ public static List> pluginSettings() { .build(); } - /** - * Init Non Dynamic Plugin Settings. - */ + /** Init Non Dynamic Plugin Settings. */ public static List> pluginNonDynamicSettings() { return new ImmutableList.Builder>() .add(DATASOURCE_MASTER_SECRET_KEY) @@ -230,9 +265,7 @@ public static List> pluginNonDynamicSettings() { .build(); } - /** - * Used by local cluster to get settings from a setting instance. - */ + /** Used by local cluster to get settings from a setting instance. 
*/ public List> getSettings() { return pluginSettings(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java index 62617f744e..c6afdb8511 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import com.google.common.annotations.VisibleForTesting; @@ -47,43 +46,33 @@ public class OpenSearchIndex implements Table { public static final String METADATA_FIELD_ROUTING = "_routing"; - public static final java.util.Map METADATAFIELD_TYPE_MAP = Map.of( - METADATA_FIELD_ID, ExprCoreType.STRING, - METADATA_FIELD_INDEX, ExprCoreType.STRING, - METADATA_FIELD_SCORE, ExprCoreType.FLOAT, - METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, - METADATA_FIELD_SORT, ExprCoreType.LONG, - METADATA_FIELD_ROUTING, ExprCoreType.STRING - ); + public static final java.util.Map METADATAFIELD_TYPE_MAP = + Map.of( + METADATA_FIELD_ID, ExprCoreType.STRING, + METADATA_FIELD_INDEX, ExprCoreType.STRING, + METADATA_FIELD_SCORE, ExprCoreType.FLOAT, + METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, + METADATA_FIELD_SORT, ExprCoreType.LONG, + METADATA_FIELD_ROUTING, ExprCoreType.STRING); /** OpenSearch client connection. */ private final OpenSearchClient client; private final Settings settings; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final OpenSearchRequest.IndexName indexName; - /** - * The cached mapping of field and type in index. - */ + /** The cached mapping of field and type in index. */ private Map cachedFieldOpenSearchTypes = null; - /** - * The cached ExprType of fields. - */ + /** The cached ExprType of fields. 
*/ private Map cachedFieldTypes = null; - /** - * The cached max result window setting of index. - */ + /** The cached max result window setting of index. */ private Integer cachedMaxResultWindow = null; - /** - * Constructor. - */ + /** Constructor. */ public OpenSearchIndex(OpenSearchClient client, Settings settings, String indexName) { this.client = client; this.settings = settings; @@ -113,22 +102,24 @@ public void create(Map schema) { * or lazy evaluate when query engine pulls field type. */ /** - * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} - * it returns a flattened map. + * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} it returns a + * flattened map. + * * @return A map between field names and matching `ExprCoreType`s. */ @Override public Map getFieldTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } if (cachedFieldTypes == null) { - cachedFieldTypes = OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes) - .entrySet().stream().collect( - LinkedHashMap::new, - (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), - Map::putAll); + cachedFieldTypes = + OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes).entrySet().stream() + .collect( + LinkedHashMap::new, + (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), + Map::putAll); } return cachedFieldTypes; } @@ -140,19 +131,18 @@ public Map getReservedFieldTypes() { /** * Get parsed mapping info. + * * @return A complete map between field names and their types. 
*/ public Map getFieldOpenSearchTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } return cachedFieldOpenSearchTypes; } - /** - * Get the max result window setting of the table. - */ + /** Get the max result window setting of the table. */ public Integer getMaxResultWindow() { if (cachedMaxResultWindow == null) { cachedMaxResultWindow = @@ -161,9 +151,7 @@ public Integer getMaxResultWindow() { return cachedMaxResultWindow; } - /** - * TODO: Push down operations to index scan operator as much as possible in future. - */ + /** TODO: Push down operations to index scan operator as much as possible in future. */ @Override public PhysicalPlan implement(LogicalPlan plan) { // TODO: Leave it here to avoid impact Prometheus and AD operators. Need to move to Planner. @@ -175,12 +163,13 @@ public TableScanBuilder createScanBuilder() { final int querySizeLimit = settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT); final TimeValue cursorKeepAlive = settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - var builder = new OpenSearchRequestBuilder( - querySizeLimit, - createExprValueFactory()); + var builder = new OpenSearchRequestBuilder(querySizeLimit, createExprValueFactory()); Function createScanOperator = - requestBuilder -> new OpenSearchIndexScan(client, requestBuilder.getMaxResponseSize(), - requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); + requestBuilder -> + new OpenSearchIndexScan( + client, + requestBuilder.getMaxResponseSize(), + requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); return new OpenSearchIndexScanBuilder(builder, createScanOperator); } @@ -193,27 +182,27 @@ private OpenSearchExprValueFactory createExprValueFactory() { @VisibleForTesting @RequiredArgsConstructor - public static class 
OpenSearchDefaultImplementor - extends DefaultImplementor { + public static class OpenSearchDefaultImplementor extends DefaultImplementor { private final OpenSearchClient client; @Override public PhysicalPlan visitMLCommons(LogicalMLCommons node, OpenSearchIndexScan context) { - return new MLCommonsOperator(visitChild(node, context), node.getAlgorithm(), - node.getArguments(), client.getNodeClient()); + return new MLCommonsOperator( + visitChild(node, context), + node.getAlgorithm(), + node.getArguments(), + client.getNodeClient()); } @Override public PhysicalPlan visitAD(LogicalAD node, OpenSearchIndexScan context) { - return new ADOperator(visitChild(node, context), - node.getArguments(), client.getNodeClient()); + return new ADOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } @Override public PhysicalPlan visitML(LogicalML node, OpenSearchIndexScan context) { - return new MLOperator(visitChild(node, context), - node.getArguments(), client.getNodeClient()); + return new MLOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java index c915fa549b..7c022e2190 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.opensearch.sql.utils.SystemIndexUtils.isSystemIndex; @@ -22,10 +21,9 @@ public class OpenSearchStorageEngine implements StorageEngine { /** OpenSearch client connection. 
*/ - @Getter - private final OpenSearchClient client; - @Getter - private final Settings settings; + @Getter private final OpenSearchClient client; + + @Getter private final Settings settings; @Override public Table getTable(DataSourceSchemaName dataSourceSchemaName, String name) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java index 0ca9cde3d2..b2e9319bb1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import java.io.IOException; @@ -26,9 +25,7 @@ import org.opensearch.sql.planner.SerializablePlan; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. - */ +/** OpenSearch index scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchIndexScan extends TableScanOperator implements SerializablePlan { @@ -37,14 +34,10 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab private OpenSearchClient client; /** Search request. */ - @EqualsAndHashCode.Include - @ToString.Include - private OpenSearchRequest request; + @EqualsAndHashCode.Include @ToString.Include private OpenSearchRequest request; /** Largest number of rows allowed in the response. */ - @EqualsAndHashCode.Include - @ToString.Include - private int maxResponseSize; + @EqualsAndHashCode.Include @ToString.Include private int maxResponseSize; /** Number of rows returned. */ private Integer queryCount; @@ -52,12 +45,9 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab /** Search response for current batch. 
*/ private Iterator iterator; - /** - * Creates index scan based on a provided OpenSearchRequestBuilder. - */ - public OpenSearchIndexScan(OpenSearchClient client, - int maxResponseSize, - OpenSearchRequest request) { + /** Creates index scan based on a provided OpenSearchRequestBuilder. */ + public OpenSearchIndexScan( + OpenSearchClient client, int maxResponseSize, OpenSearchRequest request) { this.client = client; this.maxResponseSize = maxResponseSize; this.request = request; @@ -106,12 +96,13 @@ public String explain() { return request.toString(); } - /** No-args constructor. + /** + * No-args constructor. + * * @deprecated Exists only to satisfy Java serialization API. */ @Deprecated(since = "introduction") - public OpenSearchIndexScan() { - } + public OpenSearchIndexScan() {} @Override public void readExternal(ObjectInput in) throws IOException { @@ -119,8 +110,9 @@ public void readExternal(ObjectInput in) throws IOException { byte[] requestStream = new byte[reqSize]; in.read(requestStream); - var engine = (OpenSearchStorageEngine) ((PlanSerializer.CursorDeserializationStream) in) - .resolveObject("engine"); + var engine = + (OpenSearchStorageEngine) + ((PlanSerializer.CursorDeserializationStream) in).resolveObject("engine"); try (BytesStreamInput bsi = new BytesStreamInput(requestStream)) { request = new OpenSearchScrollRequest(bsi, engine); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java index d5f89d2579..02ac21a39d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java @@ -24,9 +24,7 @@ import org.opensearch.sql.planner.logical.LogicalFilter; import 
org.opensearch.sql.planner.logical.LogicalSort; -/** - * Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. - */ +/** Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. */ @EqualsAndHashCode class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { @@ -42,9 +40,8 @@ class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { /** Sorting items pushed down. */ private List> sortList; - - OpenSearchIndexScanAggregationBuilder(OpenSearchRequestBuilder requestBuilder, - LogicalAggregation aggregation) { + OpenSearchIndexScanAggregationBuilder( + OpenSearchRequestBuilder requestBuilder, LogicalAggregation aggregation) { this.requestBuilder = requestBuilder; aggregatorList = aggregation.getAggregatorList(); groupByList = aggregation.getGroupByList(); @@ -57,8 +54,7 @@ public OpenSearchRequestBuilder build() { Pair, OpenSearchAggregationResponseParser> aggregationBuilder = builder.buildAggregationBuilder(aggregatorList, groupByList, sortList); requestBuilder.pushDownAggregation(aggregationBuilder); - requestBuilder.pushTypeMapping( - builder.buildTypeMapping(aggregatorList, groupByList)); + requestBuilder.pushTypeMapping(builder.buildTypeMapping(aggregatorList, groupByList)); return requestBuilder; } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java index edcbedc7a7..8a2f3e98f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java @@ -30,29 +30,24 @@ public class OpenSearchIndexScanBuilder extends TableScanBuilder { private final Function scanFactory; - /** - * Delegated index scan builder for non-aggregate or aggregate query. 
- */ - @EqualsAndHashCode.Include - private PushDownQueryBuilder delegate; + + /** Delegated index scan builder for non-aggregate or aggregate query. */ + @EqualsAndHashCode.Include private PushDownQueryBuilder delegate; /** Is limit operator pushed down. */ private boolean isLimitPushedDown = false; - /** - * Constructor used during query execution. - */ - public OpenSearchIndexScanBuilder(OpenSearchRequestBuilder requestBuilder, + /** Constructor used during query execution. */ + public OpenSearchIndexScanBuilder( + OpenSearchRequestBuilder requestBuilder, Function scanFactory) { this.delegate = new OpenSearchIndexScanQueryBuilder(requestBuilder); this.scanFactory = scanFactory; - } - /** - * Constructor used for unit tests. - */ - protected OpenSearchIndexScanBuilder(PushDownQueryBuilder translator, + /** Constructor used for unit tests. */ + protected OpenSearchIndexScanBuilder( + PushDownQueryBuilder translator, Function scanFactory) { this.delegate = translator; this.scanFactory = scanFactory; @@ -117,13 +112,16 @@ public boolean pushDownNested(LogicalNested nested) { /** * Valid if sorting is only by fields. 
+ * * @param sort Logical sort * @return True if sorting by fields only */ private boolean sortByFieldsOnly(LogicalSort sort) { return sort.getSortList().stream() - .map(sortItem -> sortItem.getRight() instanceof ReferenceExpression - || isNestedFunction(sortItem.getRight())) + .map( + sortItem -> + sortItem.getRight() instanceof ReferenceExpression + || isNestedFunction(sortItem.getRight())) .reduce(true, Boolean::logicalAnd); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java index 590272a9f1..f4b0b05256 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java @@ -35,8 +35,8 @@ import org.opensearch.sql.planner.logical.LogicalSort; /** - * Index scan builder for simple non-aggregate query used by - * {@link OpenSearchIndexScanBuilder} internally. + * Index scan builder for simple non-aggregate query used by {@link OpenSearchIndexScanBuilder} + * internally. 
*/ @VisibleForTesting @EqualsAndHashCode @@ -50,13 +50,11 @@ public OpenSearchIndexScanQueryBuilder(OpenSearchRequestBuilder requestBuilder) @Override public boolean pushDownFilter(LogicalFilter filter) { - FilterQueryBuilder queryBuilder = new FilterQueryBuilder( - new DefaultExpressionSerializer()); + FilterQueryBuilder queryBuilder = new FilterQueryBuilder(new DefaultExpressionSerializer()); Expression queryCondition = filter.getCondition(); QueryBuilder query = queryBuilder.build(queryCondition); requestBuilder.pushDownFilter(query); - requestBuilder.pushDownTrackedScore( - trackScoresFromOpenSearchFunction(queryCondition)); + requestBuilder.pushDownTrackedScore(trackScoresFromOpenSearchFunction(queryCondition)); return true; } @@ -64,9 +62,10 @@ public boolean pushDownFilter(LogicalFilter filter) { public boolean pushDownSort(LogicalSort sort) { List> sortList = sort.getSortList(); final SortQueryBuilder builder = new SortQueryBuilder(); - requestBuilder.pushDownSort(sortList.stream() - .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) - .collect(Collectors.toList())); + requestBuilder.pushDownSort( + sortList.stream() + .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) + .collect(Collectors.toList())); return true; } @@ -78,8 +77,7 @@ public boolean pushDownLimit(LogicalLimit limit) { @Override public boolean pushDownProject(LogicalProject project) { - requestBuilder.pushDownProjects( - findReferenceExpressions(project.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(project.getProjectList())); // Return false intentionally to keep the original project operator return false; @@ -105,8 +103,8 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { return true; } if (condition instanceof FunctionExpression) { - return ((FunctionExpression) condition).getArguments().stream() - .anyMatch(this::trackScoresFromOpenSearchFunction); + return ((FunctionExpression) condition) 
+ .getArguments().stream().anyMatch(this::trackScoresFromOpenSearchFunction); } return false; } @@ -114,8 +112,7 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { @Override public boolean pushDownNested(LogicalNested nested) { requestBuilder.pushDownNested(nested.getFields()); - requestBuilder.pushDownProjects( - findReferenceExpressions(nested.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(nested.getProjectList())); // Return false intentionally to keep the original nested operator // Since we return false we need to pushDownProject here as it won't be // pushed down due to no matching push down rule. @@ -130,8 +127,8 @@ public OpenSearchRequestBuilder build() { /** * Find reference expression from expression. - * @param expressions a list of expression. * + * @param expressions a list of expression. * @return a set of ReferenceExpression */ public static Set findReferenceExpressions( @@ -145,18 +142,20 @@ public static Set findReferenceExpressions( /** * Find reference expression from expression. - * @param expression expression. * + * @param expression expression. 
* @return a list of ReferenceExpression */ public static List findReferenceExpression(NamedExpression expression) { List results = new ArrayList<>(); - expression.accept(new ExpressionNodeVisitor<>() { - @Override - public Object visitReference(ReferenceExpression node, Object context) { - return results.add(node); - } - }, null); + expression.accept( + new ExpressionNodeVisitor<>() { + @Override + public Object visitReference(ReferenceExpression node, Object context) { + return results.add(node); + } + }, + null); return results; } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java index 274bc4647d..b855b9a8b5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java @@ -14,9 +14,7 @@ import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Translates a logical query plan into OpenSearch DSL and an appropriate request. - */ +/** Translates a logical query plan into OpenSearch DSL and an appropriate request. 
*/ public interface PushDownQueryBuilder { default boolean pushDownFilter(LogicalFilter filter) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java index 7b68bd5c92..a485296b52 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import lombok.experimental.UtilityClass; @@ -12,6 +11,7 @@ public class StringUtils { /** * Converts sql wildcard character % and _ to * and ?. + * * @param text string to be converted * @return converted string */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java index a1b633f942..753c2bbbc7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java @@ -40,10 +40,12 @@ public abstract class LuceneQuery { /** * Check if function expression supported by current Lucene query. Default behavior is that report * supported if: + * *
    - *
  1. Left is a reference
  2. - *
  3. Right side is a literal
  4. + *
  5. Left is a reference + *
  6. Right side is a literal *
+ * * @param func function * @return return true if supported, otherwise false. */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java index 7e13cad592..2e33e3cc7c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import lombok.RequiredArgsConstructor; @@ -14,19 +13,19 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Lucene query that builds range query for non-quality comparison. - */ +/** Lucene query that builds range query for non-quality comparison. */ @RequiredArgsConstructor public class RangeQuery extends LuceneQuery { public enum Comparison { - LT, GT, LTE, GTE, BETWEEN + LT, + GT, + LTE, + GTE, + BETWEEN } - /** - * Comparison that range query build for. - */ + /** Comparison that range query build for. 
*/ private final Comparison comparison; @Override @@ -55,5 +54,4 @@ private Object value(ExprValue literal) { return literal.value(); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java index c98de1cd84..cd506898d7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import org.opensearch.index.query.QueryBuilder; @@ -13,9 +12,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Lucene query that build term query for equality comparison. - */ +/** Lucene query that build term query for equality comparison. */ public class TermQuery extends LuceneQuery { @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java index 35d5a43a41..0346b7712e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java @@ -8,16 +8,14 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the 'query' query. - */ +/** Class for Lucene query that builds the 'query' query. 
*/ public class QueryQuery extends NoFieldQuery { private final String queryQueryName = "query"; /** - * Default constructor for QueryQuery configures how RelevanceQuery.build() handles - * named arguments by calling the constructor of QueryStringQuery. + * Default constructor for QueryQuery configures how RelevanceQuery.build() handles named + * arguments by calling the constructor of QueryStringQuery. */ public QueryQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java index 43131baa3e..410c55cea6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java @@ -9,13 +9,11 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the query_string query. - */ +/** Class for Lucene query that builds the query_string query. */ public class QueryStringQuery extends MultiFieldQuery { /** - * Default constructor for QueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for QueryString configures how RelevanceQuery.build() handles named + * arguments. 
*/ public QueryStringQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); @@ -29,8 +27,8 @@ public QueryStringQuery() { * @return : Builder for query_string query */ @Override - protected QueryStringQueryBuilder createBuilder(ImmutableMap fields, - String query) { + protected QueryStringQueryBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.queryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java index b8641a5c0b..87faf320ec 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java @@ -20,37 +20,39 @@ import org.opensearch.sql.expression.NamedArgumentExpression; import org.opensearch.sql.opensearch.storage.script.filter.lucene.LuceneQuery; -/** - * Base class for query abstraction that builds a relevance query from function expression. - */ +/** Base class for query abstraction that builds a relevance query from function expression. 
*/ @RequiredArgsConstructor public abstract class RelevanceQuery extends LuceneQuery { - @Getter - private final Map> queryBuildActions; + @Getter private final Map> queryBuildActions; protected void ignoreArguments(List arguments) { - arguments.removeIf(a -> a.getArgName().equalsIgnoreCase("field") - || a.getArgName().equalsIgnoreCase("fields") - || a.getArgName().equalsIgnoreCase("query")); + arguments.removeIf( + a -> + a.getArgName().equalsIgnoreCase("field") + || a.getArgName().equalsIgnoreCase("fields") + || a.getArgName().equalsIgnoreCase("query")); } protected void checkValidArguments(String argNormalized, T queryBuilder) { if (!queryBuildActions.containsKey(argNormalized)) { throw new SemanticCheckException( - String.format("Parameter %s is invalid for %s function.", - argNormalized, queryBuilder.getWriteableName())); + String.format( + "Parameter %s is invalid for %s function.", + argNormalized, queryBuilder.getWriteableName())); } } protected T loadArguments(List arguments) throws SemanticCheckException { // Aggregate parameters by name, so getting a Map - arguments.stream().collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) - .forEach((k, v) -> { - if (v.size() > 1) { - throw new SemanticCheckException( - String.format("Parameter '%s' can only be specified once.", k)); - } - }); + arguments.stream() + .collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) + .forEach( + (k, v) -> { + if (v.size() > 1) { + throw new SemanticCheckException( + String.format("Parameter '%s' can only be specified once.", k)); + } + }); T queryBuilder = createQueryBuilder(arguments); @@ -63,9 +65,7 @@ protected T loadArguments(List arguments) throws Semant checkValidArguments(argNormalized, queryBuilder); - (Objects.requireNonNull( - queryBuildActions - .get(argNormalized))) + (Objects.requireNonNull(queryBuildActions.get(argNormalized))) .apply(queryBuilder, arg.getValue().valueOf()); } @@ -74,15 +74,16 @@ protected T loadArguments(List arguments) 
throws Semant @Override public QueryBuilder build(FunctionExpression func) { - var arguments = func.getArguments().stream() - .map(a -> (NamedArgumentExpression)a).collect(Collectors.toList()); + var arguments = + func.getArguments().stream() + .map(a -> (NamedArgumentExpression) a) + .collect(Collectors.toList()); if (arguments.size() < 2) { throw new SyntaxCheckException( String.format("%s requires at least two parameters", getQueryName())); } return loadArguments(arguments); - } protected abstract T createQueryBuilder(List arguments); @@ -90,12 +91,10 @@ public QueryBuilder build(FunctionExpression func) { protected abstract String getQueryName(); /** - * Convenience interface for a function that updates a QueryBuilder - * based on ExprValue. + * Convenience interface for a function that updates a QueryBuilder based on ExprValue. * * @param Concrete query builder */ - protected interface QueryBuilderStep extends - BiFunction { - } + protected interface QueryBuilderStep + extends BiFunction {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java index 157921572a..86dd44c118 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java @@ -11,16 +11,16 @@ public class SimpleQueryStringQuery extends MultiFieldQuery { /** - * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles named + * arguments. 
*/ public SimpleQueryStringQuery() { super(FunctionParameterRepository.SimpleQueryStringQueryBuildActions); } @Override - protected SimpleQueryStringBuilder createBuilder(ImmutableMap fields, - String query) { + protected SimpleQueryStringBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.simpleQueryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java index ec110dfd8b..086aaddc5e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java @@ -26,18 +26,20 @@ public SingleFieldQuery(Map> queryBuildActions) { @Override protected T createQueryBuilder(List arguments) { // Extract 'field' and 'query' - var field = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("field")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); + var field = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("field")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); - var query = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("query")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); return createBuilder( - ((ReferenceExpression)field.getValue()).getAttr(), + ((ReferenceExpression) field.getValue()).getAttr(), query.getValue().valueOf().stringValue()); } diff --git 
a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java index 9fd37e3de7..7b9887e516 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java @@ -3,20 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.sql.opensearch.storage.script.StringUtils; -/** - * Lucene query that builds wildcard query. - */ +/** Lucene query that builds wildcard query. */ public class WildcardQuery extends SingleFieldQuery { /** - * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles named + * arguments. 
*/ public WildcardQuery() { super(FunctionParameterRepository.WildcardQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java index 9002df7c8f..7669b569d4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.opensearch.sql.analysis.NestedAnalyzer.generatePath; @@ -22,23 +21,17 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Builder of {@link SortBuilder}. - */ +/** Builder of {@link SortBuilder}. */ public class SortQueryBuilder { - /** - * The mapping between Core Engine sort order and OpenSearch sort order. - */ + /** The mapping between Core Engine sort order and OpenSearch sort order. */ private Map sortOrderMap = new ImmutableMap.Builder() .put(Sort.SortOrder.ASC, SortOrder.ASC) .put(Sort.SortOrder.DESC, SortOrder.DESC) .build(); - /** - * The mapping between Core Engine null order and OpenSearch null order. - */ + /** The mapping between Core Engine null order and OpenSearch null order. */ private Map missingMap = new ImmutableMap.Builder() .put(Sort.NullOrder.NULL_FIRST, "_first") @@ -61,14 +54,15 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { } else if (isNestedFunction(expression)) { validateNestedArgs((FunctionExpression) expression); - String orderByName = ((FunctionExpression)expression).getArguments().get(0).toString(); + String orderByName = ((FunctionExpression) expression).getArguments().get(0).toString(); // Generate path if argument not supplied in function. 
- ReferenceExpression path = ((FunctionExpression)expression).getArguments().size() == 2 - ? (ReferenceExpression) ((FunctionExpression)expression).getArguments().get(1) - : generatePath(orderByName); + ReferenceExpression path = + ((FunctionExpression) expression).getArguments().size() == 2 + ? (ReferenceExpression) ((FunctionExpression) expression).getArguments().get(1) + : generatePath(orderByName); return SortBuilders.fieldSort(orderByName) - .order(sortOrderMap.get(option.getSortOrder())) - .setNestedSort(new NestedSortBuilder(path.toString())); + .order(sortOrderMap.get(option.getSortOrder())) + .setNestedSort(new NestedSortBuilder(path.toString())); } else { throw new IllegalStateException("unsupported expression " + expression.getClass()); } @@ -76,29 +70,26 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { /** * Validate semantics for arguments in nested function. + * * @param nestedFunc Nested function expression. */ private void validateNestedArgs(FunctionExpression nestedFunc) { if (nestedFunc.getArguments().size() < 1 || nestedFunc.getArguments().size() > 2) { throw new IllegalArgumentException( - "nested function supports 2 parameters (field, path) or 1 parameter (field)" - ); + "nested function supports 2 parameters (field, path) or 1 parameter (field)"); } for (Expression arg : nestedFunc.getArguments()) { if (!(arg instanceof ReferenceExpression)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", - arg.toString() - ) - ); + String.format("Illegal nested field name: %s", arg.toString())); } } } private FieldSortBuilder fieldBuild(ReferenceExpression ref, Sort.SortOption option) { return SortBuilders.fieldSort( - OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) + OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) .order(sortOrderMap.get(option.getSortOrder())) .missing(missingMap.get(option.getNullOrder())); } diff --git 
a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java index 7b6efeeba4..b1b2081f94 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.utils.SystemIndexUtils.systemTable; @@ -24,17 +23,12 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; -/** - * OpenSearch System Index Table Implementation. - */ +/** OpenSearch System Index Table Implementation. */ public class OpenSearchSystemIndex implements Table { - /** - * System Index Name. - */ + /** System Index Name. */ private final Pair systemIndexBundle; - public OpenSearchSystemIndex( - OpenSearchClient client, String indexName) { + public OpenSearchSystemIndex(OpenSearchClient client, String indexName) { this.systemIndexBundle = buildIndexBundle(client, indexName); } @@ -61,8 +55,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public class OpenSearchSystemIndexDefaultImplementor - extends DefaultImplementor { + public class OpenSearchSystemIndexDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitRelation(LogicalRelation node, Object context) { @@ -79,10 +72,11 @@ private Pair buildIndexBun OpenSearchClient client, String indexName) { SystemIndexUtils.SystemTable systemTable = systemTable(indexName); if (systemTable.isSystemInfoTable()) { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, - new OpenSearchCatIndicesRequest(client)); + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, new OpenSearchCatIndicesRequest(client)); 
} else { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, new OpenSearchDescribeIndexRequest(client, systemTable.getTableName())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java index ee377263c1..57cdd52985 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import java.util.Iterator; @@ -14,21 +13,15 @@ import org.opensearch.sql.opensearch.request.system.OpenSearchSystemRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. - */ +/** OpenSearch index scan operator. */ @RequiredArgsConstructor @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchSystemIndexScan extends TableScanOperator { - /** - * OpenSearch request. - */ + /** OpenSearch request. */ private final OpenSearchSystemRequest request; - /** - * Search response for current batch. - */ + /** Search response for current batch. 
*/ private Iterator iterator; @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java index aa09ff4660..781431ea67 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -15,53 +14,52 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprType; -/** - * Definition of the system table schema. - */ +/** Definition of the system table schema. */ @Getter @RequiredArgsConstructor public enum OpenSearchSystemIndexSchema { - - SYS_TABLE_TABLES(new LinkedHashMap() {{ - put("TABLE_CAT", STRING); - put("TABLE_SCHEM", STRING); - put("TABLE_NAME", STRING); - put("TABLE_TYPE", STRING); - put("REMARKS", STRING); - put("TYPE_CAT", STRING); - put("TYPE_SCHEM", STRING); - put("TYPE_NAME", STRING); - put("SELF_REFERENCING_COL_NAME", STRING); - put("REF_GENERATION", STRING); - } - } - ), - SYS_TABLE_MAPPINGS(new ImmutableMap.Builder() - .put("TABLE_CAT", STRING) - .put("TABLE_SCHEM", STRING) - .put("TABLE_NAME", STRING) - .put("COLUMN_NAME", STRING) - .put("DATA_TYPE", STRING) - .put("TYPE_NAME", STRING) - .put("COLUMN_SIZE", STRING) - .put("BUFFER_LENGTH", STRING) - .put("DECIMAL_DIGITS", STRING) - .put("NUM_PREC_RADIX", STRING) - .put("NULLABLE", STRING) - .put("REMARKS", STRING) - .put("COLUMN_DEF", STRING) - .put("SQL_DATA_TYPE", STRING) - .put("SQL_DATETIME_SUB", STRING) - .put("CHAR_OCTET_LENGTH", STRING) - .put("ORDINAL_POSITION", STRING) - .put("IS_NULLABLE", STRING) - .put("SCOPE_CATALOG", STRING) - .put("SCOPE_SCHEMA", STRING) - 
.put("SCOPE_TABLE", STRING) - .put("SOURCE_DATA_TYPE", STRING) - .put("IS_AUTOINCREMENT", STRING) - .put("IS_GENERATEDCOLUMN", STRING) - .build()); + SYS_TABLE_TABLES( + new LinkedHashMap() { + { + put("TABLE_CAT", STRING); + put("TABLE_SCHEM", STRING); + put("TABLE_NAME", STRING); + put("TABLE_TYPE", STRING); + put("REMARKS", STRING); + put("TYPE_CAT", STRING); + put("TYPE_SCHEM", STRING); + put("TYPE_NAME", STRING); + put("SELF_REFERENCING_COL_NAME", STRING); + put("REF_GENERATION", STRING); + } + }), + SYS_TABLE_MAPPINGS( + new ImmutableMap.Builder() + .put("TABLE_CAT", STRING) + .put("TABLE_SCHEM", STRING) + .put("TABLE_NAME", STRING) + .put("COLUMN_NAME", STRING) + .put("DATA_TYPE", STRING) + .put("TYPE_NAME", STRING) + .put("COLUMN_SIZE", STRING) + .put("BUFFER_LENGTH", STRING) + .put("DECIMAL_DIGITS", STRING) + .put("NUM_PREC_RADIX", STRING) + .put("NULLABLE", STRING) + .put("REMARKS", STRING) + .put("COLUMN_DEF", STRING) + .put("SQL_DATA_TYPE", STRING) + .put("SQL_DATETIME_SUB", STRING) + .put("CHAR_OCTET_LENGTH", STRING) + .put("ORDINAL_POSITION", STRING) + .put("IS_NULLABLE", STRING) + .put("SCOPE_CATALOG", STRING) + .put("SCOPE_SCHEMA", STRING) + .put("SCOPE_TABLE", STRING) + .put("SOURCE_DATA_TYPE", STRING) + .put("IS_AUTOINCREMENT", STRING) + .put("IS_GENERATEDCOLUMN", STRING) + .build()); private final Map mapping; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java index d985bcbeec..040b7d2759 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -89,17 +88,14 @@ class OpenSearchNodeClientTest { 
@Mock(answer = RETURNS_DEEP_STUBS) private NodeClient nodeClient; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse indexResponse; + @Mock private GetIndexResponse indexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); private OpenSearchClient client; @@ -110,8 +106,7 @@ void setUp() { @Test void is_index_exist() { - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(true)); assertTrue(client.exists("test")); @@ -120,8 +115,7 @@ void is_index_exist() { @Test void is_index_not_exist() { String indexName = "test"; - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(false)); assertFalse(client.exists(indexName)); @@ -137,11 +131,8 @@ void is_index_exist_with_exception() { @Test void create_index() { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(nodeClient.admin().indices() - .create(any(CreateIndexRequest.class)).actionGet()) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(nodeClient.admin().indices().create(any(CreateIndexRequest.class)).actionGet()) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -151,8 +142,7 @@ void create_index() { void create_index_with_exception() { 
when(nodeClient.admin().indices().create(any())).thenThrow(RuntimeException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -172,58 +162,57 @@ void get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown 
type isn't parsed and ignored () -> assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), 
parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -247,11 +236,8 @@ void get_index_mappings_with_IOException() { @Test void get_index_mappings_with_non_exist_index() { - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get() - ).thenThrow(IndexNotFoundException.class); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenThrow(IndexNotFoundException.class); assertThrows(IndexNotFoundException.class, () -> client.getIndexMappings("non_exist_index")); } @@ -307,9 +293,7 @@ void search() { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); 
when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -320,9 +304,13 @@ void search() { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -355,9 +343,13 @@ void cleanup() { when(requestBuilder.addScrollId(any())).thenReturn(requestBuilder); when(requestBuilder.get()).thenReturn(null); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -372,9 +364,13 @@ void cleanup() { @Test void cleanup_without_scrollId() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(nodeClient, never()).prepareClearScroll(); } @@ -384,9 +380,13 @@ void cleanup_without_scrollId() { void cleanup_rethrows_exception() { when(nodeClient.prepareClearScroll()).thenThrow(new RuntimeException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -398,10 +398,8 @@ void get_indices() { AliasMetadata aliasMetadata = mock(AliasMetadata.class); final var openMap = Map.of("index", List.of(aliasMetadata)); when(aliasMetadata.alias()).thenReturn("index_alias"); - when(nodeClient.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get()).thenReturn(indexResponse); + when(nodeClient.admin().indices().prepareGetIndex().setLocal(true).get()) + .thenReturn(indexResponse); when(indexResponse.getIndices()).thenReturn(new String[] {"index"}); when(indexResponse.aliases()).thenReturn(openMap); @@ -427,10 +425,8 @@ void ml() { public void mockNodeClientIndicesMappings(String indexName, String mappings) { GetMappingsResponse mockResponse = mock(GetMappingsResponse.class); MappingMetadata emptyMapping = mock(MappingMetadata.class); - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get()).thenReturn(mockResponse); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenReturn(mockResponse); try { Map metadata; if (mappings.isEmpty()) { @@ -445,13 +441,12 @@ public void mockNodeClientIndicesMappings(String indexName, String mappings) { } } - private void mockNodeClientSettings(String indexName, String indexMetadata) - throws IOException { + private void mockNodeClientSettings(String indexName, String indexMetadata) throws IOException { GetSettingsResponse mockResponse = mock(GetSettingsResponse.class); when(nodeClient.admin().indices().prepareGetSettings(any()).setLocal(anyBoolean()).get()) .thenReturn(mockResponse); - Map metadata = Map.of(indexName, - IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); + Map metadata = + Map.of(indexName, IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); when(mockResponse.getIndexToSettings()).thenReturn(metadata); } diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java index 409596910e..99201aae4f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -79,22 +78,20 @@ class OpenSearchRestClientTest { private static final String TEST_MAPPING_FILE = "mappings/accounts.json"; + @Mock(answer = RETURNS_DEEP_STUBS) private RestHighLevelClient restClient; private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse getIndexResponse; + @Mock private GetIndexResponse getIndexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); @BeforeEach void setUp() { @@ -103,8 +100,9 @@ void setUp() { @Test void is_index_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + .exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(true); assertTrue(client.exists("test")); @@ -112,8 +110,9 @@ void is_index_exist() throws IOException { @Test void is_index_not_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + 
.exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(false); assertFalse(client.exists("test")); @@ -129,11 +128,8 @@ void is_index_exist_with_exception() throws IOException { @Test void create_index() throws IOException { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(restClient.indices() - .create(any(), any())) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(restClient.indices().create(any(), any())) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -142,8 +138,7 @@ void create_index() throws IOException { @Test void create_index_with_IOException() throws IOException { when(restClient.indices().create(any(), any())).thenThrow(IOException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -167,58 +162,57 @@ void get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> 
assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown type isn't parsed and ignored () -> assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> 
assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -234,14 +228,11 @@ void 
get_index_max_result_windows_settings() throws IOException { Integer maxResultWindow = 1000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, maxResultWindowSettings); - Map indexToDefaultSettings = - mockSettings(indexName, emptySettings); + Map indexToSettings = mockSettings(indexName, maxResultWindowSettings); + Map indexToDefaultSettings = mockSettings(indexName, emptySettings); when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -258,14 +249,11 @@ void get_index_max_result_windows_default_settings() throws IOException { Integer maxResultWindow = 10000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, emptySettings); - Map indexToDefaultSettings = - mockSettings(indexName, maxResultWindowSettings); + Map indexToSettings = mockSettings(indexName, emptySettings); + Map indexToDefaultSettings = mockSettings(indexName, maxResultWindowSettings); when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -292,9 +280,7 @@ void search() throws 
IOException { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -305,9 +291,13 @@ void search() throws IOException { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -327,9 +317,14 @@ void search_with_IOException() throws IOException { when(restClient.search(any(), any())).thenThrow(new IOException()); assertThrows( IllegalStateException.class, - () -> client.search(new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()))); + () -> + client.search( + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()))); } @Test @@ -349,28 +344,34 @@ void scroll_with_IOException() throws IOException { when(restClient.scroll(any(), any())).thenThrow(new IOException()); // First request run successfully - OpenSearchScrollRequest scrollRequest = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, 
List.of()); + OpenSearchScrollRequest scrollRequest = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.search(scrollRequest); - assertThrows( - IllegalStateException.class, () -> client.search(scrollRequest)); + assertThrows(IllegalStateException.class, () -> client.search(scrollRequest)); } @Test void schedule() { AtomicBoolean isRun = new AtomicBoolean(false); - client.schedule( - () -> isRun.set(true)); + client.schedule(() -> isRun.set(true)); assertTrue(isRun.get()); } @Test @SneakyThrows void cleanup() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); @@ -381,9 +382,13 @@ void cleanup() { @Test void cleanup_without_scrollId() throws IOException { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(restClient, never()).clearScroll(any(), any()); } @@ -393,9 +398,13 @@ void cleanup_without_scrollId() throws IOException { void cleanup_with_IOException() { when(restClient.clearScroll(any(), any())).thenThrow(new IOException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java index 4edb25aff5..defa97d8c8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java index cda4377c60..38a4ad3199 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java index b60402e746..9b7e032c57 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -27,54 +26,73 
@@ public void type_of_ExprTextValue() { @Test public void getFields() { - var fields = Map.of( - "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), - "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), - "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); + var fields = + Map.of( + "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); assertEquals(fields, OpenSearchTextType.of(fields).getFields()); } @Test void non_text_types_arent_converted() { assertAll( - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(INTEGER))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(STRING))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", OpenSearchDataType.of(INTEGER))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", OpenSearchDataType.of(STRING))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", 
OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", STRING)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER))); } @Test void non_text_types_with_nested_objects_arent_converted() { - var objectType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Object, - Map.of("subfield", OpenSearchDataType.of(STRING))); - var arrayType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested, - Map.of("subfield", OpenSearchDataType.of(STRING))); + var objectType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Object, + Map.of("subfield", OpenSearchDataType.of(STRING))); + var arrayType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Nested, + Map.of("subfield", OpenSearchDataType.of(STRING))); assertAll( () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", objectType)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType))); } @Test void text_type_without_fields_isnt_converted() { - assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); } @Test void text_type_with_fields_is_converted() { - var textWithKeywordType = OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); - assertEquals("field.keyword", - OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); + var textWithKeywordType = + OpenSearchTextType.of( + Map.of("keyword", 
OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + assertEquals( + "field.keyword", OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index 827606a961..3d3a6a5996 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -88,8 +87,8 @@ class OpenSearchExprValueFactoryTest { .put("timeNoMillisOrTimeV", OpenSearchDateType.of("time_no_millis || time")) .put("dateOrOrdinalDateV", OpenSearchDateType.of("date || ordinal_date")) .put("customFormatV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss")) - .put("customAndEpochMillisV", - OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || epoch_millis")) + .put( + "customAndEpochMillisV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || epoch_millis")) .put("incompleteFormatV", OpenSearchDateType.of("year")) .put("boolV", OpenSearchDataType.of(BOOLEAN)) .put("structV", OpenSearchDataType.of(STRUCT)) @@ -98,20 +97,22 @@ class OpenSearchExprValueFactoryTest { .put("arrayV", OpenSearchDataType.of(ARRAY)) .put("arrayV.info", OpenSearchDataType.of(STRING)) .put("arrayV.author", OpenSearchDataType.of(STRING)) - .put("deepNestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) - .put("deepNestedV.year", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "deepNestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) + .put( + 
"deepNestedV.year", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("deepNestedV.year.timeV", OpenSearchDateType.of(TIME)) - .put("nestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "nestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("nestedV.count", OpenSearchDataType.of(INTEGER)) .put("textV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text)) - .put("textKeywordV", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) + .put( + "textKeywordV", + OpenSearchTextType.of( + Map.of("words", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) .put("ipV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip)) .put("geoV", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint)) .put("binaryV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary)) @@ -124,9 +125,8 @@ class OpenSearchExprValueFactoryTest { public void constructNullValue() { assertAll( () -> assertEquals(nullValue(), tupleValue("{\"intV\":null}").get("intV")), - () -> assertEquals(nullValue(), constructFromObject("intV", null)), - () -> assertTrue(new OpenSearchJsonContent(null).isNull()) - ); + () -> assertEquals(nullValue(), constructFromObject("intV", null)), + () -> assertTrue(new OpenSearchJsonContent(null).isNull())); } @Test @@ -136,8 +136,7 @@ public void iterateArrayValue() throws JsonProcessingException { assertAll( () -> assertEquals("zz", arrayIt.next().stringValue()), () -> assertEquals("bb", arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -146,8 +145,7 @@ public void iterateArrayValueWithOneElement() throws JsonProcessingException { var arrayIt = new OpenSearchJsonContent(mapper.readTree("[\"zz\"]")).array(); assertAll( () -> assertEquals("zz", 
arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -160,8 +158,7 @@ public void constructByte() { assertAll( () -> assertEquals(byteValue((byte) 1), tupleValue("{\"byteV\":1}").get("byteV")), () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", 1)), - () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0")) - ); + () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0"))); } @Test @@ -169,8 +166,7 @@ public void constructShort() { assertAll( () -> assertEquals(shortValue((short) 1), tupleValue("{\"shortV\":1}").get("shortV")), () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", 1)), - () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0")) - ); + () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0"))); } @Test @@ -178,8 +174,7 @@ public void constructInteger() { assertAll( () -> assertEquals(integerValue(1), tupleValue("{\"intV\":1}").get("intV")), () -> assertEquals(integerValue(1), constructFromObject("intV", 1)), - () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0")) - ); + () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0"))); } @Test @@ -192,33 +187,29 @@ public void constructLong() { assertAll( () -> assertEquals(longValue(1L), tupleValue("{\"longV\":1}").get("longV")), () -> assertEquals(longValue(1L), constructFromObject("longV", 1L)), - () -> assertEquals(longValue(1L), constructFromObject("longV", "1.0")) - ); + () -> assertEquals(longValue(1L), constructFromObject("longV", "1.0"))); } @Test public void constructFloat() { assertAll( () -> assertEquals(floatValue(1f), tupleValue("{\"floatV\":1.0}").get("floatV")), - () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f)) - ); + () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f))); } @Test public void constructDouble() { assertAll( () -> 
assertEquals(doubleValue(1d), tupleValue("{\"doubleV\":1.0}").get("doubleV")), - () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d)) - ); + () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d))); } @Test public void constructString() { assertAll( - () -> assertEquals(stringValue("text"), - tupleValue("{\"stringV\":\"text\"}").get("stringV")), - () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text")) - ); + () -> + assertEquals(stringValue("text"), tupleValue("{\"stringV\":\"text\"}").get("stringV")), + () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text"))); } @Test @@ -228,23 +219,25 @@ public void constructBoolean() { () -> assertEquals(booleanValue(true), constructFromObject("boolV", true)), () -> assertEquals(booleanValue(true), constructFromObject("boolV", "true")), () -> assertEquals(booleanValue(true), constructFromObject("boolV", 1)), - () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0)) - ); + () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0))); } @Test public void constructText() { assertAll( - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textV\":\"text\"}").get("textV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - constructFromObject("textV", "text")), - - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - constructFromObject("textKeywordV", "text")) - ); + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textV\":\"text\"}").get("textV")), + () -> + assertEquals(new OpenSearchExprTextValue("text"), constructFromObject("textV", "text")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), 
constructFromObject("textKeywordV", "text"))); } @Test @@ -252,95 +245,122 @@ public void constructDates() { ExprValue dateStringV = constructFromObject("dateStringV", "1984-04-12"); assertAll( () -> assertEquals(new ExprDateValue("1984-04-12"), dateStringV), - () -> assertEquals(new ExprDateValue( - LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), - constructFromObject("dateV", 450576000000L)), - () -> assertEquals(new ExprDateValue("1984-04-12"), - constructFromObject("dateOrOrdinalDateV", "1984-103")), - () -> assertEquals(new ExprDateValue("2015-01-01"), - tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV")) - ); + () -> + assertEquals( + new ExprDateValue( + LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), + constructFromObject("dateV", 450576000000L)), + () -> + assertEquals( + new ExprDateValue("1984-04-12"), + constructFromObject("dateOrOrdinalDateV", "1984-103")), + () -> + assertEquals( + new ExprDateValue("2015-01-01"), + tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV"))); } @Test public void constructTimes() { - ExprValue timeStringV = constructFromObject("timeStringV","12:10:30.000Z"); + ExprValue timeStringV = constructFromObject("timeStringV", "12:10:30.000Z"); assertAll( () -> assertTrue(timeStringV.isDateTime()), () -> assertTrue(timeStringV instanceof ExprTimeValue), () -> assertEquals(new ExprTimeValue("12:10:30"), timeStringV), - () -> assertEquals(new ExprTimeValue(LocalTime.from( - Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), - constructFromObject("timeV", 1420070400001L)), - () -> assertEquals(new ExprTimeValue("09:07:42.000"), - constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), - () -> assertEquals(new ExprTimeValue("09:07:42"), - tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV")) - ); + () -> + assertEquals( + new ExprTimeValue( + LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), + constructFromObject("timeV", 1420070400001L)), + () -> 
+ assertEquals( + new ExprTimeValue("09:07:42.000"), + constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), + () -> + assertEquals( + new ExprTimeValue("09:07:42"), + tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV"))); } @Test public void constructDatetime() { assertAll( - () -> assertEquals( - new ExprTimestampValue("2015-01-01 00:00:00"), - tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", "1420070400001")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), - constructFromObject("epochSecondV", 142704001L)), - () -> assertEquals( - new ExprTimeValue("10:20:30"), - tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), - () -> assertEquals( - new ExprDateValue("1961-04-12"), - tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), - () -> assertEquals( - new ExprTimestampValue("1984-05-10 20:30:40"), - tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), - () -> 
assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - constructFromObject("timestampV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("dateOrEpochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 00:00:00"), + tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("epochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("epochMillisV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), + constructFromObject("epochSecondV", 142704001L)), + () -> + assertEquals( + new ExprTimeValue("10:20:30"), + tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), + () -> + 
assertEquals( + new ExprDateValue("1961-04-12"), + tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), + () -> + assertEquals( + new ExprTimestampValue("1984-05-10 20:30:40"), + tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + constructFromObject("timestampV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("dateOrEpochMillisV", "1420070400001")), // case: timestamp-formatted field, but it only gets a time: should match a time - () -> assertEquals( - new ExprTimeValue("19:36:22"), - tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimeValue("19:36:22"), + tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), // case: timestamp-formatted field, but it only gets a date: should match a date - () -> assertEquals( - new ExprDateValue("2011-03-03"), - tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV")) - ); + () -> + assertEquals( + new ExprDateValue("2011-03-03"), + tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV"))); } @Test @@ -350,11 +370,11 @@ public void constructDatetime_fromCustomFormat() { constructFromObject("customFormatV", "2015-01-01-12-10-30")); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("customFormatV", "2015-01-01 12-10-30")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, 
unsupported format.", exception.getMessage()); assertEquals( @@ -369,91 +389,87 @@ public void constructDatetime_fromCustomFormat() { @Test public void constructDatetimeFromUnsupportedFormat_ThrowIllegalArgumentException() { IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timestampV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, unsupported format.", exception.getMessage()); // fail with missing seconds exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateOrEpochMillisV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, unsupported format.", exception.getMessage()); } @Test public void constructTimeFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); assertEquals( - "Construct TIME from \"2015-01-01\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct TIME from \"2015-01-01\" failed, unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); assertEquals( - "Construct TIME from \"10:10\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct TIME from \"10:10\" failed, 
unsupported format.", exception.getMessage()); } @Test public void constructDateFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); assertEquals( - "Construct DATE from \"12:10:10\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct DATE from \"12:10:10\" failed, unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); - assertEquals( - "Construct DATE from \"abc\" failed, " - + "unsupported format.", - exception.getMessage()); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); + assertEquals("Construct DATE from \"abc\" failed, unsupported format.", exception.getMessage()); } @Test public void constructDateFromIncompleteFormat() { - assertEquals( - new ExprDateValue("1984-01-01"), - constructFromObject("incompleteFormatV", "1984")); + assertEquals(new ExprDateValue("1984-01-01"), constructFromObject("incompleteFormatV", "1984")); } @Test public void constructArray() { assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), + new ExprCollectionValue( + List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), tupleValue("{\"arrayV\":[{\"info\":\"zz\",\"author\":\"au\"}]}").get("arrayV")); assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), - constructFromObject("arrayV", List.of( 
- ImmutableMap.of("info", "zz", "author", "au")))); + new ExprCollectionValue( + List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), + constructFromObject("arrayV", List.of(ImmutableMap.of("info", "zz", "author", "au")))); } @Test public void constructArrayOfStrings() { - assertEquals(new ExprCollectionValue( - List.of(stringValue("zz"), stringValue("au"))), + assertEquals( + new ExprCollectionValue(List.of(stringValue("zz"), stringValue("au"))), constructFromObject("arrayV", List.of("zz", "au"))); } @@ -461,100 +477,71 @@ public void constructArrayOfStrings() { public void constructNestedArraysOfStrings() { assertEquals( new ExprCollectionValue( - List.of( - collectionValue( - List.of("zz", "au") - ), - collectionValue( - List.of("ss") - ) - ) - ), - tupleValueWithArraySupport( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + List.of(collectionValue(List.of("zz", "au")), collectionValue(List.of("ss")))), + tupleValueWithArraySupport("{\"stringV\":[ [\"zz\", \"au\"], [\"ss\"] ]}").get("stringV")); } @Test public void constructNestedArraysOfStringsReturnsFirstIndex() { assertEquals( - stringValue("zz"), - tupleValue( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + stringValue("zz"), tupleValue("{\"stringV\":[[\"zz\", \"au\"],[\"ss\"]]}").get("stringV")); } @Test public void constructMultiNestedArraysOfStringsReturnsFirstIndex() { assertEquals( stringValue("z"), - tupleValue( - "{\"stringV\":" - + "[\"z\"," - + "[\"s\"]," - + "[\"zz\", \"au\"]" - + "]}" - ).get("stringV")); + tupleValue("{\"stringV\":" + "[\"z\",[\"s\"],[\"zz\", \"au\"]]}").get("stringV")); } @Test public void constructArrayOfInts() { - assertEquals(new ExprCollectionValue( - List.of(integerValue(1), integerValue(2))), + assertEquals( + new ExprCollectionValue(List.of(integerValue(1), integerValue(2))), 
constructFromObject("arrayV", List.of(1, 2))); } @Test public void constructArrayOfShorts() { // Shorts are treated same as integer - assertEquals(new ExprCollectionValue( - List.of(shortValue((short)3), shortValue((short)4))), + assertEquals( + new ExprCollectionValue(List.of(shortValue((short) 3), shortValue((short) 4))), constructFromObject("arrayV", List.of(3, 4))); } @Test public void constructArrayOfLongs() { - assertEquals(new ExprCollectionValue( - List.of(longValue(123456789L), longValue(987654321L))), + assertEquals( + new ExprCollectionValue(List.of(longValue(123456789L), longValue(987654321L))), constructFromObject("arrayV", List.of(123456789L, 987654321L))); } @Test public void constructArrayOfFloats() { - assertEquals(new ExprCollectionValue( - List.of(floatValue(3.14f), floatValue(4.13f))), + assertEquals( + new ExprCollectionValue(List.of(floatValue(3.14f), floatValue(4.13f))), constructFromObject("arrayV", List.of(3.14f, 4.13f))); } @Test public void constructArrayOfDoubles() { - assertEquals(new ExprCollectionValue( - List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), + assertEquals( + new ExprCollectionValue(List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), constructFromObject("arrayV", List.of(9.1928374756D, 4.987654321D))); } @Test public void constructArrayOfBooleans() { - assertEquals(new ExprCollectionValue( - List.of(booleanValue(true), booleanValue(false))), + assertEquals( + new ExprCollectionValue(List.of(booleanValue(true), booleanValue(false))), constructFromObject("arrayV", List.of(true, false))); } @Test public void constructNestedObjectArrayNode() { - assertEquals(collectionValue( - List.of( - Map.of("count", 1), - Map.of("count", 2) - )), - tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1), Map.of("count", 2))), + 
tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}").get("nestedV")); } @Test @@ -562,84 +549,70 @@ public void constructNestedObjectArrayOfObjectArraysNode() { assertEquals( collectionValue( List.of( - Map.of("year", + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ), - Map.of("year", + Map.of("timeV", new ExprTimeValue("09:07:42")))), + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ) - ) - ), + Map.of("timeV", new ExprTimeValue("09:07:42")))))), tupleValueWithArraySupport( - "{\"deepNestedV\":" - + "[" - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}," - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}" - + "]" - + "}") + "{\"deepNestedV\":" + + " [" + + " {\"year\":" + + " [" + + " {\"timeV\":\"09:07:42\"}," + + " {\"timeV\":\"09:07:42\"}" + + " ]" + + " }," + + " {\"year\":" + + " [" + + " {\"timeV\":\"09:07:42\"}," + + " {\"timeV\":\"09:07:42\"}" + + " ]" + + " }" + + " ]" + + "}") .get("deepNestedV")); } @Test public void constructNestedArrayNode() { - assertEquals(collectionValue( - List.of( - 1969, - 2011 - )), - tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(1969, 2011)), + tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}").get("nestedV")); } @Test public void constructNestedObjectNode() { - assertEquals(collectionValue( - List.of( - Map.of("count", 1969) - )), - tupleValue("{\"nestedV\":{\"count\":1969}}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1969))), + tupleValue("{\"nestedV\":{\"count\":1969}}").get("nestedV")); } @Test public void constructArrayOfGeoPoints() { - assertEquals(new ExprCollectionValue( + assertEquals( + new ExprCollectionValue( 
List.of( new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), - new OpenSearchExprGeoPointValue(-33.6123556, 66.287449)) - ), + new OpenSearchExprGeoPointValue(-33.6123556, 66.287449))), tupleValueWithArraySupport( - "{\"geoV\":[" - + "{\"lat\":42.60355556,\"lon\":-97.25263889}," - + "{\"lat\":-33.6123556,\"lon\":66.287449}" - + "]}" - ).get("geoV") - ); + "{\"geoV\":[" + + "{\"lat\":42.60355556,\"lon\":-97.25263889}," + + "{\"lat\":-33.6123556,\"lon\":66.287449}" + + "]}") + .get("geoV")); } @Test public void constructArrayOfIPsReturnsFirstIndex() { assertEquals( new OpenSearchExprIpValue("192.168.0.1"), - tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}") - .get("ipV") - ); + tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}").get("ipV")); } @Test @@ -647,8 +620,7 @@ public void constructBinaryArrayReturnsFirstIndex() { assertEquals( new OpenSearchExprBinaryValue("U29tZSBiaWsdfsdfgYmxvYg=="), tupleValue("{\"binaryV\":[\"U29tZSBiaWsdfsdfgYmxvYg==\",\"U987yuhjjiy8jhk9vY+98jjdf\"]}") - .get("binaryV") - ); + .get("binaryV")); } @Test @@ -656,26 +628,21 @@ public void constructArrayOfCustomEpochMillisReturnsFirstIndex() { assertEquals( new ExprDatetimeValue("2015-01-01 12:10:30"), tupleValue("{\"customAndEpochMillisV\":[\"2015-01-01 12:10:30\",\"1999-11-09 01:09:44\"]}") - .get("customAndEpochMillisV") - ); + .get("customAndEpochMillisV")); } @Test public void constructArrayOfDateStringsReturnsFirstIndex() { assertEquals( new ExprDateValue("1984-04-12"), - tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}") - .get("dateStringV") - ); + tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}").get("dateStringV")); } @Test public void constructArrayOfTimeStringsReturnsFirstIndex() { assertEquals( new ExprTimeValue("12:10:30"), - tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}") - .get("timeStringV") - ); + tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}").get("timeStringV")); } @Test @@ 
-683,8 +650,7 @@ public void constructArrayOfEpochMillis() { assertEquals( new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), tupleValue("{\"dateOrEpochMillisV\":[\"1420070400001\",\"1454251113333\"]}") - .get("dateOrEpochMillisV") - ); + .get("dateOrEpochMillisV")); } @Test @@ -711,54 +677,64 @@ public void constructStruct() { @Test public void constructIP() { - assertEquals(new OpenSearchExprIpValue("192.168.0.1"), + assertEquals( + new OpenSearchExprIpValue("192.168.0.1"), tupleValue("{\"ipV\":\"192.168.0.1\"}").get("ipV")); } @Test public void constructGeoPoint() { - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":-97.25263889}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":\"42.60355556\",\"lon\":\"-97.25263889\"}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), constructFromObject("geoV", "42.60355556,-97.25263889")); } @Test public void constructGeoPointFromUnsupportedFormatShouldThrowException() { IllegalStateException exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":[42.60355556,-97.25263889]}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lon\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": 
number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":true,\"lon\":-97.25263889}}").get("geoV")); assertEquals("latitude must be number value, but got value: true", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":false}}").get("geoV")); assertEquals("longitude must be number value, but got value: false", exception.getMessage()); } @Test public void constructBinary() { - assertEquals(new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), + assertEquals( + new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), tupleValue("{\"binaryV\":\"U29tZSBiaW5hcnkgYmxvYg==\"}").get("binaryV")); } @@ -769,14 +745,16 @@ public void constructBinary() { @Test public void constructFromOpenSearchArrayReturnFirstElement() { assertEquals(integerValue(1), tupleValue("{\"intV\":[1, 2, 3]}").get("intV")); - assertEquals(new ExprTupleValue( - new LinkedHashMap() { - { - put("id", integerValue(1)); - put("state", stringValue("WA")); - } - }), tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") - .get("structV")); + assertEquals( + new ExprTupleValue( + new LinkedHashMap() { + { + put("id", integerValue(1)); + put("state", stringValue("WA")); + } + }), + 
tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") + .get("structV")); } @Test @@ -799,19 +777,13 @@ public void constructUnsupportedTypeThrowException() { new OpenSearchExprValueFactory(Map.of("type", new TestType())); IllegalStateException exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("{\"type\":1}", false) - ); + IllegalStateException.class, () -> exprValueFactory.construct("{\"type\":1}", false)); assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("type", 1, false) - ); - assertEquals( - "Unsupported type: TEST_TYPE for value: 1.", - exception.getMessage()); + IllegalStateException.class, () -> exprValueFactory.construct("type", 1, false)); + assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); } @Test @@ -820,21 +792,21 @@ public void constructUnsupportedTypeThrowException() { public void factoryMappingsAreExtendableWithoutOverWrite() throws NoSuchFieldException, IllegalAccessException { var factory = new OpenSearchExprValueFactory(Map.of("value", OpenSearchDataType.of(INTEGER))); - factory.extendTypeMapping(Map.of( - "value", OpenSearchDataType.of(DOUBLE), - "agg", OpenSearchDataType.of(DATE))); + factory.extendTypeMapping( + Map.of( + "value", OpenSearchDataType.of(DOUBLE), + "agg", OpenSearchDataType.of(DATE))); // extract private field for testing purposes var field = factory.getClass().getDeclaredField("typeMapping"); field.setAccessible(true); @SuppressWarnings("unchecked") - var mapping = (Map)field.get(factory); + var mapping = (Map) field.get(factory); assertAll( () -> assertEquals(2, mapping.size()), () -> assertTrue(mapping.containsKey("value")), () -> assertTrue(mapping.containsKey("agg")), () -> assertEquals(OpenSearchDataType.of(INTEGER), mapping.get("value")), - () -> assertEquals(OpenSearchDataType.of(DATE), 
mapping.get("agg")) - ); + () -> assertEquals(OpenSearchDataType.of(DATE), mapping.get("agg"))); } public Map tupleValue(String jsonString) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java index 6d2b9b13ce..047a510180 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java @@ -32,17 +32,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchQueryManagerTest { - @Mock - private QueryId queryId; + @Mock private QueryId queryId; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private UnresolvedPlan plan; + @Mock private UnresolvedPlan plan; - @Mock - private ResponseListener listener; + @Mock private ResponseListener listener; @Test public void submitQuery() { @@ -51,19 +47,20 @@ public void submitQuery() { when(nodeClient.threadPool()).thenReturn(threadPool); AtomicBoolean isRun = new AtomicBoolean(false); - AbstractPlan queryPlan = new QueryPlan(queryId, plan, queryService, listener) { - @Override - public void execute() { - isRun.set(true); - } - }; + AbstractPlan queryPlan = + new QueryPlan(queryId, plan, queryService, listener) { + @Override + public void execute() { + isRun.set(true); + } + }; doAnswer( - invocation -> { - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(threadPool) .schedule(any(), any(), any()); new OpenSearchQueryManager(nodeClient).submit(queryPlan); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java 
index 96e85a8173..26bcdf6d89 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -27,17 +26,13 @@ @ExtendWith(MockitoExtension.class) class ResourceMonitorPlanTest { - @Mock - private PhysicalPlan plan; + @Mock private PhysicalPlan plan; - @Mock - private ResourceMonitor resourceMonitor; + @Mock private ResourceMonitor resourceMonitor; - @Mock - private PhysicalPlanNodeVisitor visitor; + @Mock private PhysicalPlanNodeVisitor visitor; - @Mock - private Object context; + @Mock private Object context; private ResourceMonitorPlan monitorPlan; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java index af4cdc8ce6..a61f7343e6 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -20,11 +19,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchMemoryHealthyTest { - @Mock - private OpenSearchMemoryHealthy.RandomFail randomFail; + @Mock private OpenSearchMemoryHealthy.RandomFail randomFail; - @Mock - private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; + @Mock private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; private OpenSearchMemoryHealthy monitor; @@ -45,7 +42,8 @@ void memoryUsageExceedLimitFastFailure() { when(memoryUsage.usage()).thenReturn(10L); 
when(randomFail.shouldFail()).thenReturn(true); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -54,7 +52,8 @@ void memoryUsageExceedLimitWithoutFastFailure() { when(memoryUsage.usage()).thenReturn(10L); when(randomFail.shouldFail()).thenReturn(false); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -72,8 +71,7 @@ void randomFail() { @Test void setMemoryUsage() { - OpenSearchMemoryHealthy.MemoryUsage usage = - new OpenSearchMemoryHealthy.MemoryUsage(); + OpenSearchMemoryHealthy.MemoryUsage usage = new OpenSearchMemoryHealthy.MemoryUsage(); assertTrue(usage.usage() > 0); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java index cd27b0710e..f56d8cb81b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -24,11 +23,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResourceMonitorTest { - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private OpenSearchMemoryHealthy memoryMonitor; + @Mock private OpenSearchMemoryHealthy memoryMonitor; @BeforeEach public void setup() { @@ -47,8 +44,8 @@ void isHealthy() { @Test void notHealthyFastFailure() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - 
OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -58,8 +55,8 @@ void notHealthyFastFailure() { @Test void notHealthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -70,8 +67,9 @@ void notHealthyWithRetry() { @Test void healthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedException.class).thenReturn(true); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) + .thenReturn(true); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java index b6966f2403..d2bc5b0641 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -41,32 +40,23 @@ @ExtendWith(MockitoExtension.class) public class OpenSearchQueryRequestTest { - @Mock - private Function searchAction; + @Mock private Function searchAction; - @Mock - private 
Function scrollAction; + @Mock private Function scrollAction; - @Mock - private Consumer cleanAction; + @Mock private Consumer cleanAction; - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private FetchSourceContext fetchSourceContext; + @Mock private FetchSourceContext fetchSourceContext; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final OpenSearchQueryRequest request = new OpenSearchQueryRequest("test", 200, factory, List.of()); @@ -76,12 +66,9 @@ public class OpenSearchQueryRequestTest { @Test void search() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -96,12 +83,9 @@ void search() { @Test void search_withoutContext() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -113,12 +97,9 @@ void search_withoutContext() { @Test void search_withIncludes() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - 
sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); String[] includes = {"_id", "_index"}; when(searchAction.apply(any())).thenReturn(searchResponse); @@ -144,13 +125,15 @@ void clean() { void searchRequest() { request.getSourceBuilder().query(QueryBuilders.termQuery("name", "John")); - assertSearchRequest(new SearchRequest() - .indices("test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + assertSearchRequest( + new SearchRequest() + .indices("test") + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), request); } @@ -161,28 +144,31 @@ void searchCrossClusterRequest() { assertSearchRequest( new SearchRequest() .indices("ccs:test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), remoteRequest); } @Test void writeTo_unsupported() { - assertThrows(UnsupportedOperationException.class, - () -> request.writeTo(mock(StreamOutput.class))); + assertThrows( + UnsupportedOperationException.class, () -> request.writeTo(mock(StreamOutput.class))); } private void assertSearchRequest(SearchRequest expected, OpenSearchQueryRequest request) { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], - new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest); + return 
when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; request.search(querySearch, searchScrollRequest -> null); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java index 483ea1290e..5bb0a2207b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.Assert.assertThrows; @@ -71,11 +70,10 @@ class OpenSearchRequestBuilderTest { private static final Integer DEFAULT_LIMIT = 200; private static final Integer MAX_RESULT_WINDOW = 500; - private static final OpenSearchRequest.IndexName indexName - = new OpenSearchRequest.IndexName("test"); + private static final OpenSearchRequest.IndexName indexName = + new OpenSearchRequest.IndexName("test"); - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; private OpenSearchRequestBuilder requestBuilder; @@ -99,7 +97,8 @@ void build_query_request() { .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) .trackScores(true), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -111,12 +110,14 @@ void build_scroll_request_with_correct_size() { assertEquals( new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), new SearchSourceBuilder() .from(offset) .size(MAX_RESULT_WINDOW - offset) 
.timeout(DEFAULT_QUERY_TIMEOUT), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -126,33 +127,32 @@ void test_push_down_query() { requestBuilder.pushDownFilter(query); var r = requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT); - Function querySearch = searchRequest -> { - assertEquals( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT) - .query(query) - .sort(DOC_FIELD_NAME, ASC), - searchRequest.source() - ); - return mock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; + Function querySearch = + searchRequest -> { + assertEquals( + new SearchSourceBuilder() + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT) + .query(query) + .sort(DOC_FIELD_NAME, ASC), + searchRequest.source()); + return mock(); + }; + Function scrollSearch = + searchScrollRequest -> { + throw new UnsupportedOperationException(); + }; r.search(querySearch, scrollSearch); - } @Test void test_push_down_aggregation() { - AggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList(new TermsValuesSourceBuilder("longA"))); + AggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", Collections.singletonList(new TermsValuesSourceBuilder("longA"))); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser("AVG(intA)")); + new CompositeAggregationParser(new SingleValueParser("AVG(intA)")); requestBuilder.pushDownAggregation(Pair.of(List.of(aggBuilder), responseParser)); assertEquals( @@ -161,8 +161,7 @@ void test_push_down_aggregation() { .size(0) .timeout(DEFAULT_QUERY_TIMEOUT) .aggregation(aggBuilder), - requestBuilder.getSourceBuilder() - ); + requestBuilder.getSourceBuilder()); 
verify(exprValueFactory).setParser(responseParser); } @@ -184,21 +183,25 @@ void test_push_down_query_and_sort() { requestBuilder); } - void assertSearchSourceBuilder(SearchSourceBuilder expected, - OpenSearchRequestBuilder requestBuilder) + void assertSearchSourceBuilder( + SearchSourceBuilder expected, OpenSearchRequestBuilder requestBuilder) throws UnsupportedOperationException { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest.source()); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], new TotalHits(0, - TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; - requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT).search( - querySearch, scrollSearch); + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest.source()); + return when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; + Function scrollSearch = + searchScrollRequest -> { + throw new UnsupportedOperationException(); + }; + requestBuilder + .build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT) + .search(querySearch, scrollSearch); } @Test @@ -231,9 +234,8 @@ void test_push_down_non_field_sort() { @Test void test_push_down_multiple_sort() { - requestBuilder.pushDownSort(List.of( - SortBuilders.fieldSort("intA"), - SortBuilders.fieldSort("intB"))); + requestBuilder.pushDownSort( + List.of(SortBuilders.fieldSort("intA"), SortBuilders.fieldSort("intB"))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -255,7 +257,7 @@ void test_push_down_project() { .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); 
assertEquals( @@ -285,7 +287,7 @@ void test_push_down_project_limit() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -315,7 +317,7 @@ void test_push_down_project_limit_and_offset() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -333,24 +335,25 @@ void test_push_down_project_limit_and_offset() { @Test void test_push_down_nested() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -363,28 +366,29 @@ void test_push_down_nested() { @Test void test_push_down_multiple_nested_with_same_path() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", 
new ReferenceExpression("message", STRING) - ), - Map.of( - "field", new ReferenceExpression("message.from", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING)), + Map.of( + "field", new ReferenceExpression("message.from", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null), - new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), null) - ); + new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info", "message.from"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext( + true, new String[] {"message.info", "message.from"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(nestedQuery))) @@ -396,35 +400,35 @@ void test_push_down_multiple_nested_with_same_path() { @Test void test_push_down_nested_with_filter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", 
DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.getSourceBuilder().query(QueryBuilders.rangeQuery("myNum").gt(3)); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("myNum").gt(3)) - .must(nestedQuery) - ) - ) + QueryBuilders.boolQuery() + .filter( + QueryBuilders.boolQuery() + .must(QueryBuilders.rangeQuery("myNum").gt(3)) + .must(nestedQuery))) .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) .timeout(DEFAULT_QUERY_TIMEOUT), @@ -433,17 +437,15 @@ void test_push_down_nested_with_filter() { @Test void testPushDownNestedWithNestedFilter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); QueryBuilder innerFilterQuery = QueryBuilders.rangeQuery("myNum").gt(3); QueryBuilder filterQuery = @@ -452,20 +454,20 @@ void 
testPushDownNestedWithNestedFilter() { requestBuilder.getSourceBuilder().query(filterQuery); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); - - assertSearchSourceBuilder(new SearchSourceBuilder() - .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(filterQuery) - ) - ) - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); + + assertSearchSourceBuilder( + new SearchSourceBuilder() + .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(filterQuery))) + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT), + requestBuilder); } @Test @@ -479,8 +481,9 @@ void test_push_type_mapping() { @Test void push_down_highlight_with_repeating_fields() { requestBuilder.pushDownHighlight("name", Map.of()); - var exception = assertThrows(SemanticCheckException.class, () -> - requestBuilder.pushDownHighlight("name", Map.of())); + var exception = + assertThrows( + SemanticCheckException.class, () -> requestBuilder.pushDownHighlight("name", Map.of())); assertEquals("Duplicate field name in highlight", exception.getMessage()); } @@ -488,10 +491,7 @@ void push_down_highlight_with_repeating_fields() { void push_down_page_size() { requestBuilder.pushDownPageSize(3); assertSearchSourceBuilder( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(3) - .timeout(DEFAULT_QUERY_TIMEOUT), + new SearchSourceBuilder().from(DEFAULT_OFFSET).size(3).timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); } @@ -499,7 +499,8 @@ void 
push_down_page_size() { void exception_when_non_zero_offset_and_page_size() { requestBuilder.pushDownPageSize(3); requestBuilder.pushDownLimit(300, 2); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java index 4b9233dbc1..66cb6bf14c 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertAll; @@ -49,50 +48,48 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchScrollRequestTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(1); - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - private final OpenSearchScrollRequest request = new OpenSearchScrollRequest( - 
INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of()); + private final OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of()); @Test void constructor() { - var request = new OpenSearchScrollRequest(INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of("test")); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of("test")); assertEquals(List.of("test"), request.getIncludes()); } @Test void searchRequest() { searchSourceBuilder.query(QueryBuilders.termQuery("name", "John")); - request.search(searchRequest -> { - assertEquals( - new SearchRequest() - .indices("test") - .scroll(TimeValue.timeValueMinutes(1)) - .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), - searchRequest); - SearchHits searchHitsMock = when(mock(SearchHits.class).getHits()) - .thenReturn(new SearchHit[0]).getMock(); - return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); - }, searchScrollRequest -> null); + request.search( + searchRequest -> { + assertEquals( + new SearchRequest() + .indices("test") + .scroll(TimeValue.timeValueMinutes(1)) + .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), + searchRequest); + SearchHits searchHitsMock = + when(mock(SearchHits.class).getHits()).thenReturn(new SearchHit[0]).getMock(); + return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); + }, + searchScrollRequest -> null); } @Test @@ -110,21 +107,19 @@ void isScrollStarted() { void scrollRequest() { request.setScrollId("scroll123"); assertEquals( - new SearchScrollRequest() - .scroll(TimeValue.timeValueMinutes(1)) - .scrollId("scroll123"), + new SearchScrollRequest().scroll(TimeValue.timeValueMinutes(1)).scrollId("scroll123"), request.scrollRequest()); } @Test void search() { - OpenSearchScrollRequest request = 
new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -135,13 +130,13 @@ void search() { @Test void search_without_context() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -154,13 +149,13 @@ void search_without_context() { @SneakyThrows void search_without_scroll_and_initial_request_should_throw() { // Steps: serialize a not used request, deserialize it, then use - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); var outStream = new BytesStreamOutput(); request.writeTo(outStream); outStream.flush(); @@ -172,20 +167,21 @@ void search_without_scroll_and_initial_request_should_throw() { assertAll( () -> assertFalse(request2.isScroll()), () -> assertNull(request2.getInitialSearchRequest()), - () -> assertThrows(UnsupportedOperationException.class, - () -> request2.search(sr -> fail("search"), sr -> 
fail("scroll"))) - ); + () -> + assertThrows( + UnsupportedOperationException.class, + () -> request2.search(sr -> fail("search"), sr -> fail("scroll")))); } @Test void search_withoutIncludes() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -213,9 +209,10 @@ void clean_on_empty_response() { // This could happen on sequential search calls SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll1", "scroll2"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals("scroll1", request.getScrollId()); @@ -233,8 +230,9 @@ void clean_on_empty_response() { void no_clean_on_non_empty_response() { SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); request.search((sr) -> searchResponse, (sr) -> searchResponse); assertEquals("scroll", 
request.getScrollId()); @@ -246,8 +244,7 @@ void no_clean_on_non_empty_response() { @Test void no_cursor_on_empty_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], null, 1f)); + when(searchResponse.getHits()).thenReturn(new SearchHits(new SearchHit[0], null, 1f)); request.search((x) -> searchResponse, (x) -> searchResponse); assertFalse(request.hasAnotherBatch()); @@ -256,8 +253,9 @@ void no_cursor_on_empty_response() { @Test void no_clean_if_no_scroll_in_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals(NO_SCROLL_ID, request.getScrollId()); @@ -286,8 +284,10 @@ void serialize_deserialize_no_needClean() { @Test @SneakyThrows void serialize_deserialize_needClean() { - lenient().when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + lenient() + .when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); lenient().when(searchResponse.getScrollId()).thenReturn(""); var stream = new BytesStreamOutput(); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java index 7ed6c900dd..cd915cf5e5 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java @@ 
-36,12 +36,7 @@ class OpenSearchAggregationResponseParserTest { /** SELECT MAX(age) as max FROM accounts. */ @Test void no_bucket_one_metric_should_pass() { - String response = - "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; + String response = "{\n \"max#max\": {\n \"value\": 40\n }\n}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); assertThat(parse(parser, response), contains(entry("max", 40d))); } @@ -145,12 +140,7 @@ void two_bucket_one_metric_should_pass() { @Test void unsupported_aggregation_should_fail() { - String response = - "{\n" - + " \"date_histogram#date_histogram\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; + String response = "{\n \"date_histogram#date_histogram\": {\n \"value\": 40\n }\n}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); RuntimeException exception = assertThrows(RuntimeException.class, () -> parse(parser, response)); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java index b26847b095..6f4605bc2f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static java.util.Collections.emptyList; @@ -49,31 +48,25 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResponseTest { - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit1; + @Mock private SearchHit searchHit1; - @Mock - private SearchHit searchHit2; + @Mock 
private SearchHit searchHit2; - @Mock - private Aggregations aggregations; + @Mock private Aggregations aggregations; private List includes = List.of(); - @Mock - private OpenSearchAggregationResponseParser parser; + @Mock private OpenSearchAggregationResponseParser parser; - private ExprTupleValue exprTupleValue1 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", - new ExprIntegerValue(1))); + private ExprTupleValue exprTupleValue1 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); - private ExprTupleValue exprTupleValue2 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", - new ExprIntegerValue(2))); + private ExprTupleValue exprTupleValue2 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", new ExprIntegerValue(2))); @Test void isEmpty() { @@ -119,7 +112,8 @@ void iterator() { when(searchHit1.getInnerHits()).thenReturn(null); when(searchHit2.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())) - .thenReturn(exprTupleValue1).thenReturn(exprTupleValue2); + .thenReturn(exprTupleValue1) + .thenReturn(exprTupleValue2); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, List.of("id1"))) { @@ -137,9 +131,8 @@ void iterator() { @Test void iterator_metafields() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -160,15 +153,16 @@ void iterator_metafields() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_routing", new ExprStringValue(shardTarget.toString()), - "_sort", new ExprLongValue(123456L), - 
"_score", new ExprFloatValue(3.75F), - "_maxscore", new ExprFloatValue(3.75F) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_routing", new ExprStringValue(shardTarget.toString()), + "_sort", new ExprLongValue(123456L), + "_score", new ExprFloatValue(3.75F), + "_maxscore", new ExprFloatValue(3.75F))); List includes = List.of("id1", "_index", "_id", "_routing", "_sort", "_score", "_maxscore"); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { @@ -184,9 +178,8 @@ void iterator_metafields() { @Test void iterator_metafields_withoutIncludes() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -200,9 +193,8 @@ void iterator_metafields_withoutIncludes() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1"); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -217,9 +209,8 @@ void iterator_metafields_withoutIncludes() { @Test void iterator_metafields_scoreNaN() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -237,12 +228,13 @@ void 
iterator_metafields_scoreNaN() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1", "_index", "_id", "_sort", "_score", "_maxscore"); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_sort", new ExprLongValue(123456L) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_sort", new ExprLongValue(123456L))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -262,13 +254,14 @@ void iterator_with_inner_hits() { new SearchHit[] {searchHit1}, new TotalHits(2L, TotalHits.Relation.EQUAL_TO), 1.0F)); - when(searchHit1.getInnerHits()).thenReturn( - Map.of( - "innerHit", - new SearchHits( - new SearchHit[] {searchHit1}, - new TotalHits(2L, TotalHits.Relation.EQUAL_TO), - 1.0F))); + when(searchHit1.getInnerHits()) + .thenReturn( + Map.of( + "innerHit", + new SearchHits( + new SearchHit[] {searchHit1}, + new TotalHits(2L, TotalHits.Relation.EQUAL_TO), + 1.0F))); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue1); @@ -321,18 +314,17 @@ void aggregation_iterator() { @Test void highlight_iterator() { SearchHit searchHit = new SearchHit(1); - searchHit.sourceRef( - new BytesArray("{\"name\":\"John\"}")); - Map highlightMap = Map.of("highlights", - new HighlightField("Title", new Text[] {new Text("field")})); - searchHit.highlightFields(Map.of("highlights", new HighlightField("Title", - new Text[] {new Text("field")}))); + searchHit.sourceRef(new BytesArray("{\"name\":\"John\"}")); + Map highlightMap = + Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")})); + searchHit.highlightFields( + 
Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")}))); ExprValue resultTuple = ExprValueUtils.tupleValue(searchHit.getSourceAsMap()); when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[]{searchHit1}, + new SearchHit[] {searchHit1}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); @@ -340,11 +332,12 @@ void highlight_iterator() { when(factory.construct(any(), anyBoolean())).thenReturn(resultTuple); for (ExprValue resultHit : new OpenSearchResponse(searchResponse, factory, includes)) { - var expected = ExprValueUtils.collectionValue( - Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) - .map(t -> (t.toString())).collect(Collectors.toList())); - var result = resultHit.tupleValue().get( - "_highlight").tupleValue().get("highlights"); + var expected = + ExprValueUtils.collectionValue( + Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) + .map(t -> (t.toString())) + .collect(Collectors.toList())); + var result = resultHit.tupleValue().get("_highlight").tupleValue().get("highlights"); assertTrue(expected.equals(result)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java index 835798f162..ff2c311753 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -33,14 +32,12 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSettingsTest { - @Mock - private ClusterSettings clusterSettings; + @Mock private ClusterSettings clusterSettings; @Test void getSettingValue() { 
when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -50,11 +47,14 @@ void getSettingValue() { @Test void getSettingValueWithPresetValuesInYml() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings - .get((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) + when(clusterSettings.get( + (Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) .thenReturn(new ByteSizeValue(20)); - when(clusterSettings.get(not(or(eq(ClusterName.CLUSTER_NAME_SETTING), - eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) + when(clusterSettings.get( + not( + or( + eq(ClusterName.CLUSTER_NAME_SETTING), + eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) .thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -78,8 +78,7 @@ void pluginNonDynamicSettings() { @Test void getSettings() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertFalse(settings.getSettings().isEmpty()); } @@ -87,12 +86,10 @@ void getSettings() { @Test void update() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - 
when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue oldValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); - OpenSearchSettings.Updater updater = - settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); + OpenSearchSettings.Updater updater = settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); updater.accept(new ByteSizeValue(0L)); ByteSizeValue newValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -103,8 +100,7 @@ void update() { @Test void settingsFallback() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertEquals( settings.getSettingValue(Settings.Key.SQL_ENABLED), @@ -156,17 +152,17 @@ public void updateLegacySettingsFallback() { assertEquals(OpenSearchSettings.SQL_ENABLED_SETTING.get(settings), false); assertEquals(OpenSearchSettings.SQL_SLOWLOG_SETTING.get(settings), 10); - assertEquals(OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), - timeValueMinutes(1)); + assertEquals( + OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), timeValueMinutes(1)); assertEquals(OpenSearchSettings.PPL_ENABLED_SETTING.get(settings), true); - assertEquals(OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), + assertEquals( + OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), new ByteSizeValue((int) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.2))); assertEquals(OpenSearchSettings.QUERY_SIZE_LIMIT_SETTING.get(settings), 100); 
assertEquals(OpenSearchSettings.METRICS_ROLLING_WINDOW_SETTING.get(settings), 2000L); assertEquals(OpenSearchSettings.METRICS_ROLLING_INTERVAL_SETTING.get(settings), 100L); } - @Test void legacySettingsShouldBeDeprecatedBeforeRemove() { assertEquals(15, legacySettings().size()); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java index 39af59b6cd..3ddb07d86a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java @@ -63,20 +63,16 @@ class OpenSearchIndexTest { public static final int QUERY_SIZE_LIMIT = 200; public static final TimeValue SCROLL_TIMEOUT = new TimeValue(1); - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private IndexMapping mapping; + @Mock private IndexMapping mapping; private OpenSearchIndex index; @@ -94,16 +90,18 @@ void isExist() { @Test void createIndex() { - Map mappings = Map.of( - "properties", + Map mappings = Map.of( - "name", "text", - "age", "integer")); + "properties", + Map.of( + "name", "text", + "age", "integer")); doNothing().when(client).createIndex("test", mappings); Map schema = new HashMap<>(); - schema.put("name", OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(MappingType.Keyword)))); + schema.put( + "name", + OpenSearchTextType.of(Map.of("keyword", 
OpenSearchDataType.of(MappingType.Keyword)))); schema.put("age", INTEGER); index.create(schema); verify(client).createIndex(any(), any()); @@ -111,24 +109,27 @@ void createIndex() { @Test void getFieldTypes() { - when(mapping.getFieldMappings()).thenReturn( - ImmutableMap.builder() - .put("name", MappingType.Keyword) - .put("address", MappingType.Text) - .put("age", MappingType.Integer) - .put("account_number", MappingType.Long) - .put("balance1", MappingType.Float) - .put("balance2", MappingType.Double) - .put("gender", MappingType.Boolean) - .put("family", MappingType.Nested) - .put("employer", MappingType.Object) - .put("birthday", MappingType.Date) - .put("id1", MappingType.Byte) - .put("id2", MappingType.Short) - .put("blob", MappingType.Binary) - .build().entrySet().stream().collect(Collectors.toMap( - Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue()) - ))); + when(mapping.getFieldMappings()) + .thenReturn( + ImmutableMap.builder() + .put("name", MappingType.Keyword) + .put("address", MappingType.Text) + .put("age", MappingType.Integer) + .put("account_number", MappingType.Long) + .put("balance1", MappingType.Float) + .put("balance2", MappingType.Double) + .put("gender", MappingType.Boolean) + .put("family", MappingType.Nested) + .put("employer", MappingType.Object) + .put("birthday", MappingType.Date) + .put("id1", MappingType.Byte) + .put("id2", MappingType.Short) + .put("blob", MappingType.Binary) + .build() + .entrySet() + .stream() + .collect( + Collectors.toMap(Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); // Run more than once to confirm caching logic is covered and can work @@ -150,35 +151,30 @@ void getFieldTypes() { hasEntry("birthday", ExprCoreType.TIMESTAMP), hasEntry("id1", ExprCoreType.BYTE), hasEntry("id2", ExprCoreType.SHORT), - hasEntry("blob", (ExprType) OpenSearchDataType.of(MappingType.Binary)) - )); + hasEntry("blob", (ExprType) 
OpenSearchDataType.of(MappingType.Binary)))); } } @Test void checkCacheUsedForFieldMappings() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); - when(client.getIndexMappings("test")).thenReturn( - ImmutableMap.of("test", mapping)); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); + when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); OpenSearchIndex index = new OpenSearchIndex(client, settings, "test"); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); - lenient().when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Integer))); + lenient() + .when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Integer))); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); } @Test @@ -193,8 +189,7 @@ void getReservedFieldTypes() { hasEntry("_routing", ExprCoreType.STRING), hasEntry("_sort", ExprCoreType.LONG), hasEntry("_score", ExprCoreType.FLOAT), - hasEntry("_maxscore", ExprCoreType.FLOAT) - )); + hasEntry("_maxscore", ExprCoreType.FLOAT))); } @Test @@ -204,8 +199,9 @@ void 
implementRelationOperatorOnly() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, - 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(index.optimize(plan))); } @@ -216,8 +212,10 @@ void implementRelationOperatorWithOptimization() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, 200, - requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(plan)); + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + index.implement(plan)); } @Test @@ -239,12 +237,7 @@ void implementOtherLogicalOperators() { LogicalPlanDSL.dedupe( sort( eval( - remove( - rename( - index.createScanBuilder(), - mappings), - exclude), - newEvalField), + remove(rename(index.createScanBuilder(), mappings), exclude), newEvalField), sortField), dedupeField), include); @@ -258,9 +251,11 @@ void implementOtherLogicalOperators() { PhysicalPlanDSL.eval( PhysicalPlanDSL.remove( PhysicalPlanDSL.rename( - new OpenSearchIndexScan(client, - QUERY_SIZE_LIMIT, requestBuilder.build(INDEX_NAME, maxResultWindow, - SCROLL_TIMEOUT)), + new OpenSearchIndexScan( + client, + QUERY_SIZE_LIMIT, + requestBuilder.build( + INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), mappings), exclude), newEvalField), diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java index 1089e7e252..38f2ae495e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.junit.jupiter.api.Assertions.assertAll; @@ -25,31 +24,23 @@ @ExtendWith(MockitoExtension.class) class OpenSearchStorageEngineTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private Settings settings; + @Mock private Settings settings; @Test public void getTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - "test"); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), "test"); + assertAll(() -> assertNotNull(table), () -> assertTrue(table instanceof OpenSearchIndex)); } @Test public void getSystemTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - TABLE_INFO); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchSystemIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), TABLE_INFO); + assertAll(() -> assertNotNull(table), () -> assertTrue(table instanceof OpenSearchSystemIndex)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java index 5a510fefec..229d62abdf 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java @@ -26,10 +26,8 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanAggregationBuilderTest { - @Mock - OpenSearchRequestBuilder requestBuilder; - @Mock - LogicalAggregation logicalAggregation; + @Mock OpenSearchRequestBuilder requestBuilder; + @Mock LogicalAggregation logicalAggregation; OpenSearchIndexScanAggregationBuilder builder; @BeforeEach @@ -71,5 +69,4 @@ void pushDownPageSize() { void pushDownNested() { assertFalse(builder.pushDownNested(mock(LogicalNested.class))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java index e045bae3e3..6749f87c5b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -91,16 +90,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanOptimizationTest { - @Mock - private Table table; + @Mock private Table table; - @Mock - private OpenSearchIndexScan indexScan; + @Mock private OpenSearchIndexScan indexScan; private OpenSearchIndexScanBuilder indexScanBuilder; - @Mock - private OpenSearchRequestBuilder requestBuilder; + @Mock private OpenSearchRequestBuilder requestBuilder; private 
Runnable[] verifyPushDownCalls = {}; @@ -114,72 +110,54 @@ void setUp() { void test_project_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withProjectPushedDown(DSL.ref("intV", INTEGER))), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), - project( - relation("schema", table), - DSL.named("i", DSL.ref("intV", INTEGER))) - ); + indexScanBuilder(withProjectPushedDown(DSL.ref("intV", INTEGER))), + DSL.named("i", DSL.ref("intV", INTEGER))), + project(relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE intV = 1. - */ + /** SELECT intV as i FROM schema WHERE intV = 1. */ @Test void test_filter_push_down() { assertEqualsAfterOptimization( project( indexScanBuilder( - //withProjectPushedDown(DSL.ref("intV", INTEGER)), - withFilterPushedDown(QueryBuilders.termQuery("intV", 1)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + // withProjectPushedDown(DSL.ref("intV", INTEGER)), + withFilterPushedDown(QueryBuilders.termQuery("intV", 1))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). - */ + /** SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). 
*/ @Test void test_filter_on_opensearchfunction_with_trackedscores_push_down() { LogicalPlan expectedPlan = project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) queryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -197,35 +175,36 @@ void test_filter_on_multiple_opensearchfunctions_with_trackedscores_push_down() .should( QueryBuilders.queryStringQuery("QUERY") .field("intV", 1.5F) - .boost(12.5F) - ) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression firstQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", 
"QUERY"), - DSL.namedArgument("boost", "12.5")); + .boost(12.5F))), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression firstQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) firstQueryString).setScoreTracked(false); - FunctionExpression secondQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + FunctionExpression secondQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) secondQueryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - DSL.or(firstQueryString, secondQueryString) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), DSL.or(firstQueryString, secondQueryString)), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -235,34 +214,28 @@ void test_filter_on_opensearchfunction_without_trackedscores_push_down() { project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(false) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - 
DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); - - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(false)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); + + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } - /** - * SELECT avg(intV) FROM schema GROUP BY string_value. - */ + /** SELECT avg(intV) FROM schema GROUP BY string_value. 
*/ @Test void test_aggregation_push_down() { assertEqualsAfterOptimization( @@ -272,20 +245,17 @@ void test_aggregation_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /* @@ -319,125 +289,79 @@ void aggregation_cant_merge_indexScan_with_project() { } */ - /** - * Sort - Relation --> IndexScan. - */ + /** Sort - Relation --> IndexScan. */ @Test void test_sort_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withSortPushedDown( - SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first")) - ), - sort( - relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)) - ) - ); + SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)))); } @Test void test_page_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withPageSizePushDown(5)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), - paginate(project( - relation("schema", table), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), 5 - )); + indexScanBuilder(withPageSizePushDown(5)), DSL.named("intV", DSL.ref("intV", INTEGER))), + paginate( + project(relation("schema", table), DSL.named("intV", DSL.ref("intV", INTEGER))), 5)); } @Test void test_score_sort_push_down() { assertEqualsAfterOptimization( - indexScanBuilder( - 
withSortPushedDown( - SortBuilders.scoreSort().order(SortOrder.ASC) - ) - ), + indexScanBuilder(withSortPushedDown(SortBuilders.scoreSort().order(SortOrder.ASC))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)))); } @Test void test_limit_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + indexScanBuilder(withLimitPushedDown(1, 1)), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - limit( - relation("schema", table), - 1, 1), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + limit(relation("schema", table), 1, 1), DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test void test_highlight_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withHighlightPushedDown("*", Collections.emptyMap())), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ), + indexScanBuilder(withHighlightPushedDown("*", Collections.emptyMap())), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*")))), project( - highlight( - relation("schema", table), - DSL.literal("*"), Collections.emptyMap()), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ) - ); + highlight(relation("schema", table), DSL.literal("*"), Collections.emptyMap()), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*"))))); } @Test void test_nested_push_down() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new 
NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); assertEqualsAfterOptimization( project( - nested( - indexScanBuilder( - withNestedPushedDown(nested.getFields())), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ), - project( - nested( - relation("schema", table), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ) - ); + nested(indexScanBuilder(withNestedPushedDown(nested.getFields())), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING)))), + project( + nested(relation("schema", table), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING))))); } - /** - * SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. - */ + /** SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. */ @Test void test_aggregation_filter_push_down() { assertEqualsAfterOptimization( @@ -448,50 +372,37 @@ void test_aggregation_filter_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } 
- /** - * Sort - Filter - Relation --> IndexScan. - */ + /** Sort - Filter - Relation --> IndexScan. */ @Test void test_sort_filter_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withFilterPushedDown(QueryBuilders.termQuery("intV", 1)), withSortPushedDown( - SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")) - ), + SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first"))), sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)))); } - /** - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. - */ + /** SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. */ @Test void test_sort_aggregation_push_down() { assertEqualsAfterOptimization( @@ -502,22 +413,19 @@ void test_sort_aggregation_push_down() { .aggregateBy("intV") .groupBy("stringV") .sortBy(SortOption.DEFAULT_DESC) - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test @@ -529,21 +437,17 @@ void test_limit_sort_filter_push_down() { withSortPushedDown( 
SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")), withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( limit( sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), 1, 1 - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), + 1, + 1), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } /* @@ -557,23 +461,16 @@ void only_one_project_should_be_push() { project( project( indexScanBuilder( - withProjectPushedDown( - DSL.ref("intV", INTEGER), - DSL.ref("stringV", STRING))), + withProjectPushedDown(DSL.ref("intV", INTEGER), DSL.ref("stringV", STRING))), DSL.named("i", DSL.ref("intV", INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( project( relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } @Test @@ -586,21 +483,14 @@ void test_nested_sort_filter_push_down() { SortBuilders.fieldSort("message.info") .order(SortOrder.ASC) .setNestedSort(new NestedSortBuilder("message")))), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - sort( - filter( - relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of( - SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)) - ) - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + sort( + 
filter( + relation("schema", table), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)))), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test @@ -610,54 +500,30 @@ void test_function_expression_sort_returns_optimized_logical_sort() { sort( indexScanBuilder(), Pair.of( - SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message"))) - ) - ), + SortOption.DEFAULT_ASC, DSL.match(DSL.namedArgument("field", literal("message"))))), sort( relation("schema", table), Pair.of( SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message")) - ) - ) - ) - ); + DSL.match(DSL.namedArgument("field", literal("message")))))); } @Test void test_non_field_sort_returns_optimized_logical_sort() { // Invalid use case coverage OpenSearchIndexScanBuilder::sortByFieldsOnly returns false assertEqualsAfterOptimization( - sort( - indexScanBuilder(), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ), - sort( - relation("schema", table), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ) - ); + sort(indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field")))); } @Test void sort_with_expression_cannot_merge_with_relation() { assertEqualsAfterOptimization( sort( - indexScanBuilder(), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -669,20 +535,17 @@ void sort_with_expression_cannot_merge_with_aggregation() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", 
DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -690,30 +553,21 @@ void aggregation_cant_merge_index_scan_with_limit() { assertEqualsAfterOptimization( project( aggregation( - indexScanBuilder( - withLimitPushedDown(10, 0)), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + indexScanBuilder(withLimitPushedDown(10, 0)), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( - limit( - relation("schema", table), - 10, 0), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + limit(relation("schema", table), 10, 0), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /** - * Can't Optimize the following query. - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY avg(intV). + * Can't Optimize the following query. SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY + * avg(intV). 
*/ @Test void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { @@ -725,52 +579,39 @@ void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test void project_literal_should_not_be_pushed_down() { assertEqualsAfterOptimization( - project( - indexScanBuilder(), - DSL.named("i", DSL.literal("str")) - ), - optimize( - project( - relation("schema", table), - DSL.named("i", DSL.literal("str")) - ) - ) - ); + project(indexScanBuilder(), DSL.named("i", DSL.literal("str"))), + optimize(project(relation("schema", table), DSL.named("i", DSL.literal("str"))))); } private OpenSearchIndexScanBuilder indexScanBuilder(Runnable... verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - return new OpenSearchIndexScanBuilder(new OpenSearchIndexScanQueryBuilder(requestBuilder), - requestBuilder -> indexScan); + return new OpenSearchIndexScanBuilder( + new OpenSearchIndexScanQueryBuilder(requestBuilder), requestBuilder -> indexScan); } private OpenSearchIndexScanBuilder indexScanAggBuilder(Runnable... 
verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - var aggregationBuilder = new OpenSearchIndexScanAggregationBuilder( - requestBuilder, mock(LogicalAggregation.class)); + var aggregationBuilder = + new OpenSearchIndexScanAggregationBuilder(requestBuilder, mock(LogicalAggregation.class)); return new OpenSearchIndexScanBuilder(aggregationBuilder, builder -> indexScan); } @@ -797,29 +638,32 @@ private Runnable withAggregationPushedDown( AggregationAssertHelper.AggregationAssertHelperBuilder aggregation) { // Assume single term bucket and AVG metric in all tests in this suite - CompositeAggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList( - new TermsValuesSourceBuilder(aggregation.groupBy) - .field(aggregation.groupBy) - .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") - .missingOrder(aggregation.sortBy.getNullOrder() == NULL_FIRST ? "first" : "last") - .missingBucket(true))) - .subAggregation( - AggregationBuilders.avg(aggregation.aggregateName) - .field(aggregation.aggregateBy)) - .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); + CompositeAggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", + Collections.singletonList( + new TermsValuesSourceBuilder(aggregation.groupBy) + .field(aggregation.groupBy) + .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") + .missingOrder( + aggregation.sortBy.getNullOrder() == NULL_FIRST ? 
"first" : "last") + .missingBucket(true))) + .subAggregation( + AggregationBuilders.avg(aggregation.aggregateName).field(aggregation.aggregateBy)) + .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); List aggBuilders = Collections.singletonList(aggBuilder); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser(aggregation.aggregateName)); + new CompositeAggregationParser(new SingleValueParser(aggregation.aggregateName)); return () -> { verify(requestBuilder, times(1)).pushDownAggregation(Pair.of(aggBuilders, responseParser)); - verify(requestBuilder, times(1)).pushTypeMapping(aggregation.resultTypes - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - e -> OpenSearchDataType.of(e.getValue())))); + verify(requestBuilder, times(1)) + .pushTypeMapping( + aggregation.resultTypes.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); }; } @@ -832,8 +676,8 @@ private Runnable withLimitPushedDown(int size, int offset) { } private Runnable withProjectPushedDown(ReferenceExpression... 
references) { - return () -> verify(requestBuilder, times(1)).pushDownProjects( - new HashSet<>(Arrays.asList(references))); + return () -> + verify(requestBuilder, times(1)).pushDownProjects(new HashSet<>(Arrays.asList(references))); } private Runnable withHighlightPushedDown(String field, Map arguments) { @@ -875,16 +719,18 @@ private static class AggregationAssertHelper { } private LogicalPlan optimize(LogicalPlan plan) { - LogicalPlanOptimizer optimizer = new LogicalPlanOptimizer(List.of( - new CreateTableScanBuilder(), - new PushDownPageSize(), - PUSH_DOWN_FILTER, - PUSH_DOWN_AGGREGATION, - PUSH_DOWN_SORT, - PUSH_DOWN_LIMIT, - PUSH_DOWN_HIGHLIGHT, - PUSH_DOWN_NESTED, - PUSH_DOWN_PROJECT)); + LogicalPlanOptimizer optimizer = + new LogicalPlanOptimizer( + List.of( + new CreateTableScanBuilder(), + new PushDownPageSize(), + PUSH_DOWN_FILTER, + PUSH_DOWN_AGGREGATION, + PUSH_DOWN_SORT, + PUSH_DOWN_LIMIT, + PUSH_DOWN_HIGHLIGHT, + PUSH_DOWN_NESTED, + PUSH_DOWN_PROJECT)); return optimizer.optimize(plan); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java index 67f0869d6e..2085519b12 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java @@ -44,34 +44,37 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class OpenSearchIndexScanPaginationTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final int MAX_RESULT_WINDOW = 3; public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(4); - @Mock - 
private Settings settings; + @Mock private Settings settings; @BeforeEach void setup() { lenient().when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(QUERY_SIZE); - lenient().when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) - .thenReturn(TimeValue.timeValueMinutes(1)); + lenient() + .when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); } - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory - = new OpenSearchExprValueFactory(Map.of( - "name", OpenSearchDataType.of(STRING), - "department", OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), + "department", OpenSearchDataType.of(STRING))); @Test void query_empty_result() { mockResponse(client); var builder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (var indexScan = new OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -80,8 +83,11 @@ void query_empty_result() { @Test void explain_not_implemented() { - assertThrows(Throwable.class, () -> mock(OpenSearchIndexScan.class, - withSettings().defaultAnswer(CALLS_REAL_METHODS)).explain()); + assertThrows( + Throwable.class, + () -> + mock(OpenSearchIndexScan.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)) + .explain()); } @Test @@ -92,9 +98,11 @@ void dont_serialize_if_no_cursor() { OpenSearchResponse response = mock(); when(builder.build(any(), anyInt(), any())).thenReturn(request); when(client.search(any())).thenReturn(response); - try (var indexScan - = new 
OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); when(request.hasAnotherBatch()).thenReturn(false); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java index 67749c4055..ac1e9038fb 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertAll; @@ -66,20 +65,19 @@ class OpenSearchIndexScanTest { public static final int QUERY_SIZE = 200; - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("employees"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("employees"); public static final int MAX_RESULT_WINDOW = 10000; public static final TimeValue CURSOR_KEEP_ALIVE = TimeValue.timeValueMinutes(1); - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory = new OpenSearchExprValueFactory( - Map.of("name", OpenSearchDataType.of(STRING), - "department", OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), "department", OpenSearchDataType.of(STRING))); @BeforeEach - void setup() { - } + void setup() {} @Test void explain() { @@ -96,8 +94,8 @@ void throws_no_cursor_exception() { var 
request = mock(OpenSearchRequest.class); when(request.hasAnotherBatch()).thenReturn(false); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request); - var byteStream = new ByteArrayOutputStream(); - var objectStream = new ObjectOutputStream(byteStream)) { + var byteStream = new ByteArrayOutputStream(); + var objectStream = new ObjectOutputStream(byteStream)) { assertThrows(NoCursorException.class, () -> objectStream.writeObject(indexScan)); } } @@ -112,8 +110,9 @@ void serialize() { var index = mock(OpenSearchIndex.class); when(engine.getClient()).thenReturn(client); when(engine.getTable(any(), any())).thenReturn(index); - var request = new OpenSearchScrollRequest( - INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); request.setScrollId("valid-id"); // make a response, so OpenSearchResponse::isEmpty would return true and unset needClean var response = mock(SearchResponse.class); @@ -121,7 +120,7 @@ void serialize() { var hits = mock(SearchHits.class); when(response.getHits()).thenReturn(hits); when(response.getScrollId()).thenReturn("valid-id"); - when(hits.getHits()).thenReturn(new SearchHit[]{ mock() }); + when(hits.getHits()).thenReturn(new SearchHit[] {mock()}); request.search(null, (req) -> response); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request)) { @@ -145,8 +144,9 @@ void query_empty_result() { mockResponse(client); final var name = new OpenSearchRequest.IndexName("test"); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { 
indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -155,88 +155,84 @@ void query_empty_result() { @Test void query_all_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), employee(2, "Smith", "HR"), employee(3, "Allen", "IT") + }); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } - static final OpenSearchRequest.IndexName EMPLOYEES_INDEX - = new OpenSearchRequest.IndexName("employees"); + static final OpenSearchRequest.IndexName EMPLOYEES_INDEX = + new OpenSearchRequest.IndexName("employees"); @Test void query_all_results_with_scroll() { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new ExprValue[]{employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT")}); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, 
requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @Test void query_some_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int limit = 3; OpenSearchRequestBuilder builder = new OpenSearchRequestBuilder(0, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -245,55 +241,56 @@ void query_some_results_with_query() { void 
query_some_results_with_scroll() { mockTwoPageResponse(client); final var requestuilder = new OpenSearchRequestBuilder(10, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } static void mockTwoPageResponse(OpenSearchClient client) { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new ExprValue[]{employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); } @Test void query_results_limited_by_query_size() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int defaultQuerySize = 2; final var requestBuilder = new OpenSearchRequestBuilder(defaultQuerySize, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - defaultQuerySize, requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { + try 
(OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, + defaultQuerySize, + requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -323,7 +320,8 @@ void push_down_highlight() { .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) .pushDownHighlight("Body", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), + .shouldQueryHighlight( + QueryBuilders.termQuery("name", "John"), new HighlightBuilder().field("Title").field("Body")); } @@ -332,14 +330,12 @@ void push_down_highlight_with_arguments() { Map args = new HashMap<>(); args.put("pre_tags", new Literal("", DataType.STRING)); args.put("post_tags", new Literal("", DataType.STRING)); - HighlightBuilder highlightBuilder = new HighlightBuilder() - .field("Title"); + HighlightBuilder highlightBuilder = new HighlightBuilder().field("Title"); highlightBuilder.fields().get(0).preTags("").postTags(""); assertThat() .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), - highlightBuilder); + .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), highlightBuilder); } private PushDownAssertion assertThat() { @@ -352,8 +348,7 @@ private static class PushDownAssertion { private final OpenSearchResponse response; private final OpenSearchExprValueFactory factory; - public PushDownAssertion(OpenSearchClient client, - OpenSearchExprValueFactory valueFactory) { + public PushDownAssertion(OpenSearchClient client, OpenSearchExprValueFactory valueFactory) { this.client = client; 
this.requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, valueFactory); @@ -373,35 +368,39 @@ PushDownAssertion pushDownHighlight(String query, Map arguments } PushDownAssertion shouldQueryHighlight(QueryBuilder query, HighlightBuilder highlight) { - var sourceBuilder = new SearchSourceBuilder() - .from(0) - .timeout(CURSOR_KEEP_ALIVE) - .query(query) - .size(QUERY_SIZE) - .highlighter(highlight) - .sort(DOC_FIELD_NAME, ASC); + var sourceBuilder = + new SearchSourceBuilder() + .from(0) + .timeout(CURSOR_KEEP_ALIVE) + .query(query) + .size(QUERY_SIZE) + .highlighter(highlight) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, sourceBuilder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } PushDownAssertion shouldQuery(QueryBuilder expected) { - var builder = new SearchSourceBuilder() - .from(0) - .query(expected) - .size(QUERY_SIZE) - .timeout(CURSOR_KEEP_ALIVE) - .sort(DOC_FIELD_NAME, ASC); + var builder = + new SearchSourceBuilder() + .from(0) + .query(expected) + .size(QUERY_SIZE) + .timeout(CURSOR_KEEP_ALIVE) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, builder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java index 0b0568a6b7..5f233d7f45 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java @@ -1,6 +1,5 @@ package org.opensearch.sql.opensearch.storage.scan; - import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.mockito.Mockito.mock; @@ -21,12 +20,13 @@ class PushDownQueryBuilderTest { @Test void default_implementations() { - var sample = new PushDownQueryBuilder() { - @Override - public OpenSearchRequestBuilder build() { - return null; - } - }; + var sample = + new PushDownQueryBuilder() { + @Override + public OpenSearchRequestBuilder build() { + return null; + } + }; assertAll( () -> assertFalse(sample.pushDownFilter(mock(LogicalFilter.class))), () -> assertFalse(sample.pushDownProject(mock(LogicalProject.class))), @@ -34,9 +34,6 @@ public OpenSearchRequestBuilder build() { () -> assertFalse(sample.pushDownSort(mock(LogicalSort.class))), () -> assertFalse(sample.pushDownNested(mock(LogicalNested.class))), () -> assertFalse(sample.pushDownLimit(mock(LogicalLimit.class))), - () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class))) - - ); + () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class)))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java index 32c02959b8..781e27d71a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java @@ -36,76 +36,82 @@ class 
QueryStringTest { private final QueryStringQuery queryStringQuery = new QueryStringQuery(); private final FunctionName queryStringFunc = FunctionName.of("query_string"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { Expression field = DSL.namedArgument("fields", fields_value); Expression query = DSL.namedArgument("query", query_value); return Stream.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - 
DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).map(arg -> List.of(field, query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", 
DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .map(arg -> List.of(field, query, arg)); } @ParameterizedTest @MethodSource("generateValidData") void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryStringQuery.build( - new QueryStringExpression(validArgs))); + Assertions.assertNotNull(queryStringQuery.build(new QueryStringExpression(validArgs))); } @Test void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @@ -124,14 +130,16 @@ public 
QueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java index a61b47b7b1..d81218c0c3 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java @@ -37,78 +37,78 @@ class QueryTest { static Stream> generateValidData() { Expression query = DSL.namedArgument("query", query_value); return List.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - 
DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).stream().map(arg -> List.of(query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + 
DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .stream() + .map(arg -> List.of(query, arg)); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryQuery.build( - new QueryExpression(validArgs))); + Assertions.assertNotNull(queryQuery.build(new QueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + assertThrows( + SyntaxCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_field_argument() { - List arguments = List.of( - namedArgument("fields", "invalid argument"), - namedArgument("query", query_value)); - assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + 
List.of(namedArgument("fields", "invalid argument"), namedArgument("query", query_value)); + assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + List.of( + namedArgument("query", query_value), namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_sending_parameter_multiple_times() { - List arguments = List.of( + List arguments = + List.of( namedArgument("query", query_value), namedArgument("allow_leading_wildcard", DSL.literal("true")), namedArgument("allow_leading_wildcard", DSL.literal("true"))); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } private NamedArgumentExpression namedArgument(String name, String value) { @@ -126,14 +126,16 @@ public QueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new 
UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } @@ -141,7 +143,6 @@ public ExprType type() { public void test_can_get_query_name() { List arguments = List.of(namedArgument("query", query_value)); queryQuery.build(new QueryExpression(arguments)); - Assertions.assertEquals("query", - queryQuery.getQueryName()); + Assertions.assertEquals("query", queryQuery.getQueryName()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java index 208c782593..ca87f42900 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -21,9 +20,10 @@ class RangeQueryTest { @Test void should_throw_exception_for_unsupported_comparison() { // Note that since we do switch check on enum comparison, this should be impossible - assertThrows(IllegalStateException.class, () -> - new RangeQuery(Comparison.BETWEEN) - .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); + assertThrows( + IllegalStateException.class, + () -> + new RangeQuery(Comparison.BETWEEN) + .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java index f7129117a1..ea14461521 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -37,148 +36,129 @@ class SimpleQueryStringTest { private final SimpleQueryStringQuery simpleQueryStringQuery = new SimpleQueryStringQuery(); private final FunctionName simpleQueryString = FunctionName.of("simple_query_string"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { return Stream.of( - List.of( - DSL.namedArgument("fields", fields_value), - DSL.namedArgument("query", query_value) - ), + List.of(DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value)), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("analyze_wildcard", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyzer", DSL.literal("standard")) - ), + DSL.namedArgument("analyzer", DSL.literal("standard"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - 
DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("not|and")) - ), + DSL.namedArgument("flags", DSL.literal("not|and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - 
DSL.namedArgument("lenient", DSL.literal("true")) - ), + DSL.namedArgument("lenient", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("AND")) - ), + DSL.namedArgument("default_operator", DSL.literal("AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("and")) - ), + DSL.namedArgument("default_operator", DSL.literal("and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("minimum_should_match", DSL.literal("4")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")) - ), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("boost", DSL.literal("1")) - ), - List.of( - DSL.namedArgument("FIELDS", fields_value), - DSL.namedArgument("QUERY", query_value) - ), + DSL.namedArgument("boost", DSL.literal("1"))), + List.of(DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("QUERY", query_value)), List.of( DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyZER", DSL.literal("standard")) - ) - ); + DSL.namedArgument("analyZER", DSL.literal("standard")))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - 
Assertions.assertNotNull(simpleQueryStringQuery.build( - new SimpleQueryStringExpression(validArgs))); + Assertions.assertNotNull( + simpleQueryStringQuery.build(new SimpleQueryStringExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @@ -197,14 +177,16 @@ public SimpleQueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + 
throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java index 98bd7c5784..7182626c02 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java @@ -36,46 +36,45 @@ class WildcardQueryTest { static Stream> generateValidData() { return Stream.of( List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), namedArgument("query", "query_value*"), namedArgument("boost", "0.7"), namedArgument("case_insensitive", "false"), - namedArgument("rewrite", "constant_score_boolean") - ) - ); + namedArgument("rewrite", "constant_score_boolean"))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(wildcardQueryQuery.build( - new WildcardQueryExpression(validArgs))); + Assertions.assertNotNull(wildcardQueryQuery.build(new WildcardQueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of(namedArgument("field", new 
ReferenceExpression("title", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), - namedArgument("query", "query_value*"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("query", "query_value*"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @@ -86,14 +85,16 @@ public WildcardQueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java index 5406f4cb58..a93a1e5fa4 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java @@ -48,12 +48,17 @@ class RelevanceQueryBuildTest { private QueryBuilder queryBuilder; private final Map> queryBuildActions = ImmutableMap.>builder() - .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))).build(); + .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))) + .build(); @BeforeEach public void setUp() { - query = mock(RelevanceQuery.class, withSettings().useConstructor(queryBuildActions) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + RelevanceQuery.class, + withSettings() + .useConstructor(queryBuildActions) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); queryBuilder = mock(QueryBuilder.class); when(query.createQueryBuilder(any())).thenReturn(queryBuilder); String queryName = "mock_query"; @@ -64,9 +69,13 @@ public void setUp() { @Test void throws_SemanticCheckException_when_same_argument_twice() { - FunctionExpression expr = createCall(List.of(FIELD_ARG, QUERY_ARG, - namedArgument("boost", "2.3"), - namedArgument("boost", "2.4"))); + FunctionExpression expr = + createCall( + List.of( + FIELD_ARG, + QUERY_ARG, + namedArgument("boost", "2.3"), + namedArgument("boost", "2.4"))); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); assertEquals("Parameter 'boost' can only be specified once.", exception.getMessage()); @@ -79,8 +88,7 @@ void throws_SemanticCheckException_when_wrong_argument_name() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); - assertEquals("Parameter wrongarg is invalid for mock_query function.", - exception.getMessage()); + assertEquals("Parameter wrongarg is invalid for mock_query function.", exception.getMessage()); } @Test @@ 
-95,14 +103,13 @@ void calls_action_when_correct_argument_name() { @ParameterizedTest @MethodSource("insufficientArguments") public void throws_SyntaxCheckException_when_no_required_arguments(List arguments) { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> query.build(createCall(arguments))); + SyntaxCheckException exception = + assertThrows(SyntaxCheckException.class, () -> query.build(createCall(arguments))); assertEquals("mock_query requires at least two parameters", exception.getMessage()); } public static Stream> insufficientArguments() { - return Stream.of(List.of(), - List.of(namedArgument("field", "field_A"))); + return Stream.of(List.of(), List.of(namedArgument("field", "field_A"))); } private static NamedArgumentExpression namedArgument(String field, String fieldValue) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java index 3628dc8abc..7234ee9275 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java @@ -26,14 +26,17 @@ class SingleFieldQueryTest { SingleFieldQuery query; private final String testQueryName = "test_query"; - private final Map actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach void setUp() { - query = mock(SingleFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + SingleFieldQuery.class, + Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); 
when(query.getQueryName()).thenReturn(testQueryName); } @@ -42,15 +45,20 @@ void createQueryBuilderTestTypeTextKeyword() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new ReferenceExpression(sampleField, - OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", + new ReferenceExpression( + sampleField, + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } @Test @@ -58,12 +66,13 @@ void createQueryBuilderTestTypeText() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new ReferenceExpression(sampleField, OpenSearchTextType.of())), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", new ReferenceExpression(sampleField, OpenSearchTextType.of())), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java index e84ed14e43..89a10ad563 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.hamcrest.MatcherAssert.assertThat; @@ -32,10 +31,7 @@ void build_sortbuilder_from_reference() { void build_sortbuilder_from_nested_function() { assertNotNull( sortQueryBuilder.build( - DSL.nested(DSL.ref("message.info", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + DSL.nested(DSL.ref("message.info", STRING)), Sort.SortOption.DEFAULT_ASC)); } @Test @@ -43,63 +39,56 @@ void build_sortbuilder_from_nested_function_with_path_param() { assertNotNull( sortQueryBuilder.build( DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_many_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING), - DSL.ref("message", STRING) - ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> + sortQueryBuilder.build( + DSL.nested( + DSL.ref("message.info", STRING), + DSL.ref("message", STRING), + DSL.ref("message", STRING)), + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_few_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested(), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(), Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_invalid_arg_type_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.literal(1) - ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(DSL.literal(1)), 
Sort.SortOption.DEFAULT_ASC)); } @Test void build_sortbuilder_from_expression_should_throw_exception() { final IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build( - new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC)); + assertThrows( + IllegalStateException.class, + () -> + sortQueryBuilder.build( + new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC)); assertThat(exception.getMessage(), Matchers.containsString("unsupported expression")); } @Test void build_sortbuilder_from_function_should_throw_exception() { final IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build(DSL.equal(DSL.ref( - "intV", INTEGER), DSL.literal(1)), Sort.SortOption.DEFAULT_ASC)); + assertThrows( + IllegalStateException.class, + () -> + sortQueryBuilder.build( + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(1)), + Sort.SortOption.DEFAULT_ASC)); assertThat(exception.getMessage(), Matchers.containsString("unsupported expression")); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java index 494f3ff2d0..00d1c9ecd1 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,8 +20,7 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSystemIndexScanTest { - @Mock - private OpenSearchSystemRequest request; + @Mock private OpenSearchSystemRequest request; @Test public void queryData() { diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java index a483f2dad8..1afcfcdc86 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.hamcrest.MatcherAssert.assertThat; @@ -35,29 +34,23 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSystemIndexTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private Table table; + @Mock private Table table; @Test void testGetFieldTypesOfMetaTable() { OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("TABLE_CAT", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("TABLE_CAT", STRING))); } @Test void testGetFieldTypesOfMappingTable() { - OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, mappingTable( - "test_index")); + OpenSearchSystemIndex systemIndex = + new OpenSearchSystemIndex(client, mappingTable("test_index")); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("COLUMN_NAME", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("COLUMN_NAME", STRING))); } @Test @@ -69,8 +62,7 @@ void testIsExist() { @Test void testCreateTable() { Table systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); - assertThrows(UnsupportedOperationException.class, - () -> systemIndex.create(ImmutableMap.of())); + assertThrows(UnsupportedOperationException.class, () -> systemIndex.create(ImmutableMap.of())); } @Test @@ -78,11 +70,8 @@ void 
implement() { OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); NamedExpression projectExpr = named("TABLE_NAME", ref("TABLE_NAME", STRING)); - final PhysicalPlan plan = systemIndex.implement( - project( - relation(TABLE_INFO, table), - projectExpr - )); + final PhysicalPlan plan = + systemIndex.implement(project(relation(TABLE_INFO, table), projectExpr)); assertTrue(plan instanceof ProjectOperator); assertTrue(plan.getChild().get(0) instanceof OpenSearchSystemIndexScan); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java index 85b8889de3..0db87f89d4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.utils; import com.google.common.collect.ImmutableSet; @@ -36,15 +35,13 @@ public static List group(NamedExpression... 
exprs) { return Arrays.asList(exprs); } - public static List> sort(Expression expr1, - Sort.SortOption option1) { + public static List> sort( + Expression expr1, Sort.SortOption option1) { return Collections.singletonList(Pair.of(option1, expr1)); } - public static List> sort(Expression expr1, - Sort.SortOption option1, - Expression expr2, - Sort.SortOption option2) { + public static List> sort( + Expression expr1, Sort.SortOption option1, Expression expr2, Sort.SortOption option2) { return Arrays.asList(Pair.of(option1, expr1), Pair.of(option2, expr2)); } From ff059f0511fb86a9fb9327e67d42a25ca28c6b5b Mon Sep 17 00:00:00 2001 From: Yury-Fridlyand Date: Fri, 18 Aug 2023 15:13:03 -0700 Subject: [PATCH 31/42] Fix doctest data (#1902) Signed-off-by: Yury-Fridlyand --- doctest/test_data/nested_objects.json | 2 -- doctest/test_data/wildcard.json | 11 ----------- 2 files changed, 13 deletions(-) diff --git a/doctest/test_data/nested_objects.json b/doctest/test_data/nested_objects.json index fc5f56b4c5..bee976806e 100644 --- a/doctest/test_data/nested_objects.json +++ b/doctest/test_data/nested_objects.json @@ -1,4 +1,2 @@ -{"index":{"_id":"1"}} {"message":{"info":"a","author":"e","dayOfWeek":1},"comment":{"data":"ab","likes":3},"myNum":1,"someField":"b"} -{"index":{"_id":"2"}} {"message":{"info":"b","author":"f","dayOfWeek":2},"comment":{"data":"aa","likes":2},"myNum":2,"someField":"a"} diff --git a/doctest/test_data/wildcard.json b/doctest/test_data/wildcard.json index c91778d8ab..21256ed8dc 100644 --- a/doctest/test_data/wildcard.json +++ b/doctest/test_data/wildcard.json @@ -1,22 +1,11 @@ -{"index":{"_id":"0"}} {"Body":"test wildcard"} -{"index":{"_id":"1"}} {"Body":"test wildcard in the end of the text%"} -{"index":{"_id":"2"}} {"Body":"%test wildcard in the beginning of the text"} -{"index":{"_id":"3"}} {"Body":"test wildcard in % the middle of the text"} -{"index":{"_id":"4"}} {"Body":"test wildcard %% beside each other"} -{"index":{"_id":"5"}} {"Body":"test 
wildcard in the end of the text_"} -{"index":{"_id":"6"}} {"Body":"_test wildcard in the beginning of the text"} -{"index":{"_id":"7"}} {"Body":"test wildcard in _ the middle of the text"} -{"index":{"_id":"8"}} {"Body":"test wildcard __ beside each other"} -{"index":{"_id":"9"}} {"Body":"test backslash wildcard \\_"} -{"index":{"_id":"10"}} {"Body":"tEsT wIlDcArD sensitive cases"} From ac9b5d86aa18d39356740485c766d8206ef8b43e Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Fri, 18 Aug 2023 18:38:27 -0700 Subject: [PATCH 32/42] [Spotless] Applying Google Code Format for legacy directory (pt 1/4) #19 (#1988) * spotless apply for OpenSearch P1. Signed-off-by: Mitchell Gale * Manual spotless changes Signed-off-by: Mitchell Gale * spotless apply for OpenSearch P2. Signed-off-by: Mitchell Gale * 90 files checked after spotless apply for legacy Signed-off-by: Mitchell Gale * Added checkstyle ignore failures to legacy Signed-off-by: Mitchell Gale * Fixed comma issue Signed-off-by: Mitchell Gale * Spotless apply Signed-off-by: Mitchell Gale * Revert build.gradle Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- .../ComparisonOperatorBenchmark.java | 6 +- legacy/build.gradle | 3 + .../antlr/semantic/types/base/BaseType.java | 21 +- .../types/function/AggregateFunction.java | 70 +- .../types/operator/ComparisonOperator.java | 88 +- .../syntax/CaseInsensitiveCharStream.java | 92 +- .../visitor/AntlrSqlParseTreeVisitor.java | 667 ++++---- .../visitor/EarlyExitAnalysisException.java | 11 +- .../opensearch/sql/legacy/cursor/Cursor.java | 8 +- .../sql/legacy/cursor/CursorType.java | 45 +- .../sql/legacy/cursor/DefaultCursor.java | 249 +-- .../sql/legacy/domain/ColumnTypeProvider.java | 103 +- .../sql/legacy/domain/Condition.java | 721 +++++---- .../opensearch/sql/legacy/domain/Delete.java | 9 +- .../legacy/domain/bucketpath/BucketPath.java | 40 +- .../legacy/executor/AsyncRestExecutor.java | 253 +-- 
.../executor/ElasticDefaultRestExecutor.java | 151 +- .../legacy/executor/ElasticHitsExecutor.java | 9 +- .../legacy/executor/ElasticResultHandler.java | 44 +- .../sql/legacy/executor/csv/CSVResult.java | 129 +- .../executor/csv/CSVResultRestExecutor.java | 89 +- .../executor/csv/CSVResultsExtractor.java | 583 +++---- .../executor/csv/CsvExtractorException.java | 11 +- ...ursorActionRequestRestExecutorFactory.java | 20 +- .../cursor/CursorAsyncRestExecutor.java | 140 +- .../executor/cursor/CursorCloseExecutor.java | 116 +- .../executor/cursor/CursorRestExecutor.java | 11 +- .../executor/cursor/CursorResultExecutor.java | 157 +- .../format/BindingTupleResultSet.java | 76 +- .../sql/legacy/executor/format/DataRows.java | 127 +- .../executor/format/DateFieldFormatter.java | 305 ++-- .../legacy/executor/format/DateFormat.java | 226 +-- .../executor/format/DeleteResultSet.java | 49 +- .../executor/format/DescribeResultSet.java | 264 ++- .../executor/join/ElasticJoinExecutor.java | 421 ++--- .../executor/multi/ComperableHitResult.java | 109 +- .../builder/ArithmeticFunctionFactory.java | 326 ++-- .../core/builder/BinaryExpressionBuilder.java | 48 +- .../core/operator/BinaryScalarOperator.java | 86 +- .../operator/DoubleBinaryScalarOperator.java | 59 +- .../operator/DoubleUnaryScalarOperator.java | 43 +- .../expression/domain/BindingTuple.java | 67 +- .../sql/legacy/metrics/BasicCounter.java | 35 +- .../sql/legacy/metrics/Counter.java | 9 +- .../sql/legacy/parser/CaseWhenParser.java | 191 ++- .../sql/legacy/parser/ChildrenType.java | 84 +- .../sql/legacy/parser/ElasticLexer.java | 139 +- .../legacy/query/AggregationQueryAction.java | 776 ++++----- .../sql/legacy/query/DefaultQueryAction.java | 479 +++--- .../sql/legacy/query/DeleteQueryAction.java | 86 +- .../sql/legacy/query/DescribeQueryAction.java | 30 +- .../query/join/BackOffRetryStrategy.java | 311 ++-- .../sql/legacy/query/maker/AggMaker.java | 1437 +++++++++-------- .../core/BindingTupleQueryPlanner.java | 99 +- 
.../legacy/query/planner/core/ColumnNode.java | 19 +- .../sql/legacy/query/planner/core/Config.java | 224 ++- .../planner/physical/estimation/Cost.java | 18 +- .../physical/node/BatchPhysicalOperator.java | 119 +- .../physical/node/join/BlockHashJoin.java | 147 +- .../physical/node/join/CombinedRow.java | 39 +- .../physical/node/join/DefaultHashTable.java | 173 +- .../physical/node/scroll/BindingTupleRow.java | 43 +- .../planner/resource/blocksize/BlockSize.java | 49 +- .../AnonymizeSensitiveDataRule.java | 81 +- .../spatial/BoundingBoxFilterParams.java | 29 +- .../sql/legacy/spatial/CellFilterParams.java | 55 +- .../legacy/spatial/DistanceFilterParams.java | 29 +- .../antlr/semantic/types/BaseTypeTest.java | 142 +- .../visitor/AntlrSqlParseTreeVisitorTest.java | 167 +- .../executor/AsyncRestExecutorTest.java | 103 +- .../legacy/executor/csv/CSVResultTest.java | 173 +- .../format/DateFieldFormatterTest.java | 1312 ++++++++------- .../unittest/AggregationOptionTest.java | 79 +- .../sql/legacy/unittest/DateFormatTest.java | 419 ++--- .../legacy/unittest/DateFunctionsTest.java | 306 ++-- .../unittest/cursor/DefaultCursorTest.java | 72 +- .../domain/ColumnTypeProviderTest.java | 50 +- .../executor/DeleteResultSetTest.java | 82 +- .../format/BindingTupleResultSetTest.java | 69 +- .../format/CSVResultsExtractorTest.java | 39 +- .../expression/core/BinaryExpressionTest.java | 127 +- .../core/CompoundExpressionTest.java | 15 +- .../unittest/metrics/BasicCounterTest.java | 28 +- .../unittest/parser/BucketPathTest.java | 66 +- .../BindingTupleQueryPlannerExecuteTest.java | 130 +- .../query/DefaultQueryActionTest.java | 378 +++-- .../rewriter/inline/AliasInliningTests.java | 205 +-- .../unittest/utils/BackticksUnquoterTest.java | 44 +- .../sql/legacy/util/AggregationUtils.java | 83 +- .../sql/legacy/util/CheckScriptContents.java | 356 ++-- .../data/type/OpenSearchTextType.java | 2 +- .../value/OpenSearchExprValueFactoryTest.java | 2 +- 92 files changed, 7517 insertions(+), 
7685 deletions(-) diff --git a/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java b/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java index d2642dd645..01b2068694 100644 --- a/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java +++ b/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java @@ -38,7 +38,7 @@ @Fork(value = 1) public class ComparisonOperatorBenchmark { - @Param(value = { "int", "string", "date" }) + @Param(value = {"int", "string", "date"}) private String testDataType; private final Map params = @@ -65,9 +65,7 @@ public void testGreaterOperator() { private void run(Function dsl) { ExprValue param = params.get(testDataType); - FunctionExpression func = dsl.apply(new Expression[] { - literal(param), literal(param) - }); + FunctionExpression func = dsl.apply(new Expression[] {literal(param), literal(param)}); func.valueOf(); } } diff --git a/legacy/build.gradle b/legacy/build.gradle index d89f7affe7..fce04ae9ba 100644 --- a/legacy/build.gradle +++ b/legacy/build.gradle @@ -53,6 +53,9 @@ compileJava { } } +checkstyleTest.ignoreFailures = true +checkstyleMain.ignoreFailures = true + // TODO: Similarly, need to fix compiling errors in test source code compileTestJava.options.warnings = false compileTestJava { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java index 280b7b4c76..37e0c4d4b3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java @@ -3,24 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.base; import 
java.util.List; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Base type interface - */ +/** Base type interface */ public interface BaseType extends Type { - @Override - default Type construct(List others) { - return this; - } + @Override + default Type construct(List others) { + return this; + } - @Override - default String usage() { - return getName(); - } + @Override + default String usage() { + return getName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java index 37e4091b0a..9cebf3dda6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.function; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DOUBLE; @@ -15,41 +14,38 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; -/** - * Aggregate function - */ +/** Aggregate function */ public enum AggregateFunction implements TypeExpression { - COUNT( - func().to(INTEGER), // COUNT(*) - func(OPENSEARCH_TYPE).to(INTEGER) - ), - MAX(func(T(NUMBER)).to(T)), - MIN(func(T(NUMBER)).to(T)), - AVG(func(T(NUMBER)).to(DOUBLE)), - SUM(func(T(NUMBER)).to(T)); - - private TypeExpressionSpec[] specifications; - - AggregateFunction(TypeExpressionSpec... specifications) { - this.specifications = specifications; - } - - @Override - public String getName() { - return name(); - } - - @Override - public TypeExpressionSpec[] specifications() { - return specifications; - } - - private static TypeExpressionSpec func(Type... 
argTypes) { - return new TypeExpressionSpec().map(argTypes); - } - - @Override - public String toString() { - return "Function [" + name() + "]"; - } + COUNT( + func().to(INTEGER), // COUNT(*) + func(OPENSEARCH_TYPE).to(INTEGER)), + MAX(func(T(NUMBER)).to(T)), + MIN(func(T(NUMBER)).to(T)), + AVG(func(T(NUMBER)).to(DOUBLE)), + SUM(func(T(NUMBER)).to(T)); + + private TypeExpressionSpec[] specifications; + + AggregateFunction(TypeExpressionSpec... specifications) { + this.specifications = specifications; + } + + @Override + public String getName() { + return name(); + } + + @Override + public TypeExpressionSpec[] specifications() { + return specifications; + } + + private static TypeExpressionSpec func(Type... argTypes) { + return new TypeExpressionSpec().map(argTypes); + } + + @Override + public String toString() { + return "Function [" + name() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java index 993d996df3..19e8f85aa3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.operator; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; @@ -12,53 +11,50 @@ import java.util.List; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Type for comparison operator - */ +/** Type for comparison operator */ public enum ComparisonOperator implements Type { - - EQUAL("="), - NOT_EQUAL("<>"), - NOT_EQUAL2("!="), - GREATER_THAN(">"), - GREATER_THAN_OR_EQUAL_TO(">="), - SMALLER_THAN("<"), - SMALLER_THAN_OR_EQUAL_TO("<="), - IS("IS"); - - /** Actual name representing the 
operator */ - private final String name; - - ComparisonOperator(String name) { - this.name = name; - } - - @Override - public String getName() { - return name; - } - - @Override - public Type construct(List actualArgs) { - if (actualArgs.size() != 2) { - return TYPE_ERROR; - } - - Type leftType = actualArgs.get(0); - Type rightType = actualArgs.get(1); - if (leftType.isCompatible(rightType) || rightType.isCompatible(leftType)) { - return BOOLEAN; - } - return TYPE_ERROR; - } - - @Override - public String usage() { - return "Please use compatible types from each side."; + EQUAL("="), + NOT_EQUAL("<>"), + NOT_EQUAL2("!="), + GREATER_THAN(">"), + GREATER_THAN_OR_EQUAL_TO(">="), + SMALLER_THAN("<"), + SMALLER_THAN_OR_EQUAL_TO("<="), + IS("IS"); + + /** Actual name representing the operator */ + private final String name; + + ComparisonOperator(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public Type construct(List actualArgs) { + if (actualArgs.size() != 2) { + return TYPE_ERROR; } - @Override - public String toString() { - return "Operator [" + getName() + "]"; + Type leftType = actualArgs.get(0); + Type rightType = actualArgs.get(1); + if (leftType.isCompatible(rightType) || rightType.isCompatible(leftType)) { + return BOOLEAN; } + return TYPE_ERROR; + } + + @Override + public String usage() { + return "Please use compatible types from each side."; + } + + @Override + public String toString() { + return "Operator [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java index de7e60e9f3..c7cb212826 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.syntax; import org.antlr.v4.runtime.CharStream; @@ -11,63 +10,64 @@ import org.antlr.v4.runtime.misc.Interval; /** - * Custom stream to convert character to upper case for case insensitive grammar before sending to lexer. + * Custom stream to convert character to upper case for case insensitive grammar before sending to + * lexer. */ public class CaseInsensitiveCharStream implements CharStream { - /** Character stream */ - private final CharStream charStream; + /** Character stream */ + private final CharStream charStream; - public CaseInsensitiveCharStream(String sql) { - this.charStream = CharStreams.fromString(sql); - } + public CaseInsensitiveCharStream(String sql) { + this.charStream = CharStreams.fromString(sql); + } - @Override - public String getText(Interval interval) { - return charStream.getText(interval); - } + @Override + public String getText(Interval interval) { + return charStream.getText(interval); + } - @Override - public void consume() { - charStream.consume(); - } + @Override + public void consume() { + charStream.consume(); + } - @Override - public int LA(int i) { - int c = charStream.LA(i); - if (c <= 0) { - return c; - } - return Character.toUpperCase(c); + @Override + public int LA(int i) { + int c = charStream.LA(i); + if (c <= 0) { + return c; } + return Character.toUpperCase(c); + } - @Override - public int mark() { - return charStream.mark(); - } + @Override + public int mark() { + return charStream.mark(); + } - @Override - public void release(int marker) { - charStream.release(marker); - } + @Override + public void release(int marker) { + charStream.release(marker); + } - @Override - public int index() { - return charStream.index(); - } + @Override + public int index() { + return charStream.index(); + } - @Override - public void seek(int index) { - charStream.seek(index); - } + @Override + public void seek(int index) { + charStream.seek(index); + } - 
@Override - public int size() { - return charStream.size(); - } + @Override + public int size() { + return charStream.size(); + } - @Override - public String getSourceName() { - return charStream.getSourceName(); - } + @Override + public String getSourceName() { + return charStream.getSourceName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java index 90a8274568..00db9a6591 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import static java.util.Collections.emptyList; @@ -55,78 +54,74 @@ import org.opensearch.sql.legacy.antlr.parser.OpenSearchLegacySqlParser.TableNamePatternContext; import org.opensearch.sql.legacy.antlr.parser.OpenSearchLegacySqlParserBaseVisitor; -/** - * ANTLR parse tree visitor to drive the analysis process. 
- */ -public class AntlrSqlParseTreeVisitor extends OpenSearchLegacySqlParserBaseVisitor { - - /** Generic visitor to perform the real action on parse tree */ - private final GenericSqlParseTreeVisitor visitor; - - public AntlrSqlParseTreeVisitor(GenericSqlParseTreeVisitor visitor) { - this.visitor = visitor; - } - - @Override - public T visitRoot(RootContext ctx) { - visitor.visitRoot(); - return super.visitRoot(ctx); - } - - @Override - public T visitUnionSelect(UnionSelectContext ctx) { - T union = visitor.visitOperator("UNION"); - return reduce(union, - asList( - ctx.querySpecification(), - ctx.unionStatement() - ) - ); - } - - @Override - public T visitMinusSelect(MinusSelectContext ctx) { - T minus = visitor.visitOperator("MINUS"); - return reduce(minus, asList(ctx.querySpecification(), ctx.minusStatement())); - } - - @Override - public T visitInPredicate(InPredicateContext ctx) { - T in = visitor.visitOperator("IN"); - PredicateContext field = ctx.predicate(); - ParserRuleContext subquery = (ctx.selectStatement() != null) ? ctx.selectStatement() : ctx.expressions(); - return reduce(in, Arrays.asList(field, subquery)); - } - - @Override - public T visitTableSources(TableSourcesContext ctx) { - if (ctx.tableSource().size() < 2) { - return super.visitTableSources(ctx); - } - T commaJoin = visitor.visitOperator("JOIN"); - return reduce(commaJoin, ctx.tableSource()); - } - - @Override - public T visitTableSourceBase(TableSourceBaseContext ctx) { - if (ctx.joinPart().isEmpty()) { - return super.visitTableSourceBase(ctx); - } - T join = visitor.visitOperator("JOIN"); - return reduce(join, asList(ctx.tableSourceItem(), ctx.joinPart())); - } - - @Override - public T visitInnerJoin(InnerJoinContext ctx) { - return visitJoin(ctx.children, ctx.tableSourceItem()); - } - - @Override - public T visitOuterJoin(OuterJoinContext ctx) { - return visitJoin(ctx.children, ctx.tableSourceItem()); - } +/** ANTLR parse tree visitor to drive the analysis process. 
*/ +public class AntlrSqlParseTreeVisitor + extends OpenSearchLegacySqlParserBaseVisitor { + + /** Generic visitor to perform the real action on parse tree */ + private final GenericSqlParseTreeVisitor visitor; + + public AntlrSqlParseTreeVisitor(GenericSqlParseTreeVisitor visitor) { + this.visitor = visitor; + } + + @Override + public T visitRoot(RootContext ctx) { + visitor.visitRoot(); + return super.visitRoot(ctx); + } + + @Override + public T visitUnionSelect(UnionSelectContext ctx) { + T union = visitor.visitOperator("UNION"); + return reduce(union, asList(ctx.querySpecification(), ctx.unionStatement())); + } + + @Override + public T visitMinusSelect(MinusSelectContext ctx) { + T minus = visitor.visitOperator("MINUS"); + return reduce(minus, asList(ctx.querySpecification(), ctx.minusStatement())); + } + + @Override + public T visitInPredicate(InPredicateContext ctx) { + T in = visitor.visitOperator("IN"); + PredicateContext field = ctx.predicate(); + ParserRuleContext subquery = + (ctx.selectStatement() != null) ? ctx.selectStatement() : ctx.expressions(); + return reduce(in, Arrays.asList(field, subquery)); + } + + @Override + public T visitTableSources(TableSourcesContext ctx) { + if (ctx.tableSource().size() < 2) { + return super.visitTableSources(ctx); + } + T commaJoin = visitor.visitOperator("JOIN"); + return reduce(commaJoin, ctx.tableSource()); + } + + @Override + public T visitTableSourceBase(TableSourceBaseContext ctx) { + if (ctx.joinPart().isEmpty()) { + return super.visitTableSourceBase(ctx); + } + T join = visitor.visitOperator("JOIN"); + return reduce(join, asList(ctx.tableSourceItem(), ctx.joinPart())); + } + + @Override + public T visitInnerJoin(InnerJoinContext ctx) { + return visitJoin(ctx.children, ctx.tableSourceItem()); + } + + @Override + public T visitOuterJoin(OuterJoinContext ctx) { + return visitJoin(ctx.children, ctx.tableSourceItem()); + } /** + *
      * Enforce visit order because ANTLR is generic and unaware.
      *
      * Visiting order is:
@@ -137,275 +132,273 @@ public T visitOuterJoin(OuterJoinContext ctx) {
      *     => HAVING
      *      => ORDER BY
      *       => LIMIT
+     *  
*/ @Override public T visitQuerySpecification(QuerySpecificationContext ctx) { visitor.visitQuery(); - // Always visit FROM clause first to define symbols - FromClauseContext fromClause = ctx.fromClause(); - visit(fromClause.tableSources()); - - if (fromClause.whereExpr != null) { - visit(fromClause.whereExpr); - } - - // Note visit GROUP BY and HAVING later than SELECT for alias definition - T result = visitSelectElements(ctx.selectElements()); - fromClause.groupByItem().forEach(this::visit); - if (fromClause.havingExpr != null) { - visit(fromClause.havingExpr); - } - - if (ctx.orderByClause() != null) { - visitOrderByClause(ctx.orderByClause()); - } - if (ctx.limitClause() != null) { - visitLimitClause(ctx.limitClause()); - } - - visitor.endVisitQuery(); - return result; - } - - @Override - public T visitSubqueryTableItem(SubqueryTableItemContext ctx) { - throw new EarlyExitAnalysisException("Exit when meeting subquery in from"); - } - - /** Visit here instead of tableName because we need alias */ - @Override - public T visitAtomTableItem(AtomTableItemContext ctx) { - String alias = (ctx.alias == null) ? 
"" : ctx.alias.getText(); - T result = visit(ctx.tableName()); - visitor.visitAs(alias, result); - return result; - } - - @Override - public T visitSimpleTableName(SimpleTableNameContext ctx) { - return visitor.visitIndexName(ctx.getText()); - } - - @Override - public T visitTableNamePattern(TableNamePatternContext ctx) { - return visitor.visitIndexName(ctx.getText()); - } - - @Override - public T visitTableAndTypeName(TableAndTypeNameContext ctx) { - return visitor.visitIndexName(ctx.uid(0).getText()); - } - - @Override - public T visitFullColumnName(FullColumnNameContext ctx) { - return visitor.visitFieldName(ctx.getText()); - } - - @Override - public T visitUdfFunctionCall(UdfFunctionCallContext ctx) { - String funcName = ctx.fullId().getText(); - T func = visitor.visitFunctionName(funcName); - return reduce(func, ctx.functionArgs()); - } - - @Override - public T visitScalarFunctionCall(ScalarFunctionCallContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - T func = visit(ctx.scalarFunctionName()); - return reduce(func, ctx.functionArgs()); - } - - @Override - public T visitMathOperator(MathOperatorContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - return super.visitMathOperator(ctx); - } - - @Override - public T visitRegexpPredicate(RegexpPredicateContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - return super.visitRegexpPredicate(ctx); - } - - @Override - public T visitSelectElements(SelectElementsContext ctx) { - return visitor.visitSelect(ctx.selectElement(). - stream(). - map(this::visit). 
- collect(Collectors.toList())); - } - - @Override - public T visitSelectStarElement(OpenSearchLegacySqlParser.SelectStarElementContext ctx) { - return visitor.visitSelectAllColumn(); - } - - @Override - public T visitSelectColumnElement(SelectColumnElementContext ctx) { - return visitSelectItem(ctx.fullColumnName(), ctx.uid()); - } - - @Override - public T visitSelectFunctionElement(SelectFunctionElementContext ctx) { - return visitSelectItem(ctx.functionCall(), ctx.uid()); - } - - @Override - public T visitSelectExpressionElement(SelectExpressionElementContext ctx) { - return visitSelectItem(ctx.expression(), ctx.uid()); - } - - @Override - public T visitAggregateWindowedFunction(AggregateWindowedFunctionContext ctx) { - String funcName = ctx.getChild(0).getText(); - T func = visitor.visitFunctionName(funcName); - return reduce(func, ctx.functionArg()); - } - - @Override - public T visitFunctionNameBase(FunctionNameBaseContext ctx) { - return visitor.visitFunctionName(ctx.getText()); - } - - @Override - public T visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) { - if (isNamedArgument(ctx)) { // Essentially named argument is assign instead of comparison - return defaultResult(); - } - - T op = visit(ctx.comparisonOperator()); - return reduce(op, Arrays.asList(ctx.left, ctx.right)); - } - - @Override - public T visitIsExpression(IsExpressionContext ctx) { - T op = visitor.visitOperator("IS"); - return op.reduce(Arrays.asList( - visit(ctx.predicate()), - visitor.visitBoolean(ctx.testValue.getText())) - ); - } - - @Override - public T visitConvertedDataType(OpenSearchLegacySqlParser.ConvertedDataTypeContext ctx) { - if (ctx.getChild(0) != null && !Strings.isNullOrEmpty(ctx.getChild(0).getText())) { - return visitor.visitConvertedType(ctx.getChild(0).getText()); - } else { - return super.visitConvertedDataType(ctx); - } - } - - @Override - public T visitComparisonOperator(ComparisonOperatorContext ctx) { - return 
visitor.visitOperator(ctx.getText()); - } - - @Override - public T visitConstant(ConstantContext ctx) { - if (ctx.REAL_LITERAL() != null) { - return visitor.visitFloat(ctx.getText()); - } - if (ctx.dateType != null) { - return visitor.visitDate(ctx.getText()); - } - if (ctx.nullLiteral != null) { - return visitor.visitNull(); - } - return super.visitConstant(ctx); - } - - @Override - public T visitStringLiteral(StringLiteralContext ctx) { - return visitor.visitString(ctx.getText()); - } - - @Override - public T visitDecimalLiteral(DecimalLiteralContext ctx) { - return visitor.visitInteger(ctx.getText()); - } - - @Override - public T visitBooleanLiteral(BooleanLiteralContext ctx) { - return visitor.visitBoolean(ctx.getText()); - } - - @Override - protected T defaultResult() { - return visitor.defaultValue(); - } - - @Override - protected T aggregateResult(T aggregate, T nextResult) { - if (nextResult != defaultResult()) { // Simply return non-default value for now - return nextResult; - } - return aggregate; - } - - /** - * Named argument, ex. 
TOPHITS('size'=3), is under FunctionArgs -> Predicate - * And the function name should be contained in openSearchFunctionNameBase - */ - private boolean isNamedArgument(BinaryComparisonPredicateContext ctx) { - if (ctx.getParent() != null && ctx.getParent().getParent() != null - && ctx.getParent().getParent().getParent() != null - && ctx.getParent().getParent().getParent() instanceof ScalarFunctionCallContext) { - - ScalarFunctionCallContext parent = (ScalarFunctionCallContext) ctx.getParent().getParent().getParent(); - return parent.scalarFunctionName().functionNameBase().openSearchFunctionNameBase() != null; - } - return false; - } - - /** Enforce visiting result of table instead of ON clause as result */ - private T visitJoin(List children, TableSourceItemContext tableCtx) { - T result = defaultResult(); - for (ParseTree child : children) { - if (child == tableCtx) { - result = visit(tableCtx); - } else { - visit(child); - } - } - return result; - } - - /** Visit select items for type check and alias definition */ - private T visitSelectItem(ParserRuleContext item, UidContext uid) { - T result = visit(item); - if (uid != null) { - visitor.visitAs(uid.getText(), result); - } - return result; - } - - private T reduce(T reducer, ParserRuleContext ctx) { - return reduce(reducer, (ctx == null) ? emptyList() : ctx.children); - } - - /** Make constructor apply arguments and return result type */ - private T reduce(T reducer, List nodes) { - List args; - if (nodes == null) { - args = emptyList(); - } else { - args = nodes.stream(). - map(this::visit). - filter(type -> type != defaultResult()). 
- collect(Collectors.toList()); - } - return reducer.reduce(args); - } - - /** Combine an item and a list of items to a single list */ - private - List asList(Node1 first, List rest) { - - List result = new ArrayList<>(singleton(first)); - result.addAll(rest); - return result; - } - + // Always visit FROM clause first to define symbols + FromClauseContext fromClause = ctx.fromClause(); + visit(fromClause.tableSources()); + + if (fromClause.whereExpr != null) { + visit(fromClause.whereExpr); + } + + // Note visit GROUP BY and HAVING later than SELECT for alias definition + T result = visitSelectElements(ctx.selectElements()); + fromClause.groupByItem().forEach(this::visit); + if (fromClause.havingExpr != null) { + visit(fromClause.havingExpr); + } + + if (ctx.orderByClause() != null) { + visitOrderByClause(ctx.orderByClause()); + } + if (ctx.limitClause() != null) { + visitLimitClause(ctx.limitClause()); + } + + visitor.endVisitQuery(); + return result; + } + + @Override + public T visitSubqueryTableItem(SubqueryTableItemContext ctx) { + throw new EarlyExitAnalysisException("Exit when meeting subquery in from"); + } + + /** Visit here instead of tableName because we need alias */ + @Override + public T visitAtomTableItem(AtomTableItemContext ctx) { + String alias = (ctx.alias == null) ? 
"" : ctx.alias.getText(); + T result = visit(ctx.tableName()); + visitor.visitAs(alias, result); + return result; + } + + @Override + public T visitSimpleTableName(SimpleTableNameContext ctx) { + return visitor.visitIndexName(ctx.getText()); + } + + @Override + public T visitTableNamePattern(TableNamePatternContext ctx) { + return visitor.visitIndexName(ctx.getText()); + } + + @Override + public T visitTableAndTypeName(TableAndTypeNameContext ctx) { + return visitor.visitIndexName(ctx.uid(0).getText()); + } + + @Override + public T visitFullColumnName(FullColumnNameContext ctx) { + return visitor.visitFieldName(ctx.getText()); + } + + @Override + public T visitUdfFunctionCall(UdfFunctionCallContext ctx) { + String funcName = ctx.fullId().getText(); + T func = visitor.visitFunctionName(funcName); + return reduce(func, ctx.functionArgs()); + } + + @Override + public T visitScalarFunctionCall(ScalarFunctionCallContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + T func = visit(ctx.scalarFunctionName()); + return reduce(func, ctx.functionArgs()); + } + + @Override + public T visitMathOperator(MathOperatorContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + return super.visitMathOperator(ctx); + } + + @Override + public T visitRegexpPredicate(RegexpPredicateContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + return super.visitRegexpPredicate(ctx); + } + + @Override + public T visitSelectElements(SelectElementsContext ctx) { + return visitor.visitSelect( + ctx.selectElement().stream().map(this::visit).collect(Collectors.toList())); + } + + @Override + public T visitSelectStarElement(OpenSearchLegacySqlParser.SelectStarElementContext ctx) { + return visitor.visitSelectAllColumn(); + } + + @Override + public T visitSelectColumnElement(SelectColumnElementContext ctx) { + return visitSelectItem(ctx.fullColumnName(), ctx.uid()); + } + + @Override + public T visitSelectFunctionElement(SelectFunctionElementContext ctx) { + return 
visitSelectItem(ctx.functionCall(), ctx.uid()); + } + + @Override + public T visitSelectExpressionElement(SelectExpressionElementContext ctx) { + return visitSelectItem(ctx.expression(), ctx.uid()); + } + + @Override + public T visitAggregateWindowedFunction(AggregateWindowedFunctionContext ctx) { + String funcName = ctx.getChild(0).getText(); + T func = visitor.visitFunctionName(funcName); + return reduce(func, ctx.functionArg()); + } + + @Override + public T visitFunctionNameBase(FunctionNameBaseContext ctx) { + return visitor.visitFunctionName(ctx.getText()); + } + + @Override + public T visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) { + if (isNamedArgument(ctx)) { // Essentially named argument is assign instead of comparison + return defaultResult(); + } + + T op = visit(ctx.comparisonOperator()); + return reduce(op, Arrays.asList(ctx.left, ctx.right)); + } + + @Override + public T visitIsExpression(IsExpressionContext ctx) { + T op = visitor.visitOperator("IS"); + return op.reduce( + Arrays.asList(visit(ctx.predicate()), visitor.visitBoolean(ctx.testValue.getText()))); + } + + @Override + public T visitConvertedDataType(OpenSearchLegacySqlParser.ConvertedDataTypeContext ctx) { + if (ctx.getChild(0) != null && !Strings.isNullOrEmpty(ctx.getChild(0).getText())) { + return visitor.visitConvertedType(ctx.getChild(0).getText()); + } else { + return super.visitConvertedDataType(ctx); + } + } + + @Override + public T visitComparisonOperator(ComparisonOperatorContext ctx) { + return visitor.visitOperator(ctx.getText()); + } + + @Override + public T visitConstant(ConstantContext ctx) { + if (ctx.REAL_LITERAL() != null) { + return visitor.visitFloat(ctx.getText()); + } + if (ctx.dateType != null) { + return visitor.visitDate(ctx.getText()); + } + if (ctx.nullLiteral != null) { + return visitor.visitNull(); + } + return super.visitConstant(ctx); + } + + @Override + public T visitStringLiteral(StringLiteralContext ctx) { + return 
visitor.visitString(ctx.getText()); + } + + @Override + public T visitDecimalLiteral(DecimalLiteralContext ctx) { + return visitor.visitInteger(ctx.getText()); + } + + @Override + public T visitBooleanLiteral(BooleanLiteralContext ctx) { + return visitor.visitBoolean(ctx.getText()); + } + + @Override + protected T defaultResult() { + return visitor.defaultValue(); + } + + @Override + protected T aggregateResult(T aggregate, T nextResult) { + if (nextResult != defaultResult()) { // Simply return non-default value for now + return nextResult; + } + return aggregate; + } + + /** + * Named argument, ex. TOPHITS('size'=3), is under FunctionArgs -> Predicate And the function name + * should be contained in openSearchFunctionNameBase + */ + private boolean isNamedArgument(BinaryComparisonPredicateContext ctx) { + if (ctx.getParent() != null + && ctx.getParent().getParent() != null + && ctx.getParent().getParent().getParent() != null + && ctx.getParent().getParent().getParent() instanceof ScalarFunctionCallContext) { + + ScalarFunctionCallContext parent = + (ScalarFunctionCallContext) ctx.getParent().getParent().getParent(); + return parent.scalarFunctionName().functionNameBase().openSearchFunctionNameBase() != null; + } + return false; + } + + /** Enforce visiting result of table instead of ON clause as result */ + private T visitJoin(List children, TableSourceItemContext tableCtx) { + T result = defaultResult(); + for (ParseTree child : children) { + if (child == tableCtx) { + result = visit(tableCtx); + } else { + visit(child); + } + } + return result; + } + + /** Visit select items for type check and alias definition */ + private T visitSelectItem(ParserRuleContext item, UidContext uid) { + T result = visit(item); + if (uid != null) { + visitor.visitAs(uid.getText(), result); + } + return result; + } + + private T reduce(T reducer, ParserRuleContext ctx) { + return reduce(reducer, (ctx == null) ? 
emptyList() : ctx.children); + } + + /** Make constructor apply arguments and return result type */ + private T reduce(T reducer, List nodes) { + List args; + if (nodes == null) { + args = emptyList(); + } else { + args = + nodes.stream() + .map(this::visit) + .filter(type -> type != defaultResult()) + .collect(Collectors.toList()); + } + return reducer.reduce(args); + } + + /** Combine an item and a list of items to a single list */ + private List asList( + Node1 first, List rest) { + + List result = new ArrayList<>(singleton(first)); + result.addAll(rest); + return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java index b0bd01a093..cf583aab40 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; -/** - * Exit visitor early due to some reason. - */ +/** Exit visitor early due to some reason. 
*/ public class EarlyExitAnalysisException extends RuntimeException { - public EarlyExitAnalysisException(String message) { - super(message); - } + public EarlyExitAnalysisException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java index d3985259dd..8cc83a5fe2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java @@ -3,19 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; - public interface Cursor { - NullCursor NULL_CURSOR = new NullCursor(); + NullCursor NULL_CURSOR = new NullCursor(); /** - * All cursor's are of the form : + * All cursor's are of the form :
* The serialized form before encoding is upto Cursor implementation */ String generateCursorId(); - CursorType getType(); + CursorType getType(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java index 7c96cb8835..fea47e7e39 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java @@ -3,42 +3,41 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; import java.util.HashMap; import java.util.Map; /** - * Different types queries for which cursor is supported. - * The result execution, and cursor genreation/parsing will depend on the cursor type. - * NullCursor is the placeholder implementation in case of non-cursor query. + * Different types queries for which cursor is supported. The result execution, and cursor + * generation/parsing will depend on the cursor type. NullCursor is the placeholder implementation + * in case of non-cursor query. 
*/ public enum CursorType { - NULL(null), - DEFAULT("d"), - AGGREGATION("a"), - JOIN("j"); + NULL(null), + DEFAULT("d"), + AGGREGATION("a"), + JOIN("j"); - public String id; + public String id; - CursorType(String id) { - this.id = id; - } + CursorType(String id) { + this.id = id; + } - public String getId() { - return this.id; - } + public String getId() { + return this.id; + } - public static final Map LOOKUP = new HashMap<>(); + public static final Map LOOKUP = new HashMap<>(); - static { - for (CursorType type : CursorType.values()) { - LOOKUP.put(type.getId(), type); - } + static { + for (CursorType type : CursorType.values()) { + LOOKUP.put(type.getId(), type); } + } - public static CursorType getById(String id) { - return LOOKUP.getOrDefault(id, NULL); - } + public static CursorType getById(String id) { + return LOOKUP.getOrDefault(id, NULL); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java index 856c1e5e2b..c5be0066fc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; import com.google.common.base.Strings; @@ -21,9 +20,8 @@ import org.json.JSONObject; import org.opensearch.sql.legacy.executor.format.Schema; - /** - * Minimum metdata that will be serialized for generating cursorId for + * Minimum metdata that will be serialized for generating cursorId for
* SELECT .... FROM .. ORDER BY .... queries */ @Getter @@ -31,130 +29,135 @@ @NoArgsConstructor public class DefaultCursor implements Cursor { - /** Make sure all keys are unique to prevent overriding - * and as small as possible to make cursor compact - */ - private static final String FETCH_SIZE = "f"; - private static final String ROWS_LEFT = "l"; - private static final String INDEX_PATTERN = "i"; - private static final String SCROLL_ID = "s"; - private static final String SCHEMA_COLUMNS = "c"; - private static final String FIELD_ALIAS_MAP = "a"; - - /** To get mappings for index to check if type is date needed for - * @see org.opensearch.sql.legacy.executor.format.DateFieldFormatter */ - @NonNull - private String indexPattern; - - /** List of Schema.Column for maintaining field order and generating null values of missing fields */ - @NonNull - private List columns; - - /** To delegate to correct cursor handler to get next page*/ - private final CursorType type = CursorType.DEFAULT; - + /** + * Make sure all keys are unique to prevent overriding and as small as possible to make cursor + * compact + */ + private static final String FETCH_SIZE = "f"; + + private static final String ROWS_LEFT = "l"; + private static final String INDEX_PATTERN = "i"; + private static final String SCROLL_ID = "s"; + private static final String SCHEMA_COLUMNS = "c"; + private static final String FIELD_ALIAS_MAP = "a"; + + /** + * To get mappings for index to check if type is date needed for + * + * @see org.opensearch.sql.legacy.executor.format.DateFieldFormatter + */ + @NonNull private String indexPattern; + + /** + * List of Schema.Column for maintaining field order and generating null values of missing fields + */ + @NonNull private List columns; + + /** To delegate to correct cursor handler to get next page */ + private final CursorType type = CursorType.DEFAULT; + + /** + * Truncate the @see DataRows to respect LIMIT clause and/or to identify last page to close scroll + * 
context. docsLeft is decremented by fetch_size for call to get page of result. + */ + private long rowsLeft; + + /** + * @see org.opensearch.sql.legacy.executor.format.SelectResultSet + */ + @NonNull private Map fieldAliasMap; + + /** To get next batch of result */ + private String scrollId; + + /** To reduce the number of rows left by fetchSize */ + @NonNull private Integer fetchSize; + + private Integer limit; + + @Override + public CursorType getType() { + return type; + } + + @Override + public String generateCursorId() { + if (rowsLeft <= 0 || Strings.isNullOrEmpty(scrollId)) { + return null; + } + JSONObject json = new JSONObject(); + json.put(FETCH_SIZE, fetchSize); + json.put(ROWS_LEFT, rowsLeft); + json.put(INDEX_PATTERN, indexPattern); + json.put(SCROLL_ID, scrollId); + json.put(SCHEMA_COLUMNS, getSchemaAsJson()); + json.put(FIELD_ALIAS_MAP, fieldAliasMap); + return String.format("%s:%s", type.getId(), encodeCursor(json)); + } + + public static DefaultCursor from(String cursorId) { /** - * Truncate the @see DataRows to respect LIMIT clause and/or to identify last page to close scroll context. - * docsLeft is decremented by fetch_size for call to get page of result. 
+ * It is assumed that cursorId here is the second part of the original cursor passed by the + * client after removing first part which identifies cursor type */ - private long rowsLeft; - - /** @see org.opensearch.sql.legacy.executor.format.SelectResultSet */ - @NonNull - private Map fieldAliasMap; - - /** To get next batch of result */ - private String scrollId; - - /** To reduce the number of rows left by fetchSize */ - @NonNull - private Integer fetchSize; - - private Integer limit; - - @Override - public CursorType getType() { - return type; - } - - @Override - public String generateCursorId() { - if (rowsLeft <=0 || Strings.isNullOrEmpty(scrollId)) { - return null; - } - JSONObject json = new JSONObject(); - json.put(FETCH_SIZE, fetchSize); - json.put(ROWS_LEFT, rowsLeft); - json.put(INDEX_PATTERN, indexPattern); - json.put(SCROLL_ID, scrollId); - json.put(SCHEMA_COLUMNS, getSchemaAsJson()); - json.put(FIELD_ALIAS_MAP, fieldAliasMap); - return String.format("%s:%s", type.getId(), encodeCursor(json)); - } - - public static DefaultCursor from(String cursorId) { - /** - * It is assumed that cursorId here is the second part of the original cursor passed - * by the client after removing first part which identifies cursor type - */ - JSONObject json = decodeCursor(cursorId); - DefaultCursor cursor = new DefaultCursor(); - cursor.setFetchSize(json.getInt(FETCH_SIZE)); - cursor.setRowsLeft(json.getLong(ROWS_LEFT)); - cursor.setIndexPattern(json.getString(INDEX_PATTERN)); - cursor.setScrollId(json.getString(SCROLL_ID)); - cursor.setColumns(getColumnsFromSchema(json.getJSONArray(SCHEMA_COLUMNS))); - cursor.setFieldAliasMap(fieldAliasMap(json.getJSONObject(FIELD_ALIAS_MAP))); - - return cursor; - } - - private JSONArray getSchemaAsJson() { - JSONArray schemaJson = new JSONArray(); - - for (Schema.Column column : columns) { - schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); - } - - return schemaJson; + JSONObject json = 
decodeCursor(cursorId); + DefaultCursor cursor = new DefaultCursor(); + cursor.setFetchSize(json.getInt(FETCH_SIZE)); + cursor.setRowsLeft(json.getLong(ROWS_LEFT)); + cursor.setIndexPattern(json.getString(INDEX_PATTERN)); + cursor.setScrollId(json.getString(SCROLL_ID)); + cursor.setColumns(getColumnsFromSchema(json.getJSONArray(SCHEMA_COLUMNS))); + cursor.setFieldAliasMap(fieldAliasMap(json.getJSONObject(FIELD_ALIAS_MAP))); + + return cursor; + } + + private JSONArray getSchemaAsJson() { + JSONArray schemaJson = new JSONArray(); + + for (Schema.Column column : columns) { + schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); } - private JSONObject schemaEntry(String name, String alias, String type) { - JSONObject entry = new JSONObject(); - entry.put("name", name); - if (alias != null) { - entry.put("alias", alias); - } - entry.put("type", type); - return entry; - } - - private static String encodeCursor(JSONObject cursorJson) { - return Base64.getEncoder().encodeToString(cursorJson.toString().getBytes()); - } - - private static JSONObject decodeCursor(String cursorId) { - return new JSONObject(new String(Base64.getDecoder().decode(cursorId))); - } - - private static Map fieldAliasMap(JSONObject json) { - Map fieldToAliasMap = new HashMap<>(); - json.keySet().forEach(key -> fieldToAliasMap.put(key, json.get(key).toString())); - return fieldToAliasMap; - } + return schemaJson; + } - private static List getColumnsFromSchema(JSONArray schema) { - List columns = IntStream. - range(0, schema.length()). 
- mapToObj(i -> { - JSONObject jsonColumn = schema.getJSONObject(i); - return new Schema.Column( - jsonColumn.getString("name"), - jsonColumn.optString("alias", null), - Schema.Type.valueOf(jsonColumn.getString("type").toUpperCase()) - ); - } - ).collect(Collectors.toList()); - return columns; + private JSONObject schemaEntry(String name, String alias, String type) { + JSONObject entry = new JSONObject(); + entry.put("name", name); + if (alias != null) { + entry.put("alias", alias); } + entry.put("type", type); + return entry; + } + + private static String encodeCursor(JSONObject cursorJson) { + return Base64.getEncoder().encodeToString(cursorJson.toString().getBytes()); + } + + private static JSONObject decodeCursor(String cursorId) { + return new JSONObject(new String(Base64.getDecoder().decode(cursorId))); + } + + private static Map fieldAliasMap(JSONObject json) { + Map fieldToAliasMap = new HashMap<>(); + json.keySet().forEach(key -> fieldToAliasMap.put(key, json.get(key).toString())); + return fieldToAliasMap; + } + + private static List getColumnsFromSchema(JSONArray schema) { + List columns = + IntStream.range(0, schema.length()) + .mapToObj( + i -> { + JSONObject jsonColumn = schema.getJSONObject(i); + return new Schema.Column( + jsonColumn.getString("name"), + jsonColumn.optString("alias", null), + Schema.Type.valueOf(jsonColumn.getString("type").toUpperCase())); + }) + .collect(Collectors.toList()); + return columns; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java index 3b2691186b..b7d90b66da 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.google.common.collect.ImmutableList; @@ -17,66 +16,64 
@@ import org.opensearch.sql.legacy.antlr.semantic.types.special.Product; import org.opensearch.sql.legacy.executor.format.Schema; -/** - * The definition of column type provider - */ +/** The definition of column type provider */ public class ColumnTypeProvider { - private final List typeList; + private final List typeList; - private static final Map TYPE_MAP = - new ImmutableMap.Builder() - .put(OpenSearchDataType.SHORT, Schema.Type.SHORT) - .put(OpenSearchDataType.LONG, Schema.Type.LONG) - .put(OpenSearchDataType.INTEGER, Schema.Type.INTEGER) - .put(OpenSearchDataType.FLOAT, Schema.Type.FLOAT) - .put(OpenSearchDataType.DOUBLE, Schema.Type.DOUBLE) - .put(OpenSearchDataType.KEYWORD, Schema.Type.KEYWORD) - .put(OpenSearchDataType.TEXT, Schema.Type.TEXT) - .put(OpenSearchDataType.STRING, Schema.Type.TEXT) - .put(OpenSearchDataType.DATE, Schema.Type.DATE) - .put(OpenSearchDataType.BOOLEAN, Schema.Type.BOOLEAN) - .put(OpenSearchDataType.UNKNOWN, Schema.Type.DOUBLE) - .build(); - public static final Schema.Type COLUMN_DEFAULT_TYPE = Schema.Type.DOUBLE; + private static final Map TYPE_MAP = + new ImmutableMap.Builder() + .put(OpenSearchDataType.SHORT, Schema.Type.SHORT) + .put(OpenSearchDataType.LONG, Schema.Type.LONG) + .put(OpenSearchDataType.INTEGER, Schema.Type.INTEGER) + .put(OpenSearchDataType.FLOAT, Schema.Type.FLOAT) + .put(OpenSearchDataType.DOUBLE, Schema.Type.DOUBLE) + .put(OpenSearchDataType.KEYWORD, Schema.Type.KEYWORD) + .put(OpenSearchDataType.TEXT, Schema.Type.TEXT) + .put(OpenSearchDataType.STRING, Schema.Type.TEXT) + .put(OpenSearchDataType.DATE, Schema.Type.DATE) + .put(OpenSearchDataType.BOOLEAN, Schema.Type.BOOLEAN) + .put(OpenSearchDataType.UNKNOWN, Schema.Type.DOUBLE) + .build(); + public static final Schema.Type COLUMN_DEFAULT_TYPE = Schema.Type.DOUBLE; - public ColumnTypeProvider(Type type) { - this.typeList = convertOutputColumnType(type); - } + public ColumnTypeProvider(Type type) { + this.typeList = convertOutputColumnType(type); + } - public 
ColumnTypeProvider() { - this.typeList = new ArrayList<>(); - } + public ColumnTypeProvider() { + this.typeList = new ArrayList<>(); + } - /** - * Get the type of column by index. - * - * @param index column index. - * @return column type. - */ - public Schema.Type get(int index) { - if (typeList.isEmpty()) { - return COLUMN_DEFAULT_TYPE; - } else { - return typeList.get(index); - } + /** + * Get the type of column by index. + * + * @param index column index. + * @return column type. + */ + public Schema.Type get(int index) { + if (typeList.isEmpty()) { + return COLUMN_DEFAULT_TYPE; + } else { + return typeList.get(index); } + } - private List convertOutputColumnType(Type type) { - if (type instanceof Product) { - List types = ((Product) type).getTypes(); - return types.stream().map(t -> convertType(t)).collect(Collectors.toList()); - } else if (type instanceof OpenSearchDataType) { - return ImmutableList.of(convertType(type)); - } else { - return ImmutableList.of(COLUMN_DEFAULT_TYPE); - } + private List convertOutputColumnType(Type type) { + if (type instanceof Product) { + List types = ((Product) type).getTypes(); + return types.stream().map(t -> convertType(t)).collect(Collectors.toList()); + } else if (type instanceof OpenSearchDataType) { + return ImmutableList.of(convertType(type)); + } else { + return ImmutableList.of(COLUMN_DEFAULT_TYPE); } + } - private Schema.Type convertType(Type type) { - try { - return TYPE_MAP.getOrDefault(type, COLUMN_DEFAULT_TYPE); - } catch (Exception e) { - return COLUMN_DEFAULT_TYPE; - } + private Schema.Type convertType(Type type) { + try { + return TYPE_MAP.getOrDefault(type, COLUMN_DEFAULT_TYPE); + } catch (Exception e) { + return COLUMN_DEFAULT_TYPE; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java index ff6b016ddb..8804c543f6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,363 +17,368 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * - * * @author ansj */ public class Condition extends Where { - public enum OPERATOR { - - EQ, - GT, - LT, - GTE, - LTE, - N, - LIKE, - NLIKE, - REGEXP, - IS, - ISN, - IN, - NIN, - BETWEEN, - NBETWEEN, - GEO_INTERSECTS, - GEO_BOUNDING_BOX, - GEO_DISTANCE, - GEO_POLYGON, - IN_TERMS, - TERM, - IDS_QUERY, - NESTED_COMPLEX, - NOT_EXISTS_NESTED_COMPLEX, - CHILDREN_COMPLEX, - SCRIPT, - NIN_TERMS, - NTERM, - NREGEXP; - - public static Map methodNameToOpear; - - public static Map operStringToOpear; - - public static Map simpleOperStringToOpear; - - private static BiMap negatives; - - private static BiMap simpleReverses; - - static { - methodNameToOpear = new HashMap<>(); - methodNameToOpear.put("term", TERM); - methodNameToOpear.put("matchterm", TERM); - methodNameToOpear.put("match_term", TERM); - methodNameToOpear.put("terms", IN_TERMS); - methodNameToOpear.put("in_terms", IN_TERMS); - methodNameToOpear.put("ids", IDS_QUERY); - methodNameToOpear.put("ids_query", IDS_QUERY); - methodNameToOpear.put("regexp", REGEXP); - methodNameToOpear.put("regexp_query", REGEXP); - } - - static { - operStringToOpear = new HashMap<>(); - operStringToOpear.put("=", EQ); - operStringToOpear.put(">", GT); - operStringToOpear.put("<", LT); - operStringToOpear.put(">=", GTE); - operStringToOpear.put("<=", LTE); - operStringToOpear.put("<>", N); - operStringToOpear.put("LIKE", LIKE); - operStringToOpear.put("NOT", N); - operStringToOpear.put("NOT LIKE", NLIKE); - operStringToOpear.put("IS", IS); - operStringToOpear.put("IS NOT", ISN); - operStringToOpear.put("IN", IN); - operStringToOpear.put("NOT IN", NIN); - operStringToOpear.put("BETWEEN", BETWEEN); - operStringToOpear.put("NOT BETWEEN", NBETWEEN); - 
operStringToOpear.put("GEO_INTERSECTS", GEO_INTERSECTS); - operStringToOpear.put("GEO_BOUNDING_BOX", GEO_BOUNDING_BOX); - operStringToOpear.put("GEO_DISTANCE", GEO_DISTANCE); - operStringToOpear.put("GEO_POLYGON", GEO_POLYGON); - operStringToOpear.put("NESTED", NESTED_COMPLEX); - operStringToOpear.put("CHILDREN", CHILDREN_COMPLEX); - operStringToOpear.put("SCRIPT", SCRIPT); - } - - static { - simpleOperStringToOpear = new HashMap<>(); - simpleOperStringToOpear.put("=", EQ); - simpleOperStringToOpear.put(">", GT); - simpleOperStringToOpear.put("<", LT); - simpleOperStringToOpear.put(">=", GTE); - simpleOperStringToOpear.put("<=", LTE); - simpleOperStringToOpear.put("<>", N); - } - - static { - negatives = HashBiMap.create(7); - negatives.put(EQ, N); - negatives.put(IN_TERMS, NIN_TERMS); - negatives.put(TERM, NTERM); - negatives.put(GT, LTE); - negatives.put(LT, GTE); - negatives.put(LIKE, NLIKE); - negatives.put(IS, ISN); - negatives.put(IN, NIN); - negatives.put(BETWEEN, NBETWEEN); - negatives.put(NESTED_COMPLEX, NOT_EXISTS_NESTED_COMPLEX); - negatives.put(REGEXP, NREGEXP); - } - - static { - simpleReverses = HashBiMap.create(4); - simpleReverses.put(EQ, EQ); - simpleReverses.put(GT, LT); - simpleReverses.put(GTE, LTE); - simpleReverses.put(N, N); - } - - public OPERATOR negative() throws SqlParseException { - OPERATOR negative = negatives.get(this); - negative = negative != null ? negative : negatives.inverse().get(this); - if (negative == null) { - throw new SqlParseException(StringUtils.format("Negative operator [%s] is not supported.", - this.name())); - } - return negative; - } - - public OPERATOR simpleReverse() throws SqlParseException { - OPERATOR reverse = simpleReverses.get(this); - reverse = reverse != null ? 
reverse : simpleReverses.inverse().get(this); - if (reverse == null) { - throw new SqlParseException(StringUtils.format("Simple reverse operator [%s] is not supported.", - this.name())); - } - return reverse; - } - - public Boolean isSimpleOperator() { - return simpleOperStringToOpear.containsValue(this); - } + public enum OPERATOR { + EQ, + GT, + LT, + GTE, + LTE, + N, + LIKE, + NLIKE, + REGEXP, + IS, + ISN, + IN, + NIN, + BETWEEN, + NBETWEEN, + GEO_INTERSECTS, + GEO_BOUNDING_BOX, + GEO_DISTANCE, + GEO_POLYGON, + IN_TERMS, + TERM, + IDS_QUERY, + NESTED_COMPLEX, + NOT_EXISTS_NESTED_COMPLEX, + CHILDREN_COMPLEX, + SCRIPT, + NIN_TERMS, + NTERM, + NREGEXP; + + public static Map methodNameToOpear; + + public static Map operStringToOpear; + + public static Map simpleOperStringToOpear; + + private static BiMap negatives; + + private static BiMap simpleReverses; + + static { + methodNameToOpear = new HashMap<>(); + methodNameToOpear.put("term", TERM); + methodNameToOpear.put("matchterm", TERM); + methodNameToOpear.put("match_term", TERM); + methodNameToOpear.put("terms", IN_TERMS); + methodNameToOpear.put("in_terms", IN_TERMS); + methodNameToOpear.put("ids", IDS_QUERY); + methodNameToOpear.put("ids_query", IDS_QUERY); + methodNameToOpear.put("regexp", REGEXP); + methodNameToOpear.put("regexp_query", REGEXP); } - private String name; - - private SQLExpr nameExpr; - - private Object value; - - public SQLExpr getNameExpr() { - return nameExpr; + static { + operStringToOpear = new HashMap<>(); + operStringToOpear.put("=", EQ); + operStringToOpear.put(">", GT); + operStringToOpear.put("<", LT); + operStringToOpear.put(">=", GTE); + operStringToOpear.put("<=", LTE); + operStringToOpear.put("<>", N); + operStringToOpear.put("LIKE", LIKE); + operStringToOpear.put("NOT", N); + operStringToOpear.put("NOT LIKE", NLIKE); + operStringToOpear.put("IS", IS); + operStringToOpear.put("IS NOT", ISN); + operStringToOpear.put("IN", IN); + operStringToOpear.put("NOT IN", NIN); + 
operStringToOpear.put("BETWEEN", BETWEEN); + operStringToOpear.put("NOT BETWEEN", NBETWEEN); + operStringToOpear.put("GEO_INTERSECTS", GEO_INTERSECTS); + operStringToOpear.put("GEO_BOUNDING_BOX", GEO_BOUNDING_BOX); + operStringToOpear.put("GEO_DISTANCE", GEO_DISTANCE); + operStringToOpear.put("GEO_POLYGON", GEO_POLYGON); + operStringToOpear.put("NESTED", NESTED_COMPLEX); + operStringToOpear.put("CHILDREN", CHILDREN_COMPLEX); + operStringToOpear.put("SCRIPT", SCRIPT); } - public SQLExpr getValueExpr() { - return valueExpr; + static { + simpleOperStringToOpear = new HashMap<>(); + simpleOperStringToOpear.put("=", EQ); + simpleOperStringToOpear.put(">", GT); + simpleOperStringToOpear.put("<", LT); + simpleOperStringToOpear.put(">=", GTE); + simpleOperStringToOpear.put("<=", LTE); + simpleOperStringToOpear.put("<>", N); } - private SQLExpr valueExpr; - - private OPERATOR OPERATOR; - - private Object relationshipType; - - private boolean isNested; - private String nestedPath; - - private boolean isChildren; - private String childType; - - public Condition(CONN conn, String field, SQLExpr nameExpr, String condition, Object obj, SQLExpr valueExpr) - throws SqlParseException { - this(conn, field, nameExpr, condition, obj, valueExpr, null); + static { + negatives = HashBiMap.create(7); + negatives.put(EQ, N); + negatives.put(IN_TERMS, NIN_TERMS); + negatives.put(TERM, NTERM); + negatives.put(GT, LTE); + negatives.put(LT, GTE); + negatives.put(LIKE, NLIKE); + negatives.put(IS, ISN); + negatives.put(IN, NIN); + negatives.put(BETWEEN, NBETWEEN); + negatives.put(NESTED_COMPLEX, NOT_EXISTS_NESTED_COMPLEX); + negatives.put(REGEXP, NREGEXP); } - public Condition(CONN conn, String field, SQLExpr nameExpr, OPERATOR condition, Object obj, SQLExpr valueExpr) - throws SqlParseException { - this(conn, field, nameExpr, condition, obj, valueExpr, null); + static { + simpleReverses = HashBiMap.create(4); + simpleReverses.put(EQ, EQ); + simpleReverses.put(GT, LT); + simpleReverses.put(GTE, 
LTE); + simpleReverses.put(N, N); } - public Condition(CONN conn, String name, SQLExpr nameExpr, String oper, - Object value, SQLExpr valueExpr, Object relationshipType) throws SqlParseException { - super(conn); - - this.OPERATOR = null; - this.name = name; - this.value = value; - this.nameExpr = nameExpr; - this.valueExpr = valueExpr; - - this.relationshipType = relationshipType; - - if (this.relationshipType != null) { - if (this.relationshipType instanceof NestedType) { - NestedType nestedType = (NestedType) relationshipType; - - this.isNested = true; - this.nestedPath = nestedType.path; - this.isChildren = false; - this.childType = ""; - } else if (relationshipType instanceof ChildrenType) { - ChildrenType childrenType = (ChildrenType) relationshipType; - - this.isNested = false; - this.nestedPath = ""; - this.isChildren = true; - this.childType = childrenType.childType; - } - } else { - this.isNested = false; - this.nestedPath = ""; - this.isChildren = false; - this.childType = ""; - } - - if (OPERATOR.operStringToOpear.containsKey(oper)) { - this.OPERATOR = OPERATOR.operStringToOpear.get(oper); - } else { - throw new SqlParseException("Unsupported operation: " + oper); - } + public OPERATOR negative() throws SqlParseException { + OPERATOR negative = negatives.get(this); + negative = negative != null ? 
negative : negatives.inverse().get(this); + if (negative == null) { + throw new SqlParseException( + StringUtils.format("Negative operator [%s] is not supported.", this.name())); + } + return negative; } - - public Condition(CONN conn, - String name, - SQLExpr nameExpr, - OPERATOR oper, - Object value, - SQLExpr valueExpr, - Object relationshipType - ) throws SqlParseException { - super(conn); - - this.OPERATOR = null; - this.nameExpr = nameExpr; - this.valueExpr = valueExpr; - this.name = name; - this.value = value; - this.OPERATOR = oper; - this.relationshipType = relationshipType; - - if (this.relationshipType != null) { - if (this.relationshipType instanceof NestedType) { - NestedType nestedType = (NestedType) relationshipType; - - this.isNested = true; - this.nestedPath = nestedType.path; - this.isChildren = false; - this.childType = ""; - } else if (relationshipType instanceof ChildrenType) { - ChildrenType childrenType = (ChildrenType) relationshipType; - - this.isNested = false; - this.nestedPath = ""; - this.isChildren = true; - this.childType = childrenType.childType; - } - } else { - this.isNested = false; - this.nestedPath = ""; - this.isChildren = false; - this.childType = ""; - } + public OPERATOR simpleReverse() throws SqlParseException { + OPERATOR reverse = simpleReverses.get(this); + reverse = reverse != null ? 
reverse : simpleReverses.inverse().get(this); + if (reverse == null) { + throw new SqlParseException( + StringUtils.format("Simple reverse operator [%s] is not supported.", this.name())); + } + return reverse; } - public String getOpertatorSymbol() throws SqlParseException { - switch (OPERATOR) { - case EQ: - return "=="; - case GT: - return ">"; - case LT: - return "<"; - case GTE: - return ">="; - case LTE: - return "<="; - case N: - return "<>"; - case IS: - return "=="; - - case ISN: - return "!="; - default: - throw new SqlParseException(StringUtils.format("Failed to parse operator [%s]", OPERATOR)); - } + public Boolean isSimpleOperator() { + return simpleOperStringToOpear.containsValue(this); } - - - public String getName() { - return name; + } + + private String name; + + private SQLExpr nameExpr; + + private Object value; + + public SQLExpr getNameExpr() { + return nameExpr; + } + + public SQLExpr getValueExpr() { + return valueExpr; + } + + private SQLExpr valueExpr; + + private OPERATOR OPERATOR; + + private Object relationshipType; + + private boolean isNested; + private String nestedPath; + + private boolean isChildren; + private String childType; + + public Condition( + CONN conn, String field, SQLExpr nameExpr, String condition, Object obj, SQLExpr valueExpr) + throws SqlParseException { + this(conn, field, nameExpr, condition, obj, valueExpr, null); + } + + public Condition( + CONN conn, String field, SQLExpr nameExpr, OPERATOR condition, Object obj, SQLExpr valueExpr) + throws SqlParseException { + this(conn, field, nameExpr, condition, obj, valueExpr, null); + } + + public Condition( + CONN conn, + String name, + SQLExpr nameExpr, + String oper, + Object value, + SQLExpr valueExpr, + Object relationshipType) + throws SqlParseException { + super(conn); + + this.OPERATOR = null; + this.name = name; + this.value = value; + this.nameExpr = nameExpr; + this.valueExpr = valueExpr; + + this.relationshipType = relationshipType; + + if 
(this.relationshipType != null) { + if (this.relationshipType instanceof NestedType) { + NestedType nestedType = (NestedType) relationshipType; + + this.isNested = true; + this.nestedPath = nestedType.path; + this.isChildren = false; + this.childType = ""; + } else if (relationshipType instanceof ChildrenType) { + ChildrenType childrenType = (ChildrenType) relationshipType; + + this.isNested = false; + this.nestedPath = ""; + this.isChildren = true; + this.childType = childrenType.childType; + } + } else { + this.isNested = false; + this.nestedPath = ""; + this.isChildren = false; + this.childType = ""; } - public void setName(String name) { - this.name = name; + if (OPERATOR.operStringToOpear.containsKey(oper)) { + this.OPERATOR = OPERATOR.operStringToOpear.get(oper); + } else { + throw new SqlParseException("Unsupported operation: " + oper); } - - public Object getValue() { - return value; + } + + public Condition( + CONN conn, + String name, + SQLExpr nameExpr, + OPERATOR oper, + Object value, + SQLExpr valueExpr, + Object relationshipType) + throws SqlParseException { + super(conn); + + this.OPERATOR = null; + this.nameExpr = nameExpr; + this.valueExpr = valueExpr; + this.name = name; + this.value = value; + this.OPERATOR = oper; + this.relationshipType = relationshipType; + + if (this.relationshipType != null) { + if (this.relationshipType instanceof NestedType) { + NestedType nestedType = (NestedType) relationshipType; + + this.isNested = true; + this.nestedPath = nestedType.path; + this.isChildren = false; + this.childType = ""; + } else if (relationshipType instanceof ChildrenType) { + ChildrenType childrenType = (ChildrenType) relationshipType; + + this.isNested = false; + this.nestedPath = ""; + this.isChildren = true; + this.childType = childrenType.childType; + } + } else { + this.isNested = false; + this.nestedPath = ""; + this.isChildren = false; + this.childType = ""; } - - public void setValue(Object value) { - this.value = value; + } + + public 
String getOpertatorSymbol() throws SqlParseException { + switch (OPERATOR) { + case EQ: + return "=="; + case GT: + return ">"; + case LT: + return "<"; + case GTE: + return ">="; + case LTE: + return "<="; + case N: + return "<>"; + case IS: + return "=="; + + case ISN: + return "!="; + default: + throw new SqlParseException(StringUtils.format("Failed to parse operator [%s]", OPERATOR)); } + } - public OPERATOR getOPERATOR() { - return OPERATOR; - } + public String getName() { + return name; + } - public void setOPERATOR(OPERATOR OPERATOR) { - this.OPERATOR = OPERATOR; - } + public void setName(String name) { + this.name = name; + } - public Object getRelationshipType() { - return relationshipType; - } + public Object getValue() { + return value; + } - public void setRelationshipType(Object relationshipType) { - this.relationshipType = relationshipType; - } + public void setValue(Object value) { + this.value = value; + } - public boolean isNested() { - return isNested; - } + public OPERATOR getOPERATOR() { + return OPERATOR; + } - public void setNested(boolean isNested) { - this.isNested = isNested; - } + public void setOPERATOR(OPERATOR OPERATOR) { + this.OPERATOR = OPERATOR; + } - public String getNestedPath() { - return nestedPath; - } + public Object getRelationshipType() { + return relationshipType; + } - public void setNestedPath(String nestedPath) { - this.nestedPath = nestedPath; - } + public void setRelationshipType(Object relationshipType) { + this.relationshipType = relationshipType; + } - public boolean isChildren() { - return isChildren; - } + public boolean isNested() { + return isNested; + } - public void setChildren(boolean isChildren) { - this.isChildren = isChildren; - } + public void setNested(boolean isNested) { + this.isNested = isNested; + } - public String getChildType() { - return childType; - } + public String getNestedPath() { + return nestedPath; + } - public void setChildType(String childType) { - this.childType = childType; - } + 
public void setNestedPath(String nestedPath) { + this.nestedPath = nestedPath; + } + + public boolean isChildren() { + return isChildren; + } + + public void setChildren(boolean isChildren) { + this.isChildren = isChildren; + } + + public String getChildType() { + return childType; + } + + public void setChildType(String childType) { + this.childType = childType; + } /** - * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX} + * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX}
* For example, the opear is {@link OPERATOR#NESTED_COMPLEX} when condition is * nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') */ @@ -382,40 +386,53 @@ public boolean isNestedComplex() { return OPERATOR.NESTED_COMPLEX == OPERATOR; } - @Override - public String toString() { - String result = ""; - - if (this.isNested()) { - result = "nested condition "; - if (this.getNestedPath() != null) { - result += "on path:" + this.getNestedPath() + " "; - } - } else if (this.isChildren()) { - result = "children condition "; - - if (this.getChildType() != null) { - result += "on child: " + this.getChildType() + " "; - } - } - - if (value instanceof Object[]) { - result += this.conn + " " + this.name + " " + this.OPERATOR + " " + Arrays.toString((Object[]) value); - } else { - result += this.conn + " " + this.name + " " + this.OPERATOR + " " + this.value; - } - - return result; + @Override + public String toString() { + String result = ""; + + if (this.isNested()) { + result = "nested condition "; + if (this.getNestedPath() != null) { + result += "on path:" + this.getNestedPath() + " "; + } + } else if (this.isChildren()) { + result = "children condition "; + + if (this.getChildType() != null) { + result += "on child: " + this.getChildType() + " "; + } + } + + if (value instanceof Object[]) { + result += + this.conn + + " " + + this.name + + " " + + this.OPERATOR + + " " + + Arrays.toString((Object[]) value); + } else { + result += this.conn + " " + this.name + " " + this.OPERATOR + " " + this.value; } - @Override - public Object clone() throws CloneNotSupportedException { - try { - return new Condition(this.getConn(), this.getName(), this.getNameExpr(), - this.getOPERATOR(), this.getValue(), this.getValueExpr(), this.getRelationshipType()); - } catch (SqlParseException e) { + return result; + } + + @Override + public Object clone() throws CloneNotSupportedException { + try { + return new Condition( + this.getConn(), + this.getName(), + 
this.getNameExpr(), + this.getOPERATOR(), + this.getValue(), + this.getValueExpr(), + this.getRelationshipType()); + } catch (SqlParseException e) { - } - return null; } + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java index 587a8b3ef9..efa77da0a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java @@ -3,12 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * SQL Delete statement. - */ -public class Delete extends Query { - -} +/** SQL Delete statement. */ +public class Delete extends Query {} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java index 996caae5e2..635d0062a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java @@ -3,39 +3,35 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.bucketpath; import java.util.ArrayDeque; import java.util.Deque; /** - * The bucket path syntax + * The bucket path syntax
* [ , ]* [ , ] * - * https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#buckets-path-syntax + *

https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#buckets-path-syntax */ public class BucketPath { - private Deque pathStack = new ArrayDeque<>(); + private Deque pathStack = new ArrayDeque<>(); - public BucketPath add(Path path) { - if (pathStack.isEmpty()) { - assert path.isMetricPath() : "The last path in the bucket path must be Metric"; - } else { - assert path.isAggPath() : "All the other path in the bucket path must be Agg"; - } - pathStack.push(path); - return this; + public BucketPath add(Path path) { + if (pathStack.isEmpty()) { + assert path.isMetricPath() : "The last path in the bucket path must be Metric"; + } else { + assert path.isAggPath() : "All the other path in the bucket path must be Agg"; } + pathStack.push(path); + return this; + } - /** - * Return the bucket path. - * Return "", if there is no agg or metric available - */ - public String getBucketPath() { - String bucketPath = pathStack.isEmpty() ? "" : pathStack.pop().getPath(); - return pathStack.stream() - .map(path -> path.getSeparator() + path.getPath()) - .reduce(bucketPath, String::concat); - } + /** Return the bucket path. Return "", if there is no agg or metric available */ + public String getBucketPath() { + String bucketPath = pathStack.isEmpty() ? 
"" : pathStack.pop().getPath(); + return pathStack.stream() + .map(path -> path.getSeparator() + path.getPath()) + .reduce(bucketPath, String::concat); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java index d251585f89..4fdf6391bd 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; @@ -29,135 +28,141 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.Transports; -/** - * A RestExecutor wrapper to execute request asynchronously to avoid blocking transport thread. - */ +/** A RestExecutor wrapper to execute request asynchronously to avoid blocking transport thread. */ public class AsyncRestExecutor implements RestExecutor { - /** - * Custom thread pool name managed by OpenSearch - */ - public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; - - private static final Logger LOG = LogManager.getLogger(AsyncRestExecutor.class); - - /** - * Treat all actions as blocking which means async all actions, - * ex. 
execute() in csv executor or pretty format executor - */ - private static final Predicate ALL_ACTION_IS_BLOCKING = anyAction -> true; - - /** - * Delegated rest executor to async - */ - private final RestExecutor executor; - - /** - * Request type that expect to async to avoid blocking - */ - private final Predicate isBlocking; - - - AsyncRestExecutor(RestExecutor executor) { - this(executor, ALL_ACTION_IS_BLOCKING); + /** Custom thread pool name managed by OpenSearch */ + public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; + + private static final Logger LOG = LogManager.getLogger(AsyncRestExecutor.class); + + /** + * Treat all actions as blocking which means async all actions, ex. execute() in csv executor or + * pretty format executor + */ + private static final Predicate ALL_ACTION_IS_BLOCKING = anyAction -> true; + + /** Delegated rest executor to async */ + private final RestExecutor executor; + + /** Request type that expect to async to avoid blocking */ + private final Predicate isBlocking; + + AsyncRestExecutor(RestExecutor executor) { + this(executor, ALL_ACTION_IS_BLOCKING); + } + + AsyncRestExecutor(RestExecutor executor, Predicate isBlocking) { + this.executor = executor; + this.isBlocking = isBlocking; + } + + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception { + if (isBlockingAction(queryAction) && isRunningInTransportThread()) { + if (LOG.isDebugEnabled()) { + LOG.debug( + "[{}] Async blocking query action [{}] for executor [{}] in current thread [{}]", + QueryContext.getRequestId(), + name(executor), + name(queryAction), + Thread.currentThread().getName()); + } + async(client, params, queryAction, channel); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug( + "[{}] Continue running query action [{}] for executor [{}] in current thread [{}]", + QueryContext.getRequestId(), + name(executor), + name(queryAction), + Thread.currentThread().getName()); + } 
+ doExecuteWithTimeMeasured(client, params, queryAction, channel); } - - AsyncRestExecutor(RestExecutor executor, Predicate isBlocking) { - this.executor = executor; - this.isBlocking = isBlocking; - } - - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception { - if (isBlockingAction(queryAction) && isRunningInTransportThread()) { - if (LOG.isDebugEnabled()) { - LOG.debug("[{}] Async blocking query action [{}] for executor [{}] in current thread [{}]", - QueryContext.getRequestId(), name(executor), name(queryAction), Thread.currentThread().getName()); - } - async(client, params, queryAction, channel); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("[{}] Continue running query action [{}] for executor [{}] in current thread [{}]", - QueryContext.getRequestId(), name(executor), name(queryAction), Thread.currentThread().getName()); - } + } + + @Override + public String execute(Client client, Map params, QueryAction queryAction) + throws Exception { + // Result is always required and no easy way to async it here. + return executor.execute(client, params, queryAction); + } + + private boolean isBlockingAction(QueryAction queryAction) { + return isBlocking.test(queryAction); + } + + private boolean isRunningInTransportThread() { + return Transports.isTransportThread(Thread.currentThread()); + } + + /** Run given task in thread pool asynchronously */ + private void async( + Client client, Map params, QueryAction queryAction, RestChannel channel) { + + ThreadPool threadPool = client.threadPool(); + Runnable runnable = + () -> { + try { doExecuteWithTimeMeasured(client, params, queryAction, channel); - } - } - - @Override - public String execute(Client client, Map params, QueryAction queryAction) throws Exception { - // Result is always required and no easy way to async it here. 
- return executor.execute(client, params, queryAction); - } - - private boolean isBlockingAction(QueryAction queryAction) { - return isBlocking.test(queryAction); - } - - private boolean isRunningInTransportThread() { - return Transports.isTransportThread(Thread.currentThread()); - } - - /** - * Run given task in thread pool asynchronously - */ - private void async(Client client, Map params, QueryAction queryAction, RestChannel channel) { - - ThreadPool threadPool = client.threadPool(); - Runnable runnable = () -> { - try { - doExecuteWithTimeMeasured(client, params, queryAction, channel); - } catch (IOException | SqlParseException | OpenSearchException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an IO/SQL exception: {}", QueryContext.getRequestId(), - e.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } catch (IllegalStateException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got a runtime exception: {}", QueryContext.getRequestId(), - e.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INSUFFICIENT_STORAGE, - "Memory circuit is broken.")); - } catch (Throwable t) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an unknown throwable: {}", QueryContext.getRequestId(), - t.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, - String.valueOf(t.getMessage()))); - } finally { - BackOffRetryStrategy.releaseMem(executor); - } + } catch (IOException | SqlParseException | OpenSearchException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an IO/SQL exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + 
channel.sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } catch (IllegalStateException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got a runtime exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INSUFFICIENT_STORAGE, "Memory circuit is broken.")); + } catch (Throwable t) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an unknown throwable: {}", + QueryContext.getRequestId(), + t.getMessage()); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INTERNAL_SERVER_ERROR, String.valueOf(t.getMessage()))); + } finally { + BackOffRetryStrategy.releaseMem(executor); + } }; - // Preserve context of calling thread to ensure headers of requests are forwarded when running blocking actions - threadPool.schedule( - QueryContext.withCurrentContext(runnable), - new TimeValue(0L), - SQL_WORKER_THREAD_POOL_NAME - ); + // Preserve context of calling thread to ensure headers of requests are forwarded when running + // blocking actions + threadPool.schedule( + QueryContext.withCurrentContext(runnable), new TimeValue(0L), SQL_WORKER_THREAD_POOL_NAME); + } + + /** Time the real execution of Executor and log slow query for troubleshooting */ + private void doExecuteWithTimeMeasured( + Client client, Map params, QueryAction action, RestChannel channel) + throws Exception { + long startTime = System.nanoTime(); + try { + executor.execute(client, params, action, channel); + } finally { + Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); + int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); + if (elapsed.getSeconds() >= slowLogThreshold) { + LOG.warn( + "[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); 
+ } } + } - /** - * Time the real execution of Executor and log slow query for troubleshooting - */ - private void doExecuteWithTimeMeasured(Client client, - Map params, - QueryAction action, - RestChannel channel) throws Exception { - long startTime = System.nanoTime(); - try { - executor.execute(client, params, action, channel); - } finally { - Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); - int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); - if (elapsed.getSeconds() >= slowLogThreshold) { - LOG.warn("[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); - } - } - } - - private String name(Object object) { - return object.getClass().getSimpleName(); - } + private String name(Object object) { + return object.getClass().getSimpleName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java index a5dd066536..54c4dd5abb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import com.google.common.collect.Maps; @@ -35,90 +34,94 @@ import org.opensearch.sql.legacy.query.join.JoinRequestBuilder; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; - public class ElasticDefaultRestExecutor implements RestExecutor { - /** - * Request builder to generate OpenSearch DSL - */ - private final SqlElasticRequestBuilder requestBuilder; + /** Request builder to generate OpenSearch DSL */ + private final SqlElasticRequestBuilder requestBuilder; - private static final Logger LOG = LogManager.getLogger(ElasticDefaultRestExecutor.class); + private static final Logger LOG = 
LogManager.getLogger(ElasticDefaultRestExecutor.class); - public ElasticDefaultRestExecutor(QueryAction queryAction) { - // Put explain() here to make it run in NIO thread - try { - this.requestBuilder = queryAction.explain(); - } catch (SqlParseException e) { - throw new IllegalStateException("Failed to explain query action", e); - } + public ElasticDefaultRestExecutor(QueryAction queryAction) { + // Put explain() here to make it run in NIO thread + try { + this.requestBuilder = queryAction.explain(); + } catch (SqlParseException e) { + throw new IllegalStateException("Failed to explain query action", e); } + } - /** - * Execute the ActionRequest and returns the REST response using the channel. - */ - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception { - ActionRequest request = requestBuilder.request(); + /** Execute the ActionRequest and returns the REST response using the channel. */ + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception { + ActionRequest request = requestBuilder.request(); - if (requestBuilder instanceof JoinRequestBuilder) { - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); - executor.run(); - executor.sendResponse(channel); - } else if (requestBuilder instanceof MultiQueryRequestBuilder) { - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) requestBuilder); - executor.run(); - sendDefaultResponse(executor.getHits(), channel); - } else if (request instanceof SearchRequest) { - client.search((SearchRequest) request, new RestStatusToXContentListener<>(channel)); - } else if (request instanceof DeleteByQueryRequest) { - requestBuilder.getBuilder().execute( - new BulkIndexByScrollResponseContentListener(channel, Maps.newHashMap())); - } else if (request instanceof GetIndexRequest) { - 
requestBuilder.getBuilder().execute(new GetIndexRequestRestListener(channel, (GetIndexRequest) request)); - } else if (request instanceof SearchScrollRequest) { - client.searchScroll((SearchScrollRequest) request, new RestStatusToXContentListener<>(channel)); - } else { - throw new Exception(String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); - } + if (requestBuilder instanceof JoinRequestBuilder) { + ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); + executor.run(); + executor.sendResponse(channel); + } else if (requestBuilder instanceof MultiQueryRequestBuilder) { + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) requestBuilder); + executor.run(); + sendDefaultResponse(executor.getHits(), channel); + } else if (request instanceof SearchRequest) { + client.search((SearchRequest) request, new RestStatusToXContentListener<>(channel)); + } else if (request instanceof DeleteByQueryRequest) { + requestBuilder + .getBuilder() + .execute(new BulkIndexByScrollResponseContentListener(channel, Maps.newHashMap())); + } else if (request instanceof GetIndexRequest) { + requestBuilder + .getBuilder() + .execute(new GetIndexRequestRestListener(channel, (GetIndexRequest) request)); + } else if (request instanceof SearchScrollRequest) { + client.searchScroll( + (SearchScrollRequest) request, new RestStatusToXContentListener<>(channel)); + } else { + throw new Exception( + String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); } + } - @Override - public String execute(Client client, Map params, QueryAction queryAction) throws Exception { - ActionRequest request = requestBuilder.request(); - - if (requestBuilder instanceof JoinRequestBuilder) { - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); - executor.run(); - return 
ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); - } else if (requestBuilder instanceof MultiQueryRequestBuilder) { - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) requestBuilder); - executor.run(); - return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); - } else if (request instanceof SearchRequest) { - ActionFuture future = client.search((SearchRequest) request); - SearchResponse response = future.actionGet(); - return response.toString(); - } else if (request instanceof DeleteByQueryRequest) { - return requestBuilder.get().toString(); - } else if (request instanceof GetIndexRequest) { - return requestBuilder.getBuilder().execute().actionGet().toString(); - } else { - throw new Exception(String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); - } + @Override + public String execute(Client client, Map params, QueryAction queryAction) + throws Exception { + ActionRequest request = requestBuilder.request(); + if (requestBuilder instanceof JoinRequestBuilder) { + ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); + executor.run(); + return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); + } else if (requestBuilder instanceof MultiQueryRequestBuilder) { + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) requestBuilder); + executor.run(); + return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); + } else if (request instanceof SearchRequest) { + ActionFuture future = client.search((SearchRequest) request); + SearchResponse response = future.actionGet(); + return response.toString(); + } else if (request instanceof DeleteByQueryRequest) { + return requestBuilder.get().toString(); + } else if (request instanceof GetIndexRequest) { + return 
requestBuilder.getBuilder().execute().actionGet().toString(); + } else { + throw new Exception( + String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); } + } - private void sendDefaultResponse(SearchHits hits, RestChannel channel) { - try { - String json = ElasticUtils.hitsAsStringResult(hits, new MetaSearchResult()); - BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, json); - channel.sendResponse(bytesRestResponse); - } catch (IOException e) { - e.printStackTrace(); - } + private void sendDefaultResponse(SearchHits hits, RestChannel channel) { + try { + String json = ElasticUtils.hitsAsStringResult(hits, new MetaSearchResult()); + BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, json); + channel.sendResponse(bytesRestResponse); + } catch (IOException e) { + e.printStackTrace(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java index c48eb673bd..62a6d63ef7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java @@ -3,18 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; import org.opensearch.search.SearchHits; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. 
*/ public interface ElasticHitsExecutor { - void run() throws IOException, SqlParseException; + void run() throws IOException, SqlParseException; - SearchHits getHits(); + SearchHits getHits(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java index ff241fce77..6f753a5e7c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java @@ -3,38 +3,34 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.util.Map; import org.opensearch.search.SearchHit; -/** - * Created by Eliran on 3/10/2015. - */ +/** Created by Eliran on 3/10/2015. */ public class ElasticResultHandler { - public static Object getFieldValue(SearchHit hit, String field) { - return deepSearchInMap(hit.getSourceAsMap(), field); - } + public static Object getFieldValue(SearchHit hit, String field) { + return deepSearchInMap(hit.getSourceAsMap(), field); + } - private static Object deepSearchInMap(Map fieldsMap, String name) { - if (name.contains(".")) { - String[] path = name.split("\\."); - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + private static Object deepSearchInMap(Map fieldsMap, String name) { + if (name.contains(".")) { + String[] path = name.split("\\."); + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; } - - return 
fieldsMap.get(name); + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; + } + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); } + return fieldsMap.get(name); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java index 680c0c8e85..28bc559a01 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import com.google.common.collect.ImmutableSet; @@ -12,86 +11,86 @@ import java.util.Set; import java.util.stream.Collectors; -/** - * Created by Eliran on 27/12/2015. - */ +/** Created by Eliran on 27/12/2015. */ public class CSVResult { - private static final Set SENSITIVE_CHAR = ImmutableSet.of("=", "+", "-", "@"); + private static final Set SENSITIVE_CHAR = ImmutableSet.of("=", "+", "-", "@"); - private final List headers; - private final List lines; + private final List headers; + private final List lines; - /** - * Skip sanitizing if string line provided. This constructor is basically used by - * assertion in test code. - */ - public CSVResult(List headers, List lines) { - this.headers = headers; - this.lines = lines; - } + /** + * Skip sanitizing if string line provided. This constructor is basically used by assertion in + * test code. + */ + public CSVResult(List headers, List lines) { + this.headers = headers; + this.lines = lines; + } /** * Sanitize both headers and data lines by: - * 1) First prepend single quote if first char is sensitive (= - + @) - * 2) Second double quote entire cell if any comma found + *

    + *
  1. First prepend single quote if first char is sensitive (= - + @) + *
  2. Second double quote entire cell if any comma found + *
*/ public CSVResult(String separator, List headers, List> lines) { this.headers = sanitizeHeaders(separator, headers); this.lines = sanitizeLines(separator, lines); } - /** - * Return CSV header names which are sanitized because OpenSearch allows - * special character present in field name too. - * @return CSV header name list after sanitized - */ - public List getHeaders() { - return headers; - } - - /** - * Return CSV lines in which each cell is sanitized to avoid CSV injection. - * @return CSV lines after sanitized - */ - public List getLines() { - return lines; + /** + * Return CSV header names which are sanitized because OpenSearch allows special character present + * in field name too. + * + * @return CSV header name list after sanitized + */ + public List getHeaders() { + return headers; + } + + /** + * Return CSV lines in which each cell is sanitized to avoid CSV injection. + * + * @return CSV lines after sanitized + */ + public List getLines() { + return lines; + } + + private List sanitizeHeaders(String separator, List headers) { + return headers.stream() + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(separator, cell)) + .collect(Collectors.toList()); + } + + private List sanitizeLines(String separator, List> lines) { + List result = new ArrayList<>(); + for (List line : lines) { + result.add( + line.stream() + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(separator, cell)) + .collect(Collectors.joining(separator))); } + return result; + } - private List sanitizeHeaders(String separator, List headers) { - return headers.stream(). - map(this::sanitizeCell). - map(cell -> quoteIfRequired(separator, cell)). - collect(Collectors.toList()); + private String sanitizeCell(String cell) { + if (isStartWithSensitiveChar(cell)) { + return "'" + cell; } + return cell; + } - private List sanitizeLines(String separator, List> lines) { - List result = new ArrayList<>(); - for (List line : lines) { - result.add(line.stream(). 
- map(this::sanitizeCell). - map(cell -> quoteIfRequired(separator, cell)). - collect(Collectors.joining(separator))); - } - return result; - } - - private String sanitizeCell(String cell) { - if (isStartWithSensitiveChar(cell)) { - return "'" + cell; - } - return cell; - } - - private String quoteIfRequired(String separator, String cell) { - final String quote = "\""; - return cell.contains(separator) - ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; - } - - private boolean isStartWithSensitiveChar(String cell) { - return SENSITIVE_CHAR.stream(). - anyMatch(cell::startsWith); - } + private String quoteIfRequired(String separator, String cell) { + final String quote = "\""; + return cell.contains(separator) ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; + } + private boolean isStartWithSensitiveChar(String cell) { + return SENSITIVE_CHAR.stream().anyMatch(cell::startsWith); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java index da99652e13..a69ff31a49 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import com.google.common.base.Joiner; @@ -18,60 +17,64 @@ import org.opensearch.sql.legacy.query.QueryAction; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. 
*/ public class CSVResultRestExecutor implements RestExecutor { - @Override - public void execute(final Client client, final Map params, final QueryAction queryAction, - final RestChannel channel) throws Exception { - - final String csvString = execute(client, params, queryAction); - final BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, csvString); + @Override + public void execute( + final Client client, + final Map params, + final QueryAction queryAction, + final RestChannel channel) + throws Exception { - if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { - throw new IllegalStateException( - "[CSVResultRestExecutor] Memory could be insufficient when sendResponse()."); - } + final String csvString = execute(client, params, queryAction); + final BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, csvString); - channel.sendResponse(bytesRestResponse); + if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { + throw new IllegalStateException( + "[CSVResultRestExecutor] Memory could be insufficient when sendResponse()."); } - @Override - public String execute(final Client client, final Map params, final QueryAction queryAction) - throws Exception { + channel.sendResponse(bytesRestResponse); + } - final Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); + @Override + public String execute( + final Client client, final Map params, final QueryAction queryAction) + throws Exception { - final String separator = params.getOrDefault("separator", ","); - final String newLine = params.getOrDefault("newLine", "\n"); + final Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); - final boolean flat = getBooleanOrDefault(params, "flat", false); - final boolean includeScore = getBooleanOrDefault(params, "_score", false); - final boolean includeId = getBooleanOrDefault(params, "_id", false); + 
final String separator = params.getOrDefault("separator", ","); + final String newLine = params.getOrDefault("newLine", "\n"); - final List fieldNames = queryAction.getFieldNames().orElse(null); - final CSVResult result = new CSVResultsExtractor(includeScore, includeId) - .extractResults(queryResult, flat, separator, fieldNames); + final boolean flat = getBooleanOrDefault(params, "flat", false); + final boolean includeScore = getBooleanOrDefault(params, "_score", false); + final boolean includeId = getBooleanOrDefault(params, "_id", false); - return buildString(separator, result, newLine); - } + final List fieldNames = queryAction.getFieldNames().orElse(null); + final CSVResult result = + new CSVResultsExtractor(includeScore, includeId) + .extractResults(queryResult, flat, separator, fieldNames); - private boolean getBooleanOrDefault(Map params, String param, boolean defaultValue) { - boolean flat = defaultValue; - if (params.containsKey(param)) { - flat = Boolean.parseBoolean(params.get(param)); - } - return flat; - } + return buildString(separator, result, newLine); + } - private String buildString(String separator, CSVResult result, String newLine) { - StringBuilder csv = new StringBuilder(); - csv.append(Joiner.on(separator).join(result.getHeaders())); - csv.append(newLine); - csv.append(Joiner.on(newLine).join(result.getLines())); - return csv.toString(); + private boolean getBooleanOrDefault( + Map params, String param, boolean defaultValue) { + boolean flat = defaultValue; + if (params.containsKey(param)) { + flat = Boolean.parseBoolean(params.get(param)); } - + return flat; + } + + private String buildString(String separator, CSVResult result, String newLine) { + StringBuilder csv = new StringBuilder(); + csv.append(Joiner.on(separator).join(result.getHeaders())); + csv.append(newLine); + csv.append(Joiner.on(newLine).join(result.getLines())); + return csv.toString(); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java index a22d96c133..5a3b3bc498 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import java.util.ArrayList; @@ -31,320 +30,332 @@ import org.opensearch.sql.legacy.expression.model.ExprValue; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 27/12/2015. - */ +/** Created by Eliran on 27/12/2015. */ public class CSVResultsExtractor { - private final boolean includeScore; - private final boolean includeId; - private int currentLineIndex; - - public CSVResultsExtractor(boolean includeScore, boolean includeId) { - this.includeScore = includeScore; - this.includeId = includeId; - this.currentLineIndex = 0; + private final boolean includeScore; + private final boolean includeId; + private int currentLineIndex; + + public CSVResultsExtractor(boolean includeScore, boolean includeId) { + this.includeScore = includeScore; + this.includeId = includeId; + this.currentLineIndex = 0; + } + + public CSVResult extractResults( + Object queryResult, boolean flat, String separator, final List fieldNames) + throws CsvExtractorException { + + if (queryResult instanceof SearchHits) { + SearchHit[] hits = ((SearchHits) queryResult).getHits(); + List> docsAsMap = new ArrayList<>(); + List headers = createHeadersAndFillDocsMap(flat, hits, docsAsMap, fieldNames); + List> csvLines = createCSVLinesFromDocs(flat, separator, docsAsMap, headers); + return new CSVResult(separator, headers, csvLines); } - - public CSVResult extractResults(Object queryResult, boolean flat, String separator, - final List fieldNames) throws CsvExtractorException { - - if 
(queryResult instanceof SearchHits) { - SearchHit[] hits = ((SearchHits) queryResult).getHits(); - List> docsAsMap = new ArrayList<>(); - List headers = createHeadersAndFillDocsMap(flat, hits, docsAsMap, fieldNames); - List> csvLines = createCSVLinesFromDocs(flat, separator, docsAsMap, headers); - return new CSVResult(separator, headers, csvLines); - } - if (queryResult instanceof Aggregations) { - List headers = new ArrayList<>(); - List> lines = new ArrayList<>(); - lines.add(new ArrayList()); - handleAggregations((Aggregations) queryResult, headers, lines); - return new CSVResult(separator, headers, lines); - } - // Handle List result. - if (queryResult instanceof List) { - List bindingTuples = (List) queryResult; - List> csvLines = bindingTuples.stream().map(tuple -> { - Map bindingMap = tuple.getBindingMap(); - List rowValues = new ArrayList<>(); - for (String fieldName : fieldNames) { - if (bindingMap.containsKey(fieldName)) { + if (queryResult instanceof Aggregations) { + List headers = new ArrayList<>(); + List> lines = new ArrayList<>(); + lines.add(new ArrayList()); + handleAggregations((Aggregations) queryResult, headers, lines); + return new CSVResult(separator, headers, lines); + } + // Handle List result. 
+ if (queryResult instanceof List) { + List bindingTuples = (List) queryResult; + List> csvLines = + bindingTuples.stream() + .map( + tuple -> { + Map bindingMap = tuple.getBindingMap(); + List rowValues = new ArrayList<>(); + for (String fieldName : fieldNames) { + if (bindingMap.containsKey(fieldName)) { rowValues.add(String.valueOf(bindingMap.get(fieldName).value())); - } else { + } else { rowValues.add(""); + } } - } - return rowValues; - }).collect(Collectors.toList()); + return rowValues; + }) + .collect(Collectors.toList()); - return new CSVResult(separator, fieldNames, csvLines); - } - return null; + return new CSVResult(separator, fieldNames, csvLines); } - - private void handleAggregations(Aggregations aggregations, List headers, List> lines) - throws CsvExtractorException { - if (allNumericAggregations(aggregations)) { - lines.get(this.currentLineIndex) - .addAll(fillHeaderAndCreateLineForNumericAggregations(aggregations, headers)); - return; - } - //aggregations with size one only supported when not metrics. - List aggregationList = aggregations.asList(); - if (aggregationList.size() > 1) { - throw new CsvExtractorException( - "currently support only one aggregation at same level (Except for numeric metrics)"); - } - Aggregation aggregation = aggregationList.get(0); - //we want to skip singleBucketAggregations (nested,reverse_nested,filters) - if (aggregation instanceof SingleBucketAggregation) { - Aggregations singleBucketAggs = ((SingleBucketAggregation) aggregation).getAggregations(); - handleAggregations(singleBucketAggs, headers, lines); - return; - } - if (aggregation instanceof NumericMetricsAggregation) { - handleNumericMetricAggregation(headers, lines.get(currentLineIndex), aggregation); - return; - } - if (aggregation instanceof GeoBounds) { - handleGeoBoundsAggregation(headers, lines, (GeoBounds) aggregation); - return; - } - if (aggregation instanceof TopHits) { - //todo: handle this . it returns hits... maby back to normal? 
- //todo: read about this usages - // TopHits topHitsAggregation = (TopHits) aggregation; - } - if (aggregation instanceof MultiBucketsAggregation) { - MultiBucketsAggregation bucketsAggregation = (MultiBucketsAggregation) aggregation; - String name = bucketsAggregation.getName(); - //checking because it can comes from sub aggregation again - if (!headers.contains(name)) { - headers.add(name); - } - Collection buckets = bucketsAggregation.getBuckets(); - - //clone current line. - List currentLine = lines.get(this.currentLineIndex); - List clonedLine = new ArrayList<>(currentLine); - - //call handle_Agg with current_line++ - boolean firstLine = true; - for (MultiBucketsAggregation.Bucket bucket : buckets) { - //each bucket need to add new line with current line copied => except for first line - String key = bucket.getKeyAsString(); - if (firstLine) { - firstLine = false; - } else { - currentLineIndex++; - currentLine = new ArrayList(clonedLine); - lines.add(currentLine); - } - currentLine.add(key); - handleAggregations(bucket.getAggregations(), headers, lines); - - } - } + return null; + } + + private void handleAggregations( + Aggregations aggregations, List headers, List> lines) + throws CsvExtractorException { + if (allNumericAggregations(aggregations)) { + lines + .get(this.currentLineIndex) + .addAll(fillHeaderAndCreateLineForNumericAggregations(aggregations, headers)); + return; } - - private void handleGeoBoundsAggregation(List headers, List> lines, - GeoBounds geoBoundsAggregation) { - String geoBoundAggName = geoBoundsAggregation.getName(); - headers.add(geoBoundAggName + ".topLeft.lon"); - headers.add(geoBoundAggName + ".topLeft.lat"); - headers.add(geoBoundAggName + ".bottomRight.lon"); - headers.add(geoBoundAggName + ".bottomRight.lat"); - List line = lines.get(this.currentLineIndex); - line.add(String.valueOf(geoBoundsAggregation.topLeft().getLon())); - line.add(String.valueOf(geoBoundsAggregation.topLeft().getLat())); - 
line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLon())); - line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLat())); - lines.add(line); + // aggregations with size one only supported when not metrics. + List aggregationList = aggregations.asList(); + if (aggregationList.size() > 1) { + throw new CsvExtractorException( + "currently support only one aggregation at same level (Except for numeric metrics)"); } - - private List fillHeaderAndCreateLineForNumericAggregations(Aggregations aggregations, List header) - throws CsvExtractorException { - List line = new ArrayList<>(); - List aggregationList = aggregations.asList(); - for (Aggregation aggregation : aggregationList) { - handleNumericMetricAggregation(header, line, aggregation); - } - return line; + Aggregation aggregation = aggregationList.get(0); + // we want to skip singleBucketAggregations (nested,reverse_nested,filters) + if (aggregation instanceof SingleBucketAggregation) { + Aggregations singleBucketAggs = ((SingleBucketAggregation) aggregation).getAggregations(); + handleAggregations(singleBucketAggs, headers, lines); + return; } - - private void handleNumericMetricAggregation(List header, List line, Aggregation aggregation) - throws CsvExtractorException { - final String name = aggregation.getName(); - - if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - if (!header.contains(name)) { - header.add(name); - } - NumericMetricsAggregation.SingleValue agg = (NumericMetricsAggregation.SingleValue) aggregation; - line.add(!Double.isInfinite(agg.value()) ? agg.getValueAsString() : "null"); - } else if (aggregation instanceof NumericMetricsAggregation.MultiValue) { - //todo:Numeric MultiValue - Stats,ExtendedStats,Percentile... 
- if (aggregation instanceof Stats) { - String[] statsHeaders = new String[]{"count", "sum", "avg", "min", "max"}; - boolean isExtendedStats = aggregation instanceof ExtendedStats; - if (isExtendedStats) { - String[] extendedHeaders = new String[]{"sumOfSquares", "variance", "stdDeviation"}; - statsHeaders = Util.concatStringsArrays(statsHeaders, extendedHeaders); - } - mergeHeadersWithPrefix(header, name, statsHeaders); - Stats stats = (Stats) aggregation; - line.add(String.valueOf(stats.getCount())); - line.add(stats.getSumAsString()); - line.add(stats.getAvgAsString()); - line.add(stats.getMinAsString()); - line.add(stats.getMaxAsString()); - if (isExtendedStats) { - ExtendedStats extendedStats = (ExtendedStats) aggregation; - line.add(extendedStats.getSumOfSquaresAsString()); - line.add(extendedStats.getVarianceAsString()); - line.add(extendedStats.getStdDeviationAsString()); - } - } else if (aggregation instanceof Percentiles) { - - final List percentileHeaders = new ArrayList<>(7); - final Percentiles percentiles = (Percentiles) aggregation; - - for (final Percentile p : percentiles) { - percentileHeaders.add(String.valueOf(p.getPercent())); - line.add(percentiles.percentileAsString(p.getPercent())); - } - mergeHeadersWithPrefix(header, name, percentileHeaders.toArray(new String[0])); - } else { - throw new CsvExtractorException( - "unknown NumericMetricsAggregation.MultiValue:" + aggregation.getClass()); - } - + if (aggregation instanceof NumericMetricsAggregation) { + handleNumericMetricAggregation(headers, lines.get(currentLineIndex), aggregation); + return; + } + if (aggregation instanceof GeoBounds) { + handleGeoBoundsAggregation(headers, lines, (GeoBounds) aggregation); + return; + } + if (aggregation instanceof TopHits) { + // todo: handle this . it returns hits... maby back to normal? 
+ // todo: read about this usages + // TopHits topHitsAggregation = (TopHits) aggregation; + } + if (aggregation instanceof MultiBucketsAggregation) { + MultiBucketsAggregation bucketsAggregation = (MultiBucketsAggregation) aggregation; + String name = bucketsAggregation.getName(); + // checking because it can comes from sub aggregation again + if (!headers.contains(name)) { + headers.add(name); + } + Collection buckets = + bucketsAggregation.getBuckets(); + + // clone current line. + List currentLine = lines.get(this.currentLineIndex); + List clonedLine = new ArrayList<>(currentLine); + + // call handle_Agg with current_line++ + boolean firstLine = true; + for (MultiBucketsAggregation.Bucket bucket : buckets) { + // each bucket need to add new line with current line copied => except for first line + String key = bucket.getKeyAsString(); + if (firstLine) { + firstLine = false; } else { - throw new CsvExtractorException("unknown NumericMetricsAggregation" + aggregation.getClass()); + currentLineIndex++; + currentLine = new ArrayList(clonedLine); + lines.add(currentLine); } + currentLine.add(key); + handleAggregations(bucket.getAggregations(), headers, lines); + } } - - private void mergeHeadersWithPrefix(List header, String prefix, String[] newHeaders) { - for (int i = 0; i < newHeaders.length; i++) { - String newHeader = newHeaders[i]; - if (prefix != null && !prefix.equals("")) { - newHeader = prefix + "." 
+ newHeader; - } - if (!header.contains(newHeader)) { - header.add(newHeader); - } - } + } + + private void handleGeoBoundsAggregation( + List headers, List> lines, GeoBounds geoBoundsAggregation) { + String geoBoundAggName = geoBoundsAggregation.getName(); + headers.add(geoBoundAggName + ".topLeft.lon"); + headers.add(geoBoundAggName + ".topLeft.lat"); + headers.add(geoBoundAggName + ".bottomRight.lon"); + headers.add(geoBoundAggName + ".bottomRight.lat"); + List line = lines.get(this.currentLineIndex); + line.add(String.valueOf(geoBoundsAggregation.topLeft().getLon())); + line.add(String.valueOf(geoBoundsAggregation.topLeft().getLat())); + line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLon())); + line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLat())); + lines.add(line); + } + + private List fillHeaderAndCreateLineForNumericAggregations( + Aggregations aggregations, List header) throws CsvExtractorException { + List line = new ArrayList<>(); + List aggregationList = aggregations.asList(); + for (Aggregation aggregation : aggregationList) { + handleNumericMetricAggregation(header, line, aggregation); } - - private boolean allNumericAggregations(Aggregations aggregations) { - List aggregationList = aggregations.asList(); - for (Aggregation aggregation : aggregationList) { - if (!(aggregation instanceof NumericMetricsAggregation)) { - return false; - } + return line; + } + + private void handleNumericMetricAggregation( + List header, List line, Aggregation aggregation) + throws CsvExtractorException { + final String name = aggregation.getName(); + + if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + if (!header.contains(name)) { + header.add(name); + } + NumericMetricsAggregation.SingleValue agg = + (NumericMetricsAggregation.SingleValue) aggregation; + line.add(!Double.isInfinite(agg.value()) ? 
agg.getValueAsString() : "null"); + } else if (aggregation instanceof NumericMetricsAggregation.MultiValue) { + // todo:Numeric MultiValue - Stats,ExtendedStats,Percentile... + if (aggregation instanceof Stats) { + String[] statsHeaders = new String[] {"count", "sum", "avg", "min", "max"}; + boolean isExtendedStats = aggregation instanceof ExtendedStats; + if (isExtendedStats) { + String[] extendedHeaders = new String[] {"sumOfSquares", "variance", "stdDeviation"}; + statsHeaders = Util.concatStringsArrays(statsHeaders, extendedHeaders); } - return true; - } + mergeHeadersWithPrefix(header, name, statsHeaders); + Stats stats = (Stats) aggregation; + line.add(String.valueOf(stats.getCount())); + line.add(stats.getSumAsString()); + line.add(stats.getAvgAsString()); + line.add(stats.getMinAsString()); + line.add(stats.getMaxAsString()); + if (isExtendedStats) { + ExtendedStats extendedStats = (ExtendedStats) aggregation; + line.add(extendedStats.getSumOfSquaresAsString()); + line.add(extendedStats.getVarianceAsString()); + line.add(extendedStats.getStdDeviationAsString()); + } + } else if (aggregation instanceof Percentiles) { + + final List percentileHeaders = new ArrayList<>(7); + final Percentiles percentiles = (Percentiles) aggregation; - private Aggregation skipAggregations(Aggregation firstAggregation) { - while (firstAggregation instanceof SingleBucketAggregation) { - firstAggregation = getFirstAggregation(((SingleBucketAggregation) firstAggregation).getAggregations()); + for (final Percentile p : percentiles) { + percentileHeaders.add(String.valueOf(p.getPercent())); + line.add(percentiles.percentileAsString(p.getPercent())); } - return firstAggregation; + mergeHeadersWithPrefix(header, name, percentileHeaders.toArray(new String[0])); + } else { + throw new CsvExtractorException( + "unknown NumericMetricsAggregation.MultiValue:" + aggregation.getClass()); + } + + } else { + throw new CsvExtractorException("unknown NumericMetricsAggregation" + 
aggregation.getClass()); } - - private Aggregation getFirstAggregation(Aggregations aggregations) { - return aggregations.asList().get(0); + } + + private void mergeHeadersWithPrefix(List header, String prefix, String[] newHeaders) { + for (int i = 0; i < newHeaders.length; i++) { + String newHeader = newHeaders[i]; + if (prefix != null && !prefix.equals("")) { + newHeader = prefix + "." + newHeader; + } + if (!header.contains(newHeader)) { + header.add(newHeader); + } } + } + + private boolean allNumericAggregations(Aggregations aggregations) { + List aggregationList = aggregations.asList(); + for (Aggregation aggregation : aggregationList) { + if (!(aggregation instanceof NumericMetricsAggregation)) { + return false; + } + } + return true; + } - private List> createCSVLinesFromDocs(boolean flat, String separator, - List> docsAsMap, - List headers) { - List> csvLines = new ArrayList<>(); - for (Map doc : docsAsMap) { - List line = new ArrayList<>(); - for (String header : headers) { - line.add(findFieldValue(header, doc, flat, separator)); - } - csvLines.add(line); - } - return csvLines; + private Aggregation skipAggregations(Aggregation firstAggregation) { + while (firstAggregation instanceof SingleBucketAggregation) { + firstAggregation = + getFirstAggregation(((SingleBucketAggregation) firstAggregation).getAggregations()); + } + return firstAggregation; + } + + private Aggregation getFirstAggregation(Aggregations aggregations) { + return aggregations.asList().get(0); + } + + private List> createCSVLinesFromDocs( + boolean flat, String separator, List> docsAsMap, List headers) { + List> csvLines = new ArrayList<>(); + for (Map doc : docsAsMap) { + List line = new ArrayList<>(); + for (String header : headers) { + line.add(findFieldValue(header, doc, flat, separator)); + } + csvLines.add(line); + } + return csvLines; + } + + private List createHeadersAndFillDocsMap( + final boolean flat, + final SearchHit[] hits, + final List> docsAsMap, + final List fieldNames) 
{ + final Set csvHeaders = new LinkedHashSet<>(); + if (fieldNames != null) { + csvHeaders.addAll(fieldNames); } - private List createHeadersAndFillDocsMap(final boolean flat, final SearchHit[] hits, - final List> docsAsMap, - final List fieldNames) { - final Set csvHeaders = new LinkedHashSet<>(); - if (fieldNames != null) { - csvHeaders.addAll(fieldNames); - } + for (final SearchHit hit : hits) { + final Map doc = hit.getSourceAsMap(); + final Map fields = hit.getFields(); + for (final DocumentField searchHitField : fields.values()) { + doc.put(searchHitField.getName(), searchHitField.getValue()); + } + + if (this.includeId) { + doc.put("_id", hit.getId()); + } + if (this.includeScore) { + doc.put("_score", hit.getScore()); + } + + // select function as field is a special case where each hit has non-null field (function) + // and sourceAsMap is all columns in index (the same as 'SELECT *') + if (fields.isEmpty()) { + mergeHeaders(csvHeaders, doc, flat); + } + docsAsMap.add(doc); + } - for (final SearchHit hit : hits) { - final Map doc = hit.getSourceAsMap(); - final Map fields = hit.getFields(); - for (final DocumentField searchHitField : fields.values()) { - doc.put(searchHitField.getName(), searchHitField.getValue()); - } - - if (this.includeId) { - doc.put("_id", hit.getId()); - } - if (this.includeScore) { - doc.put("_score", hit.getScore()); - } - - // select function as field is a special case where each hit has non-null field (function) - // and sourceAsMap is all columns in index (the same as 'SELECT *') - if (fields.isEmpty()) { - mergeHeaders(csvHeaders, doc, flat); - } - docsAsMap.add(doc); - } + return new ArrayList<>(csvHeaders); + } - return new ArrayList<>(csvHeaders); - } + private String findFieldValue( + String header, Map doc, boolean flat, String separator) { + if (flat && header.contains(".")) { + String[] split = header.split("\\."); + Object innerDoc = doc; - private String findFieldValue(String header, Map doc, boolean flat, String 
separator) { - if (flat && header.contains(".")) { - String[] split = header.split("\\."); - Object innerDoc = doc; - - for (String innerField : split) { - if (!(innerDoc instanceof Map)) { - return ""; - } - innerDoc = ((Map) innerDoc).get(innerField); - if (innerDoc == null) { - return ""; - } - } - return innerDoc.toString(); - } else { - if (doc.containsKey(header)) { - return String.valueOf(doc.get(header)); - } + for (String innerField : split) { + if (!(innerDoc instanceof Map)) { + return ""; } - return ""; - } - - private void mergeHeaders(Set headers, Map doc, boolean flat) { - if (!flat) { - headers.addAll(doc.keySet()); - return; + innerDoc = ((Map) innerDoc).get(innerField); + if (innerDoc == null) { + return ""; } - mergeFieldNamesRecursive(headers, doc, ""); + } + return innerDoc.toString(); + } else { + if (doc.containsKey(header)) { + return String.valueOf(doc.get(header)); + } } + return ""; + } - private void mergeFieldNamesRecursive(Set headers, Map doc, String prefix) { - for (Map.Entry field : doc.entrySet()) { - Object value = field.getValue(); - if (value instanceof Map) { - mergeFieldNamesRecursive(headers, (Map) value, prefix + field.getKey() + "."); - } else { - headers.add(prefix + field.getKey()); - } - } + private void mergeHeaders(Set headers, Map doc, boolean flat) { + if (!flat) { + headers.addAll(doc.keySet()); + return; + } + mergeFieldNamesRecursive(headers, doc, ""); + } + + private void mergeFieldNamesRecursive( + Set headers, Map doc, String prefix) { + for (Map.Entry field : doc.entrySet()) { + Object value = field.getValue(); + if (value instanceof Map) { + mergeFieldNamesRecursive( + headers, (Map) value, prefix + field.getKey() + "."); + } else { + headers.add(prefix + field.getKey()); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java index 7e0f8e8ff9..cb289e4625 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; -/** - * Created by Eliran on 29/12/2015. - */ +/** Created by Eliran on 29/12/2015. */ public class CsvExtractorException extends Exception { - public CsvExtractorException(String message) { - super(message); - } + public CsvExtractorException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java index 7c8ed62a07..b4add64f9c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import org.opensearch.rest.RestRequest; @@ -11,16 +10,17 @@ public class CursorActionRequestRestExecutorFactory { - public static CursorAsyncRestExecutor createExecutor(RestRequest request, String cursorId, Format format) { + public static CursorAsyncRestExecutor createExecutor( + RestRequest request, String cursorId, Format format) { - if (isCursorCloseRequest(request)) { - return new CursorAsyncRestExecutor(new CursorCloseExecutor(cursorId)); - } else { - return new CursorAsyncRestExecutor(new CursorResultExecutor(cursorId, format)); - } + if (isCursorCloseRequest(request)) { + return new CursorAsyncRestExecutor(new CursorCloseExecutor(cursorId)); + } else { + return new CursorAsyncRestExecutor(new CursorResultExecutor(cursorId, format)); } + } - private static boolean isCursorCloseRequest(final RestRequest 
request) { - return request.path().endsWith("/_sql/close"); - } + private static boolean isCursorCloseRequest(final RestRequest request) { + return request.path().endsWith("/_sql/close"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java index 9b8e70c168..ffcf2adbf3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import java.io.IOException; @@ -25,84 +24,83 @@ import org.opensearch.threadpool.ThreadPool; public class CursorAsyncRestExecutor { - /** - * Custom thread pool name managed by OpenSearch - */ - public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; + /** Custom thread pool name managed by OpenSearch */ + public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; - private static final Logger LOG = LogManager.getLogger(CursorAsyncRestExecutor.class); + private static final Logger LOG = LogManager.getLogger(CursorAsyncRestExecutor.class); - /** - * Delegated rest executor to async - */ - private final CursorRestExecutor executor; + /** Delegated rest executor to async */ + private final CursorRestExecutor executor; + CursorAsyncRestExecutor(CursorRestExecutor executor) { + this.executor = executor; + } - CursorAsyncRestExecutor(CursorRestExecutor executor) { - this.executor = executor; - } + public void execute(Client client, Map params, RestChannel channel) { + async(client, params, channel); + } - public void execute(Client client, Map params, RestChannel channel) { - async(client, params, channel); - } + /** Run given task in thread pool asynchronously */ + private void async(Client client, Map params, RestChannel 
channel) { - /** - * Run given task in thread pool asynchronously - */ - private void async(Client client, Map params, RestChannel channel) { - - ThreadPool threadPool = client.threadPool(); - Runnable runnable = () -> { - try { - doExecuteWithTimeMeasured(client, params, channel); - } catch (IOException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an IO/SQL exception: {}", QueryContext.getRequestId(), - e.getMessage()); - e.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } catch (IllegalStateException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got a runtime exception: {}", QueryContext.getRequestId(), - e.getMessage()); - e.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INSUFFICIENT_STORAGE, - "Memory circuit is broken.")); - } catch (Throwable t) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an unknown throwable: {}", QueryContext.getRequestId(), - t.getMessage()); - t.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, - String.valueOf(t.getMessage()))); - } finally { - BackOffRetryStrategy.releaseMem(executor); - } + ThreadPool threadPool = client.threadPool(); + Runnable runnable = + () -> { + try { + doExecuteWithTimeMeasured(client, params, channel); + } catch (IOException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an IO/SQL exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + e.printStackTrace(); + channel.sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } catch (IllegalStateException e) { + 
Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got a runtime exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + e.printStackTrace(); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INSUFFICIENT_STORAGE, "Memory circuit is broken.")); + } catch (Throwable t) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an unknown throwable: {}", + QueryContext.getRequestId(), + t.getMessage()); + t.printStackTrace(); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INTERNAL_SERVER_ERROR, String.valueOf(t.getMessage()))); + } finally { + BackOffRetryStrategy.releaseMem(executor); + } }; - // Preserve context of calling thread to ensure headers of requests are forwarded when running blocking actions - threadPool.schedule( - QueryContext.withCurrentContext(runnable), - new TimeValue(0L), - SQL_WORKER_THREAD_POOL_NAME - ); - } + // Preserve context of calling thread to ensure headers of requests are forwarded when running + // blocking actions + threadPool.schedule( + QueryContext.withCurrentContext(runnable), new TimeValue(0L), SQL_WORKER_THREAD_POOL_NAME); + } - /** - * Time the real execution of Executor and log slow query for troubleshooting - */ - private void doExecuteWithTimeMeasured(Client client, - Map params, - RestChannel channel) throws Exception { - long startTime = System.nanoTime(); - try { - executor.execute(client, params, channel); - } finally { - Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); - int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); - if (elapsed.getSeconds() >= slowLogThreshold) { - LOG.warn("[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); - } - } + /** Time the real execution of Executor and log slow query for troubleshooting */ + private void 
doExecuteWithTimeMeasured( + Client client, Map params, RestChannel channel) throws Exception { + long startTime = System.nanoTime(); + try { + executor.execute(client, params, channel); + } finally { + Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); + int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); + if (elapsed.getSeconds() >= slowLogThreshold) { + LOG.warn( + "[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java index 98e89c12e4..7282eaed4c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import static org.opensearch.core.rest.RestStatus.OK; @@ -25,66 +24,69 @@ public class CursorCloseExecutor implements CursorRestExecutor { - private static final Logger LOG = LogManager.getLogger(CursorCloseExecutor.class); - - private static final String SUCCEEDED_TRUE = "{\"succeeded\":true}"; - private static final String SUCCEEDED_FALSE = "{\"succeeded\":false}"; - - private String cursorId; - - public CursorCloseExecutor(String cursorId) { - this.cursorId = cursorId; - } - - public void execute(Client client, Map params, RestChannel channel) throws Exception { - try { - String formattedResponse = execute(client, params); - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); - } catch (IllegalArgumentException | JSONException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - LOG.error("Error parsing the cursor", e); - 
channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (OpenSearchException e) { - int status = (e.status().getStatus()); - if (status > 399 && status < 500) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else if (status > 499) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - LOG.error("Error completing cursor request", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } + private static final Logger LOG = LogManager.getLogger(CursorCloseExecutor.class); + + private static final String SUCCEEDED_TRUE = "{\"succeeded\":true}"; + private static final String SUCCEEDED_FALSE = "{\"succeeded\":false}"; + + private String cursorId; + + public CursorCloseExecutor(String cursorId) { + this.cursorId = cursorId; + } + + public void execute(Client client, Map params, RestChannel channel) + throws Exception { + try { + String formattedResponse = execute(client, params); + channel.sendResponse( + new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); + } catch (IllegalArgumentException | JSONException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + LOG.error("Error parsing the cursor", e); + channel.sendResponse(new BytesRestResponse(channel, e)); + } catch (OpenSearchException e) { + int status = (e.status().getStatus()); + if (status > 399 && status < 500) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else if (status > 499) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + } + LOG.error("Error completing cursor request", e); + channel.sendResponse(new BytesRestResponse(channel, e)); } + } - public String execute(Client client, Map params) throws Exception { - String[] splittedCursor = cursorId.split(":"); - - if (splittedCursor.length!=2) { - throw new VerificationException("Not able to 
parse invalid cursor"); - } - - String type = splittedCursor[0]; - CursorType cursorType = CursorType.getById(type); - - switch(cursorType) { - case DEFAULT: - DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); - return handleDefaultCursorCloseRequest(client, defaultCursor); - case AGGREGATION: - case JOIN: - default: throw new VerificationException("Unsupported cursor type [" + type + "]"); - } + public String execute(Client client, Map params) throws Exception { + String[] splittedCursor = cursorId.split(":"); + if (splittedCursor.length != 2) { + throw new VerificationException("Not able to parse invalid cursor"); } - private String handleDefaultCursorCloseRequest(Client client, DefaultCursor cursor) { - String scrollId = cursor.getScrollId(); - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(scrollId).get(); - if (clearScrollResponse.isSucceeded()) { - return SUCCEEDED_TRUE; - } else { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - return SUCCEEDED_FALSE; - } + String type = splittedCursor[0]; + CursorType cursorType = CursorType.getById(type); + + switch (cursorType) { + case DEFAULT: + DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); + return handleDefaultCursorCloseRequest(client, defaultCursor); + case AGGREGATION: + case JOIN: + default: + throw new VerificationException("Unsupported cursor type [" + type + "]"); + } + } + + private String handleDefaultCursorCloseRequest(Client client, DefaultCursor cursor) { + String scrollId = cursor.getScrollId(); + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(scrollId).get(); + if (clearScrollResponse.isSucceeded()) { + return SUCCEEDED_TRUE; + } else { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + return SUCCEEDED_FALSE; } + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java index 5f294f8e32..4c4b854379 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java @@ -3,21 +3,16 @@ * SPDX-License-Identifier: Apache-2.0 */ - - package org.opensearch.sql.legacy.executor.cursor; import java.util.Map; import org.opensearch.client.Client; import org.opensearch.rest.RestChannel; -/** - * Interface to execute cursor request. - */ +/** Interface to execute cursor request. */ public interface CursorRestExecutor { - void execute(Client client, Map params, RestChannel channel) - throws Exception; + void execute(Client client, Map params, RestChannel channel) throws Exception; - String execute(Client client, Map params) throws Exception; + String execute(Client client, Map params) throws Exception; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java index 9753f8049c..620b8e7b86 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import static org.opensearch.core.rest.RestStatus.OK; @@ -34,99 +33,105 @@ public class CursorResultExecutor implements CursorRestExecutor { - private String cursorId; - private Format format; - - private static final Logger LOG = LogManager.getLogger(CursorResultExecutor.class); - - public CursorResultExecutor(String cursorId, Format format) { - this.cursorId = cursorId; - this.format = format; + private String cursorId; + 
private Format format; + + private static final Logger LOG = LogManager.getLogger(CursorResultExecutor.class); + + public CursorResultExecutor(String cursorId, Format format) { + this.cursorId = cursorId; + this.format = format; + } + + public void execute(Client client, Map params, RestChannel channel) + throws Exception { + try { + String formattedResponse = execute(client, params); + channel.sendResponse( + new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); + } catch (IllegalArgumentException | JSONException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + LOG.error("Error parsing the cursor", e); + channel.sendResponse(new BytesRestResponse(channel, e)); + } catch (OpenSearchException e) { + int status = (e.status().getStatus()); + if (status > 399 && status < 500) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else if (status > 499) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + } + LOG.error("Error completing cursor request", e); + channel.sendResponse(new BytesRestResponse(channel, e)); } + } - public void execute(Client client, Map params, RestChannel channel) throws Exception { - try { - String formattedResponse = execute(client, params); - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); - } catch (IllegalArgumentException | JSONException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - LOG.error("Error parsing the cursor", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (OpenSearchException e) { - int status = (e.status().getStatus()); - if (status > 399 && status < 500) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else if (status > 499) { - 
Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - LOG.error("Error completing cursor request", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } - } - - public String execute(Client client, Map params) throws Exception { - /** - * All cursor's are of the form : - * The serialized form before encoding is upto Cursor implementation - */ - String[] splittedCursor = cursorId.split(":", 2); + public String execute(Client client, Map params) throws Exception { + /** + * All cursor's are of the form : The serialized form before + * encoding is upto Cursor implementation + */ + String[] splittedCursor = cursorId.split(":", 2); - if (splittedCursor.length!=2) { - throw new VerificationException("Not able to parse invalid cursor"); - } - - String type = splittedCursor[0]; - CursorType cursorType = CursorType.getById(type); + if (splittedCursor.length != 2) { + throw new VerificationException("Not able to parse invalid cursor"); + } - switch(cursorType) { - case DEFAULT: - DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); - return handleDefaultCursorRequest(client, defaultCursor); - case AGGREGATION: - case JOIN: - default: throw new VerificationException("Unsupported cursor type [" + type + "]"); - } + String type = splittedCursor[0]; + CursorType cursorType = CursorType.getById(type); + + switch (cursorType) { + case DEFAULT: + DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); + return handleDefaultCursorRequest(client, defaultCursor); + case AGGREGATION: + case JOIN: + default: + throw new VerificationException("Unsupported cursor type [" + type + "]"); } + } - private String handleDefaultCursorRequest(Client client, DefaultCursor cursor) { - String previousScrollId = cursor.getScrollId(); - LocalClusterState clusterState = LocalClusterState.state(); - TimeValue scrollTimeout = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - SearchResponse scrollResponse = 
client.prepareSearchScroll(previousScrollId).setScroll(scrollTimeout).get(); - SearchHits searchHits = scrollResponse.getHits(); - SearchHit[] searchHitArray = searchHits.getHits(); - String newScrollId = scrollResponse.getScrollId(); + private String handleDefaultCursorRequest(Client client, DefaultCursor cursor) { + String previousScrollId = cursor.getScrollId(); + LocalClusterState clusterState = LocalClusterState.state(); + TimeValue scrollTimeout = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + SearchResponse scrollResponse = + client.prepareSearchScroll(previousScrollId).setScroll(scrollTimeout).get(); + SearchHits searchHits = scrollResponse.getHits(); + SearchHit[] searchHitArray = searchHits.getHits(); + String newScrollId = scrollResponse.getScrollId(); - int rowsLeft = (int) cursor.getRowsLeft(); - int fetch = cursor.getFetchSize(); + int rowsLeft = (int) cursor.getRowsLeft(); + int fetch = cursor.getFetchSize(); if (rowsLeft < fetch && rowsLeft < searchHitArray.length) { /** * This condition implies we are on the last page, and we might need to truncate the result from SearchHit[] * Avoid truncating in following two scenarios - * 1. number of rows to be sent equals fetchSize - * 2. size of SearchHit[] is already less that rows that needs to be sent - * + *
    + *
  1. number of rows to be sent equals fetchSize + *
  2. size of SearchHit[] is already less that rows that needs to be sent + *
* Else truncate to desired number of rows */ SearchHit[] newSearchHits = Arrays.copyOf(searchHitArray, rowsLeft); searchHits = new SearchHits(newSearchHits, searchHits.getTotalHits(), searchHits.getMaxScore()); } - rowsLeft = rowsLeft - fetch; + rowsLeft = rowsLeft - fetch; - if (rowsLeft <=0) { - /** Clear the scroll context on last page */ - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(newScrollId).get(); - if (!clearScrollResponse.isSucceeded()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.info("Error closing the cursor context {} ", newScrollId); - } - } - - cursor.setRowsLeft(rowsLeft); - cursor.setScrollId(newScrollId); - Protocol protocol = new Protocol(client, searchHits, format.name().toLowerCase(), cursor); - return protocol.cursorFormat(); + if (rowsLeft <= 0) { + /** Clear the scroll context on last page */ + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(newScrollId).get(); + if (!clearScrollResponse.isSucceeded()) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.info("Error closing the cursor context {} ", newScrollId); + } } + + cursor.setRowsLeft(rowsLeft); + cursor.setScrollId(newScrollId); + Protocol protocol = new Protocol(client, searchHits, format.name().toLowerCase(), cursor); + return protocol.cursorFormat(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java index d9eb463572..872442f04f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static 
org.opensearch.sql.legacy.executor.format.DateFieldFormatter.FORMAT_JDBC; @@ -18,43 +17,44 @@ import org.opensearch.sql.legacy.expression.model.ExprValue; import org.opensearch.sql.legacy.query.planner.core.ColumnNode; -/** - * The definition of BindingTuple ResultSet. - */ +/** The definition of BindingTuple ResultSet. */ public class BindingTupleResultSet extends ResultSet { - public BindingTupleResultSet(List columnNodes, List bindingTuples) { - this.schema = buildSchema(columnNodes); - this.dataRows = buildDataRows(columnNodes, bindingTuples); - } - - @VisibleForTesting - public static Schema buildSchema(List columnNodes) { - List columnList = columnNodes.stream() - .map(node -> new Schema.Column( - node.getName(), - node.getAlias(), - node.getType())) - .collect(Collectors.toList()); - return new Schema(columnList); - } - - @VisibleForTesting - public static DataRows buildDataRows(List columnNodes, List bindingTuples) { - List rowList = bindingTuples.stream().map(tuple -> { - Map bindingMap = tuple.getBindingMap(); - Map rowMap = new HashMap<>(); - for (ColumnNode column : columnNodes) { - String columnName = column.columnName(); - Object value = bindingMap.get(columnName).value(); - if (column.getType() == Schema.Type.DATE) { - value = DateFormat.getFormattedDate(new Date((Long) value), FORMAT_JDBC); - } - rowMap.put(columnName, value); - } - return new DataRows.Row(rowMap); - }).collect(Collectors.toList()); - - return new DataRows(bindingTuples.size(), bindingTuples.size(), rowList); - } + public BindingTupleResultSet(List columnNodes, List bindingTuples) { + this.schema = buildSchema(columnNodes); + this.dataRows = buildDataRows(columnNodes, bindingTuples); + } + + @VisibleForTesting + public static Schema buildSchema(List columnNodes) { + List columnList = + columnNodes.stream() + .map(node -> new Schema.Column(node.getName(), node.getAlias(), node.getType())) + .collect(Collectors.toList()); + return new Schema(columnList); + } + + @VisibleForTesting + 
public static DataRows buildDataRows( + List columnNodes, List bindingTuples) { + List rowList = + bindingTuples.stream() + .map( + tuple -> { + Map bindingMap = tuple.getBindingMap(); + Map rowMap = new HashMap<>(); + for (ColumnNode column : columnNodes) { + String columnName = column.columnName(); + Object value = bindingMap.get(columnName).value(); + if (column.getType() == Schema.Type.DATE) { + value = DateFormat.getFormattedDate(new Date((Long) value), FORMAT_JDBC); + } + rowMap.put(columnName, value); + } + return new DataRows.Row(rowMap); + }) + .collect(Collectors.toList()); + + return new DataRows(bindingTuples.size(), bindingTuples.size(), rowList); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java index 541d3200a5..fc153afae8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Iterator; @@ -12,76 +11,76 @@ public class DataRows implements Iterable { - private long size; - private long totalHits; - private List rows; - - public DataRows(long size, long totalHits, List rows) { - this.size = size; - this.totalHits = totalHits; - this.rows = rows; + private long size; + private long totalHits; + private List rows; + + public DataRows(long size, long totalHits, List rows) { + this.size = size; + this.totalHits = totalHits; + this.rows = rows; + } + + public DataRows(List rows) { + this.size = rows.size(); + this.totalHits = rows.size(); + this.rows = rows; + } + + public long getSize() { + return size; + } + + public long getTotalHits() { + return totalHits; + } + + // Iterator method for DataRows + @Override + public Iterator iterator() { + return new Iterator() { + private final Iterator 
iter = rows.iterator(); + + @Override + public boolean hasNext() { + return iter.hasNext(); + } + + @Override + public Row next() { + return iter.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("No changes allowed to DataRows rows"); + } + }; + } + + // Inner class for Row object + public static class Row { + + private Map data; + + public Row(Map data) { + this.data = data; } - public DataRows(List rows) { - this.size = rows.size(); - this.totalHits = rows.size(); - this.rows = rows; + public Map getContents() { + return data; } - public long getSize() { - return size; + public boolean hasField(String field) { + return data.containsKey(field); } - public long getTotalHits() { - return totalHits; + public Object getData(String field) { + return data.get(field); } - // Iterator method for DataRows - @Override - public Iterator iterator() { - return new Iterator() { - private final Iterator iter = rows.iterator(); - - @Override - public boolean hasNext() { - return iter.hasNext(); - } - - @Override - public Row next() { - return iter.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("No changes allowed to DataRows rows"); - } - }; - } - - // Inner class for Row object - public static class Row { - - private Map data; - - public Row(Map data) { - this.data = data; - } - - public Map getContents() { - return data; - } - - public boolean hasField(String field) { - return data.containsKey(field); - } - - public Object getData(String field) { - return data.get(field); - } - - public Object getDataOrDefault(String field, Object defaultValue) { - return data.getOrDefault(field, defaultValue); - } + public Object getDataOrDefault(String field, Object defaultValue) { + return data.getOrDefault(field, defaultValue); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java index aa803975df..dc239abd84 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import com.google.common.annotations.VisibleForTesting; @@ -23,163 +22,169 @@ import org.opensearch.sql.legacy.esdomain.LocalClusterState; import org.opensearch.sql.legacy.esdomain.mapping.FieldMappings; -/** - * Formatter to transform date fields into a consistent format for consumption by clients. - */ +/** Formatter to transform date fields into a consistent format for consumption by clients. */ public class DateFieldFormatter { - private static final Logger LOG = LogManager.getLogger(DateFieldFormatter.class); - public static final String FORMAT_JDBC = "yyyy-MM-dd HH:mm:ss.SSS"; - private static final String FORMAT_DELIMITER = "\\|\\|"; - - private static final String FORMAT_DOT_DATE_AND_TIME = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ss"; - private static final String - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME = "yyyy-MM-dd'T'"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ssXXX"; - private static final String FORMAT_DOT_DATE = DateFormat.getFormatString("date"); - - private final Map> dateFieldFormatMap; - private final Map fieldAliasMap; - private Set dateColumns; - - public DateFieldFormatter(String indexName, List columns, Map fieldAliasMap) { - this.dateFieldFormatMap = getDateFieldFormatMap(indexName); - 
this.dateColumns = getDateColumns(columns); - this.fieldAliasMap = fieldAliasMap; + private static final Logger LOG = LogManager.getLogger(DateFieldFormatter.class); + public static final String FORMAT_JDBC = "yyyy-MM-dd HH:mm:ss.SSS"; + private static final String FORMAT_DELIMITER = "\\|\\|"; + + private static final String FORMAT_DOT_DATE_AND_TIME = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ss"; + private static final String + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME = "yyyy-MM-dd'T'"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ssXXX"; + private static final String FORMAT_DOT_DATE = DateFormat.getFormatString("date"); + + private final Map> dateFieldFormatMap; + private final Map fieldAliasMap; + private Set dateColumns; + + public DateFieldFormatter( + String indexName, List columns, Map fieldAliasMap) { + this.dateFieldFormatMap = getDateFieldFormatMap(indexName); + this.dateColumns = getDateColumns(columns); + this.fieldAliasMap = fieldAliasMap; + } + + @VisibleForTesting + protected DateFieldFormatter( + Map> dateFieldFormatMap, + List columns, + Map fieldAliasMap) { + this.dateFieldFormatMap = dateFieldFormatMap; + this.dateColumns = getDateColumns(columns); + this.fieldAliasMap = fieldAliasMap; + } + + /** + * Apply the JDBC date format ({@code yyyy-MM-dd HH:mm:ss.SSS}) to date values in the current row. + * + * @param rowSource The row in which to format the date values. 
+ */ + public void applyJDBCDateFormat(Map rowSource) { + for (String columnName : dateColumns) { + Object columnOriginalDate = rowSource.get(columnName); + if (columnOriginalDate == null) { + // Don't try to parse null date values + continue; + } + + List formats = getFormatsForColumn(columnName); + if (formats == null) { + LOG.warn( + "Could not determine date formats for column {}; returning original value", columnName); + continue; + } + + Date date = parseDateString(formats, columnOriginalDate.toString()); + if (date != null) { + rowSource.put(columnName, DateFormat.getFormattedDate(date, FORMAT_JDBC)); + break; + } else { + LOG.warn("Could not parse date value; returning original value"); + } } - - @VisibleForTesting - protected DateFieldFormatter(Map> dateFieldFormatMap, - List columns, - Map fieldAliasMap) { - this.dateFieldFormatMap = dateFieldFormatMap; - this.dateColumns = getDateColumns(columns); - this.fieldAliasMap = fieldAliasMap; + } + + private List getFormatsForColumn(String columnName) { + // Handle special cases for column names + if (fieldAliasMap.get(columnName) != null) { + // Column was aliased, and we need to find the base name for the column + columnName = fieldAliasMap.get(columnName); + } else if (columnName.split("\\.").length == 2) { + // Column is part of a join, and is qualified by the table alias + columnName = columnName.split("\\.")[1]; } - - /** - * Apply the JDBC date format ({@code yyyy-MM-dd HH:mm:ss.SSS}) to date values in the current row. - * - * @param rowSource The row in which to format the date values. 
- */ - public void applyJDBCDateFormat(Map rowSource) { - for (String columnName : dateColumns) { - Object columnOriginalDate = rowSource.get(columnName); - if (columnOriginalDate == null) { - // Don't try to parse null date values - continue; - } - - List formats = getFormatsForColumn(columnName); - if (formats == null) { - LOG.warn("Could not determine date formats for column {}; returning original value", columnName); - continue; - } - - Date date = parseDateString(formats, columnOriginalDate.toString()); - if (date != null) { - rowSource.put(columnName, DateFormat.getFormattedDate(date, FORMAT_JDBC)); - break; - } else { - LOG.warn("Could not parse date value; returning original value"); - } + return dateFieldFormatMap.get(columnName); + } + + private Set getDateColumns(List columns) { + return columns.stream() + .filter(column -> column.getType().equals(Schema.Type.DATE.nameLowerCase())) + .map(Schema.Column::getName) + .collect(Collectors.toSet()); + } + + private Map> getDateFieldFormatMap(String indexName) { + LocalClusterState state = LocalClusterState.state(); + Map> formatMap = new HashMap<>(); + + String[] indices = indexName.split("\\|"); + Collection typeProperties = state.getFieldMappings(indices).allMappings(); + + for (FieldMappings fieldMappings : typeProperties) { + for (Map.Entry> field : fieldMappings.data().entrySet()) { + String fieldName = field.getKey(); + Map properties = field.getValue(); + + if (properties.containsKey("format")) { + formatMap.put(fieldName, getFormatsFromProperties(properties.get("format").toString())); + } else { + // Give all field types a format, since operations such as casts + // can change the output type for a field to `date`. 
+ formatMap.put(fieldName, getFormatsFromProperties("date_optional_time")); } + } } - private List getFormatsForColumn(String columnName) { - // Handle special cases for column names - if (fieldAliasMap.get(columnName) != null) { - // Column was aliased, and we need to find the base name for the column - columnName = fieldAliasMap.get(columnName); - } else if (columnName.split("\\.").length == 2) { - // Column is part of a join, and is qualified by the table alias - columnName = columnName.split("\\.")[1]; - } - return dateFieldFormatMap.get(columnName); - } - - private Set getDateColumns(List columns) { - return columns.stream() - .filter(column -> column.getType().equals(Schema.Type.DATE.nameLowerCase())) - .map(Schema.Column::getName) - .collect(Collectors.toSet()); - } - - private Map> getDateFieldFormatMap(String indexName) { - LocalClusterState state = LocalClusterState.state(); - Map> formatMap = new HashMap<>(); - - String[] indices = indexName.split("\\|"); - Collection typeProperties = state.getFieldMappings(indices) - .allMappings(); - - for (FieldMappings fieldMappings: typeProperties) { - for (Map.Entry> field : fieldMappings.data().entrySet()) { - String fieldName = field.getKey(); - Map properties = field.getValue(); - - if (properties.containsKey("format")) { - formatMap.put(fieldName, getFormatsFromProperties(properties.get("format").toString())); - } else { - // Give all field types a format, since operations such as casts - // can change the output type for a field to `date`. 
- formatMap.put(fieldName, getFormatsFromProperties("date_optional_time")); - } + return formatMap; + } + + private List getFormatsFromProperties(String formatProperty) { + String[] formats = formatProperty.split(FORMAT_DELIMITER); + return Arrays.asList(formats); + } + + private Date parseDateString(List formats, String columnOriginalDate) { + TimeZone originalDefaultTimeZone = TimeZone.getDefault(); + Date parsedDate = null; + + // Apache Commons DateUtils uses the default TimeZone for the JVM when parsing. + // However, since all dates on OpenSearch are stored as UTC, we need to + // parse these values using the UTC timezone. + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + for (String columnFormat : formats) { + try { + switch (columnFormat) { + case "date_optional_time": + case "strict_date_optional_time": + parsedDate = + DateUtils.parseDate( + columnOriginalDate, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION, + FORMAT_DOT_DATE_AND_TIME, + FORMAT_DOT_DATE); + break; + case "epoch_millis": + parsedDate = new Date(Long.parseLong(columnOriginalDate)); + break; + case "epoch_second": + parsedDate = new Date(Long.parseLong(columnOriginalDate) * 1000); + break; + default: + String formatString = DateFormat.getFormatString(columnFormat); + if (formatString == null) { + // Custom format; take as-is + formatString = columnFormat; } + parsedDate = DateUtils.parseDate(columnOriginalDate, formatString); } - - return formatMap; - } - - private List getFormatsFromProperties(String formatProperty) { - String[] formats = formatProperty.split(FORMAT_DELIMITER); - return Arrays.asList(formats); + } catch (ParseException | NumberFormatException e) { + LOG.warn( + String.format( + "Could not parse date string %s as %s", columnOriginalDate, 
columnFormat)); + } } + // Reset default timezone after parsing + TimeZone.setDefault(originalDefaultTimeZone); - private Date parseDateString(List formats, String columnOriginalDate) { - TimeZone originalDefaultTimeZone = TimeZone.getDefault(); - Date parsedDate = null; - - // Apache Commons DateUtils uses the default TimeZone for the JVM when parsing. - // However, since all dates on OpenSearch are stored as UTC, we need to - // parse these values using the UTC timezone. - TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - for (String columnFormat : formats) { - try { - switch (columnFormat) { - case "date_optional_time": - case "strict_date_optional_time": - parsedDate = DateUtils.parseDate( - columnOriginalDate, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION, - FORMAT_DOT_DATE_AND_TIME, - FORMAT_DOT_DATE); - break; - case "epoch_millis": - parsedDate = new Date(Long.parseLong(columnOriginalDate)); - break; - case "epoch_second": - parsedDate = new Date(Long.parseLong(columnOriginalDate) * 1000); - break; - default: - String formatString = DateFormat.getFormatString(columnFormat); - if (formatString == null) { - // Custom format; take as-is - formatString = columnFormat; - } - parsedDate = DateUtils.parseDate(columnOriginalDate, formatString); - } - } catch (ParseException | NumberFormatException e) { - LOG.warn(String.format("Could not parse date string %s as %s", columnOriginalDate, columnFormat)); - } - } - // Reset default timezone after parsing - TimeZone.setDefault(originalDefaultTimeZone); - - return parsedDate; - } + return parsedDate; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java index 
40151c9413..fc9237918c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.time.Instant; @@ -15,112 +14,121 @@ public class DateFormat { - private static Map formatMap = new HashMap<>(); - - static { - // Special cases that are parsed separately - formatMap.put("date_optional_time", ""); - formatMap.put("strict_date_optional_time", ""); - formatMap.put("epoch_millis", ""); - formatMap.put("epoch_second", ""); - - formatMap.put("basic_date", Date.BASIC_DATE); - formatMap.put("basic_date_time", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_date_time_no_millis", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_ordinal_date", Date.BASIC_ORDINAL_DATE); - formatMap.put("basic_ordinal_date_time", - Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_ordinal_date_time_no_millis", Date.BASIC_ORDINAL_DATE+ Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_time", Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_time_no_millis", Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_t_time", Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_t_time_no_millis", Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_week_date", Date.BASIC_WEEK_DATE); - formatMap.put("basic_week_date_time", Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_week_date_time_no_millis", Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("date", Date.DATE); - formatMap.put("date_hour", Date.DATE + Time.T + Time.HOUR); - formatMap.put("date_hour_minute", Date.DATE + Time.T + Time.HOUR_MINUTE); - 
formatMap.put("date_hour_minute_second", Date.DATE + Time.T + Time.TIME); - formatMap.put("date_hour_minute_second_fraction", Date.DATE + Time.T + Time.TIME + Time.MILLIS); - formatMap.put("date_hour_minute_second_millis", Date.DATE + Time.T + Time.TIME + Time.MILLIS); - formatMap.put("date_time", Date.DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("date_time_no_millis", Date.DATE + Time.T + Time.TIME + Time.TZZ); - - formatMap.put("hour", Time.HOUR); - formatMap.put("hour_minute", Time.HOUR_MINUTE); - formatMap.put("hour_minute_second", Time.TIME); - formatMap.put("hour_minute_second_fraction", Time.TIME + Time.MILLIS); - formatMap.put("hour_minute_second_millis", Time.TIME + Time.MILLIS); - - formatMap.put("ordinal_date", Date.ORDINAL_DATE); - formatMap.put("ordinal_date_time", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("ordinal_date_time_no_millis", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.TZZ); - - formatMap.put("time", Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("time_no_millis", Time.TIME + Time.TZZ); - - formatMap.put("t_time", Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("t_time_no_millis", Time.T + Time.TIME + Time.TZZ); - - formatMap.put("week_date", Date.WEEK_DATE); - formatMap.put("week_date_time", Date.WEEK_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("week_date_time_no_millis", Date.WEEK_DATE + Time.T + Time.TIME + Time.TZZ); - - // Note: input mapping is "weekyear", but output value is "week_year" - formatMap.put("week_year", Date.WEEKYEAR); - formatMap.put("weekyear_week", Date.WEEKYEAR_WEEK); - formatMap.put("weekyear_week_day", Date.WEEK_DATE); - - formatMap.put("year", Date.YEAR); - formatMap.put("year_month", Date.YEAR_MONTH); - formatMap.put("year_month_day", Date.DATE); - } - - private DateFormat() { - } - - public static String getFormatString(String formatName) { - return formatMap.get(formatName); - } - - public static String 
getFormattedDate(java.util.Date date, String dateFormat) { - Instant instant = date.toInstant(); - ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of("Etc/UTC")); - return zdt.format(DateTimeFormatter.ofPattern(dateFormat)); - } - - private static class Date { - static String BASIC_DATE = "yyyyMMdd"; - static String BASIC_ORDINAL_DATE = "yyyyDDD"; - static String BASIC_WEEK_DATE = "YYYY'W'wwu"; - - static String DATE = "yyyy-MM-dd"; - static String ORDINAL_DATE = "yyyy-DDD"; - - static String YEAR = "yyyy"; - static String YEAR_MONTH = "yyyy-MM"; - - static String WEEK_DATE = "YYYY-'W'ww-u"; - static String WEEKYEAR = "YYYY"; - static String WEEKYEAR_WEEK = "YYYY-'W'ww"; - } - - private static class Time { - static String T = "'T'"; - static String BASIC_TIME = "HHmmss"; - static String TIME = "HH:mm:ss"; - - static String HOUR = "HH"; - static String HOUR_MINUTE = "HH:mm"; - - static String MILLIS = ".SSS"; - static String TZ = "Z"; - static String TZZ = "XX"; - } + private static Map formatMap = new HashMap<>(); + + static { + // Special cases that are parsed separately + formatMap.put("date_optional_time", ""); + formatMap.put("strict_date_optional_time", ""); + formatMap.put("epoch_millis", ""); + formatMap.put("epoch_second", ""); + + formatMap.put("basic_date", Date.BASIC_DATE); + formatMap.put( + "basic_date_time", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_date_time_no_millis", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_ordinal_date", Date.BASIC_ORDINAL_DATE); + formatMap.put( + "basic_ordinal_date_time", + Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_ordinal_date_time_no_millis", + Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_time", Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put("basic_time_no_millis", Time.BASIC_TIME + Time.TZ); + + 
formatMap.put("basic_t_time", Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put("basic_t_time_no_millis", Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_week_date", Date.BASIC_WEEK_DATE); + formatMap.put( + "basic_week_date_time", + Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_week_date_time_no_millis", + Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("date", Date.DATE); + formatMap.put("date_hour", Date.DATE + Time.T + Time.HOUR); + formatMap.put("date_hour_minute", Date.DATE + Time.T + Time.HOUR_MINUTE); + formatMap.put("date_hour_minute_second", Date.DATE + Time.T + Time.TIME); + formatMap.put("date_hour_minute_second_fraction", Date.DATE + Time.T + Time.TIME + Time.MILLIS); + formatMap.put("date_hour_minute_second_millis", Date.DATE + Time.T + Time.TIME + Time.MILLIS); + formatMap.put("date_time", Date.DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("date_time_no_millis", Date.DATE + Time.T + Time.TIME + Time.TZZ); + + formatMap.put("hour", Time.HOUR); + formatMap.put("hour_minute", Time.HOUR_MINUTE); + formatMap.put("hour_minute_second", Time.TIME); + formatMap.put("hour_minute_second_fraction", Time.TIME + Time.MILLIS); + formatMap.put("hour_minute_second_millis", Time.TIME + Time.MILLIS); + + formatMap.put("ordinal_date", Date.ORDINAL_DATE); + formatMap.put( + "ordinal_date_time", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("ordinal_date_time_no_millis", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.TZZ); + + formatMap.put("time", Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("time_no_millis", Time.TIME + Time.TZZ); + + formatMap.put("t_time", Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("t_time_no_millis", Time.T + Time.TIME + Time.TZZ); + + formatMap.put("week_date", Date.WEEK_DATE); + formatMap.put("week_date_time", Date.WEEK_DATE + Time.T + Time.TIME + 
Time.MILLIS + Time.TZZ); + formatMap.put("week_date_time_no_millis", Date.WEEK_DATE + Time.T + Time.TIME + Time.TZZ); + + // Note: input mapping is "weekyear", but output value is "week_year" + formatMap.put("week_year", Date.WEEKYEAR); + formatMap.put("weekyear_week", Date.WEEKYEAR_WEEK); + formatMap.put("weekyear_week_day", Date.WEEK_DATE); + + formatMap.put("year", Date.YEAR); + formatMap.put("year_month", Date.YEAR_MONTH); + formatMap.put("year_month_day", Date.DATE); + } + + private DateFormat() {} + + public static String getFormatString(String formatName) { + return formatMap.get(formatName); + } + + public static String getFormattedDate(java.util.Date date, String dateFormat) { + Instant instant = date.toInstant(); + ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of("Etc/UTC")); + return zdt.format(DateTimeFormatter.ofPattern(dateFormat)); + } + + private static class Date { + static String BASIC_DATE = "yyyyMMdd"; + static String BASIC_ORDINAL_DATE = "yyyyDDD"; + static String BASIC_WEEK_DATE = "YYYY'W'wwu"; + + static String DATE = "yyyy-MM-dd"; + static String ORDINAL_DATE = "yyyy-DDD"; + + static String YEAR = "yyyy"; + static String YEAR_MONTH = "yyyy-MM"; + + static String WEEK_DATE = "YYYY-'W'ww-u"; + static String WEEKYEAR = "YYYY"; + static String WEEKYEAR_WEEK = "YYYY-'W'ww"; + } + + private static class Time { + static String T = "'T'"; + static String BASIC_TIME = "HHmmss"; + static String TIME = "HH:mm:ss"; + + static String HOUR = "HH"; + static String HOUR_MINUTE = "HH:mm"; + + static String MILLIS = ".SSS"; + static String TZ = "Z"; + static String TZZ = "XX"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java index ccecacc432..24afb0a7af 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Collections; @@ -14,28 +13,28 @@ import org.opensearch.sql.legacy.domain.Delete; public class DeleteResultSet extends ResultSet { - private Delete query; - private Object queryResult; - - public static final String DELETED = "deleted_rows"; - - public DeleteResultSet(Client client, Delete query, Object queryResult) { - this.client = client; - this.query = query; - this.queryResult = queryResult; - this.schema = new Schema(loadColumns()); - this.dataRows = new DataRows(loadRows()); - } - - private List loadColumns() { - return Collections.singletonList(new Schema.Column(DELETED, null, Schema.Type.LONG)); - } - - private List loadRows() { - return Collections.singletonList(new DataRows.Row(loadDeletedData())); - } - - private Map loadDeletedData(){ - return Collections.singletonMap(DELETED, ((BulkByScrollResponse) queryResult).getDeleted()); - } + private Delete query; + private Object queryResult; + + public static final String DELETED = "deleted_rows"; + + public DeleteResultSet(Client client, Delete query, Object queryResult) { + this.client = client; + this.query = query; + this.queryResult = queryResult; + this.schema = new Schema(loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + return Collections.singletonList(new Schema.Column(DELETED, null, Schema.Type.LONG)); + } + + private List loadRows() { + return Collections.singletonList(new DataRows.Row(loadDeletedData())); + } + + private Map loadDeletedData() { + return Collections.singletonMap(DELETED, ((BulkByScrollResponse) queryResult).getDeleted()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java index 
0cccf73268..eba6db2453 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.ArrayList; @@ -21,145 +20,142 @@ public class DescribeResultSet extends ResultSet { - private static final int DEFAULT_NUM_PREC_RADIX = 10; - private static final String IS_AUTOINCREMENT = "NO"; - - /** - * You are not required to set the field type to object explicitly, as this is the default value. - * https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html - */ - public static final String DEFAULT_OBJECT_DATATYPE = "object"; - - private IndexStatement statement; - private Object queryResult; - - public DescribeResultSet(Client client, IndexStatement statement, Object queryResult) { - this.client = client; - this.clusterName = getClusterName(); - this.statement = statement; - this.queryResult = queryResult; - - this.schema = new Schema(statement, loadColumns()); - this.dataRows = new DataRows(loadRows()); + private static final int DEFAULT_NUM_PREC_RADIX = 10; + private static final String IS_AUTOINCREMENT = "NO"; + + /** + * You are not required to set the field type to object explicitly, as this is the default value. 
+ * https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html + */ + public static final String DEFAULT_OBJECT_DATATYPE = "object"; + + private IndexStatement statement; + private Object queryResult; + + public DescribeResultSet(Client client, IndexStatement statement, Object queryResult) { + this.client = client; + this.clusterName = getClusterName(); + this.statement = statement; + this.queryResult = queryResult; + + this.schema = new Schema(statement, loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + List columns = new ArrayList<>(); + // Unused Columns are still included in Schema to match JDBC/ODBC standard + columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); + columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); + columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_NAME", null, Type.KEYWORD)); + columns.add(new Column("DATA_TYPE", null, Type.INTEGER)); + columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_SIZE", null, Type.INTEGER)); + columns.add(new Column("BUFFER_LENGTH", null, Type.INTEGER)); // Not used + columns.add(new Column("DECIMAL_DIGITS", null, Type.INTEGER)); + columns.add(new Column("NUM_PREC_RADIX", null, Type.INTEGER)); + columns.add(new Column("NULLABLE", null, Type.INTEGER)); + columns.add(new Column("REMARKS", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_DEF", null, Type.KEYWORD)); + columns.add(new Column("SQL_DATA_TYPE", null, Type.INTEGER)); // Not used + columns.add(new Column("SQL_DATETIME_SUB", null, Type.INTEGER)); // Not used + columns.add(new Column("CHAR_OCTET_LENGTH", null, Type.INTEGER)); + columns.add(new Column("ORDINAL_POSITION", null, Type.INTEGER)); + columns.add(new Column("IS_NULLABLE", null, Type.KEYWORD)); + columns.add(new Column("SCOPE_CATALOG", null, Type.KEYWORD)); // Not used + columns.add(new Column("SCOPE_SCHEMA", null, Type.KEYWORD)); // Not 
used + columns.add(new Column("SCOPE_TABLE", null, Type.KEYWORD)); // Not used + columns.add(new Column("SOURCE_DATA_TYPE", null, Type.SHORT)); // Not used + columns.add(new Column("IS_AUTOINCREMENT", null, Type.KEYWORD)); + columns.add(new Column("IS_GENERATEDCOLUMN", null, Type.KEYWORD)); + + return columns; + } + + private List loadRows() { + List rows = new ArrayList<>(); + GetIndexResponse indexResponse = (GetIndexResponse) queryResult; + Map indexMappings = indexResponse.getMappings(); + + // Iterate through indices in indexMappings + for (Entry indexCursor : indexMappings.entrySet()) { + String index = indexCursor.getKey(); + + if (matchesPatternIfRegex(index, statement.getIndexPattern())) { + rows.addAll(loadIndexData(index, indexCursor.getValue().getSourceAsMap())); + } } - - private List loadColumns() { - List columns = new ArrayList<>(); - // Unused Columns are still included in Schema to match JDBC/ODBC standard - columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); - columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); - columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_NAME", null, Type.KEYWORD)); - columns.add(new Column("DATA_TYPE", null, Type.INTEGER)); - columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_SIZE", null, Type.INTEGER)); - columns.add(new Column("BUFFER_LENGTH", null, Type.INTEGER)); // Not used - columns.add(new Column("DECIMAL_DIGITS", null, Type.INTEGER)); - columns.add(new Column("NUM_PREC_RADIX", null, Type.INTEGER)); - columns.add(new Column("NULLABLE", null, Type.INTEGER)); - columns.add(new Column("REMARKS", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_DEF", null, Type.KEYWORD)); - columns.add(new Column("SQL_DATA_TYPE", null, Type.INTEGER)); // Not used - columns.add(new Column("SQL_DATETIME_SUB", null, Type.INTEGER)); // Not used - columns.add(new Column("CHAR_OCTET_LENGTH", null, Type.INTEGER)); - columns.add(new 
Column("ORDINAL_POSITION", null, Type.INTEGER)); - columns.add(new Column("IS_NULLABLE", null, Type.KEYWORD)); - columns.add(new Column("SCOPE_CATALOG", null, Type.KEYWORD)); // Not used - columns.add(new Column("SCOPE_SCHEMA", null, Type.KEYWORD)); // Not used - columns.add(new Column("SCOPE_TABLE", null, Type.KEYWORD)); // Not used - columns.add(new Column("SOURCE_DATA_TYPE", null, Type.SHORT)); // Not used - columns.add(new Column("IS_AUTOINCREMENT", null, Type.KEYWORD)); - columns.add(new Column("IS_GENERATEDCOLUMN", null, Type.KEYWORD)); - - return columns; + return rows; + } + + @SuppressWarnings("unchecked") + private List loadIndexData(String index, Map mappingMetadata) { + List rows = new ArrayList<>(); + + Map flattenedMetaData = + flattenMappingMetaData(mappingMetadata, "", new HashMap<>()); + int position = 1; // Used as an arbitrary ORDINAL_POSITION value for the time being + for (Entry entry : flattenedMetaData.entrySet()) { + String columnPattern = statement.getColumnPattern(); + + // Check to see if column name matches pattern, if given + if (columnPattern == null || matchesPattern(entry.getKey(), columnPattern)) { + rows.add(new Row(loadRowData(index, entry.getKey(), entry.getValue(), position))); + position++; + } } - private List loadRows() { - List rows = new ArrayList<>(); - GetIndexResponse indexResponse = (GetIndexResponse) queryResult; - Map indexMappings = indexResponse.getMappings(); - - // Iterate through indices in indexMappings - for (Entry indexCursor : indexMappings.entrySet()) { - String index = indexCursor.getKey(); - - if (matchesPatternIfRegex(index, statement.getIndexPattern())) { - rows.addAll(loadIndexData(index, indexCursor.getValue().getSourceAsMap())); - } - } - return rows; - } - - @SuppressWarnings("unchecked") - private List loadIndexData(String index, Map mappingMetadata) { - List rows = new ArrayList<>(); - - Map flattenedMetaData = flattenMappingMetaData(mappingMetadata, "", new HashMap<>()); - int position = 1; // 
Used as an arbitrary ORDINAL_POSITION value for the time being - for (Entry entry : flattenedMetaData.entrySet()) { - String columnPattern = statement.getColumnPattern(); - - // Check to see if column name matches pattern, if given - if (columnPattern == null || matchesPattern(entry.getKey(), columnPattern)) { - rows.add( - new Row( - loadRowData(index, entry.getKey(), entry.getValue(), position) - ) - ); - position++; - } - } - - return rows; + return rows; + } + + private Map loadRowData(String index, String column, String type, int position) { + Map data = new HashMap<>(); + data.put("TABLE_CAT", clusterName); + data.put("TABLE_NAME", index); + data.put("COLUMN_NAME", column); + data.put("TYPE_NAME", type); + data.put("NUM_PREC_RADIX", DEFAULT_NUM_PREC_RADIX); + data.put("NULLABLE", 2); // TODO Defaulting to 2, need to find a way to check this + data.put("ORDINAL_POSITION", position); // There is no deterministic position of column in table + data.put("IS_NULLABLE", ""); // TODO Defaulting to unknown, need to check this + data.put("IS_AUTOINCREMENT", IS_AUTOINCREMENT); // Defaulting to "NO" + data.put("IS_GENERATEDCOLUMN", ""); // TODO Defaulting to unknown, need to check + + return data; + } + + /** + * To not disrupt old logic, for the time being, ShowQueryAction and DescribeQueryAction are using + * the same 'GetIndexRequestBuilder' that was used in the old ShowQueryAction. Since the format of + * the resulting meta data is different, this method is being used to flatten and retrieve types. + * + *

In the future, should look for a way to generalize this since Schema is currently using + * FieldMappingMetaData whereas here we are using MappingMetaData. + */ + @SuppressWarnings("unchecked") + private Map flattenMappingMetaData( + Map mappingMetaData, String currPath, Map flattenedMapping) { + Map properties = (Map) mappingMetaData.get("properties"); + for (Entry entry : properties.entrySet()) { + Map metaData = (Map) entry.getValue(); + + String fullPath = addToPath(currPath, entry.getKey()); + flattenedMapping.put( + fullPath, (String) metaData.getOrDefault("type", DEFAULT_OBJECT_DATATYPE)); + if (metaData.containsKey("properties")) { + flattenedMapping = flattenMappingMetaData(metaData, fullPath, flattenedMapping); + } } - private Map loadRowData(String index, String column, String type, int position) { - Map data = new HashMap<>(); - data.put("TABLE_CAT", clusterName); - data.put("TABLE_NAME", index); - data.put("COLUMN_NAME", column); - data.put("TYPE_NAME", type); - data.put("NUM_PREC_RADIX", DEFAULT_NUM_PREC_RADIX); - data.put("NULLABLE", 2); // TODO Defaulting to 2, need to find a way to check this - data.put("ORDINAL_POSITION", position); // There is no deterministic position of column in table - data.put("IS_NULLABLE", ""); // TODO Defaulting to unknown, need to check this - data.put("IS_AUTOINCREMENT", IS_AUTOINCREMENT); // Defaulting to "NO" - data.put("IS_GENERATEDCOLUMN", ""); // TODO Defaulting to unknown, need to check - - return data; - } + return flattenedMapping; + } - /** - * To not disrupt old logic, for the time being, ShowQueryAction and DescribeQueryAction are using the same - * 'GetIndexRequestBuilder' that was used in the old ShowQueryAction. Since the format of the resulting meta data - * is different, this method is being used to flatten and retrieve types. - *

- * In the future, should look for a way to generalize this since Schema is currently using FieldMappingMetaData - * whereas here we are using MappingMetaData. - */ - @SuppressWarnings("unchecked") - private Map flattenMappingMetaData(Map mappingMetaData, - String currPath, - Map flattenedMapping) { - Map properties = (Map) mappingMetaData.get("properties"); - for (Entry entry : properties.entrySet()) { - Map metaData = (Map) entry.getValue(); - - String fullPath = addToPath(currPath, entry.getKey()); - flattenedMapping.put(fullPath, (String) metaData.getOrDefault("type", DEFAULT_OBJECT_DATATYPE)); - if (metaData.containsKey("properties")) { - flattenedMapping = flattenMappingMetaData(metaData, fullPath, flattenedMapping); - } - } - - return flattenedMapping; + private String addToPath(String currPath, String field) { + if (currPath.isEmpty()) { + return field; } - private String addToPath(String currPath, String field) { - if (currPath.isEmpty()) { - return field; - } - - return currPath + "." + field; - } + return currPath + "." + field; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java index e8536567dd..f0ffafc470 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.io.IOException; @@ -41,219 +40,239 @@ import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. 
*/ public abstract class ElasticJoinExecutor implements ElasticHitsExecutor { - private static final Logger LOG = LogManager.getLogger(); - - protected List results; // Keep list to avoid copy to new array in SearchHits - protected MetaSearchResult metaResults; - protected final int MAX_RESULTS_ON_ONE_FETCH = 10000; - private Set aliasesOnReturn; - private boolean allFieldsReturn; - - protected ElasticJoinExecutor(JoinRequestBuilder requestBuilder) { - metaResults = new MetaSearchResult(); - aliasesOnReturn = new HashSet<>(); - List firstTableReturnedField = requestBuilder.getFirstTable().getReturnedFields(); - List secondTableReturnedField = requestBuilder.getSecondTable().getReturnedFields(); - allFieldsReturn = (firstTableReturnedField == null || firstTableReturnedField.size() == 0) - && (secondTableReturnedField == null || secondTableReturnedField.size() == 0); - } - - public void sendResponse(RestChannel channel) throws IOException { - XContentBuilder builder = null; - long len; - try { - builder = ElasticUtils.hitsAsStringResultZeroCopy(results, metaResults, this); - BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, builder); - len = bytesRestResponse.content().length(); - channel.sendResponse(bytesRestResponse); - } catch (IOException e) { - try { - if (builder != null) { - builder.close(); - } - } catch (Exception ex) { - // Ignore. 
Already logged in channel - } - throw e; + private static final Logger LOG = LogManager.getLogger(); + + protected List results; // Keep list to avoid copy to new array in SearchHits + protected MetaSearchResult metaResults; + protected final int MAX_RESULTS_ON_ONE_FETCH = 10000; + private Set aliasesOnReturn; + private boolean allFieldsReturn; + + protected ElasticJoinExecutor(JoinRequestBuilder requestBuilder) { + metaResults = new MetaSearchResult(); + aliasesOnReturn = new HashSet<>(); + List firstTableReturnedField = requestBuilder.getFirstTable().getReturnedFields(); + List secondTableReturnedField = requestBuilder.getSecondTable().getReturnedFields(); + allFieldsReturn = + (firstTableReturnedField == null || firstTableReturnedField.size() == 0) + && (secondTableReturnedField == null || secondTableReturnedField.size() == 0); + } + + public void sendResponse(RestChannel channel) throws IOException { + XContentBuilder builder = null; + long len; + try { + builder = ElasticUtils.hitsAsStringResultZeroCopy(results, metaResults, this); + BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, builder); + len = bytesRestResponse.content().length(); + channel.sendResponse(bytesRestResponse); + } catch (IOException e) { + try { + if (builder != null) { + builder.close(); } - LOG.debug("[MCB] Successfully send response with size of {}. Thread id = {}", len, - Thread.currentThread().getId()); - } - - public void run() throws IOException, SqlParseException { - long timeBefore = System.currentTimeMillis(); - results = innerRun(); - long joinTimeInMilli = System.currentTimeMillis() - timeBefore; - this.metaResults.setTookImMilli(joinTimeInMilli); + } catch (Exception ex) { + // Ignore. 
Already logged in channel + } + throw e; } - - - protected abstract List innerRun() throws IOException, SqlParseException; - - public SearchHits getHits() { - return new SearchHits(results.toArray(new SearchHit[results.size()]), new TotalHits(results.size(), - Relation.EQUAL_TO), 1.0f); + LOG.debug( + "[MCB] Successfully send response with size of {}. Thread id = {}", + len, + Thread.currentThread().getId()); + } + + public void run() throws IOException, SqlParseException { + long timeBefore = System.currentTimeMillis(); + results = innerRun(); + long joinTimeInMilli = System.currentTimeMillis() - timeBefore; + this.metaResults.setTookImMilli(joinTimeInMilli); + } + + protected abstract List innerRun() throws IOException, SqlParseException; + + public SearchHits getHits() { + return new SearchHits( + results.toArray(new SearchHit[results.size()]), + new TotalHits(results.size(), Relation.EQUAL_TO), + 1.0f); + } + + public static ElasticJoinExecutor createJoinExecutor( + Client client, SqlElasticRequestBuilder requestBuilder) { + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + return new QueryPlanElasticExecutor((HashJoinQueryPlanRequestBuilder) requestBuilder); + } else if (requestBuilder instanceof HashJoinElasticRequestBuilder) { + HashJoinElasticRequestBuilder hashJoin = (HashJoinElasticRequestBuilder) requestBuilder; + return new HashJoinElasticExecutor(client, hashJoin); + } else if (requestBuilder instanceof NestedLoopsElasticRequestBuilder) { + NestedLoopsElasticRequestBuilder nestedLoops = + (NestedLoopsElasticRequestBuilder) requestBuilder; + return new NestedLoopsElasticExecutor(client, nestedLoops); + } else { + throw new RuntimeException("Unsuported requestBuilder of type: " + requestBuilder.getClass()); } - - public static ElasticJoinExecutor createJoinExecutor(Client client, SqlElasticRequestBuilder requestBuilder) { - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - return new 
QueryPlanElasticExecutor((HashJoinQueryPlanRequestBuilder) requestBuilder); - } else if (requestBuilder instanceof HashJoinElasticRequestBuilder) { - HashJoinElasticRequestBuilder hashJoin = (HashJoinElasticRequestBuilder) requestBuilder; - return new HashJoinElasticExecutor(client, hashJoin); - } else if (requestBuilder instanceof NestedLoopsElasticRequestBuilder) { - NestedLoopsElasticRequestBuilder nestedLoops = (NestedLoopsElasticRequestBuilder) requestBuilder; - return new NestedLoopsElasticExecutor(client, nestedLoops); - } else { - throw new RuntimeException("Unsuported requestBuilder of type: " + requestBuilder.getClass()); - } + } + + protected void mergeSourceAndAddAliases( + Map secondTableHitSource, + SearchHit searchHit, + String t1Alias, + String t2Alias) { + Map results = mapWithAliases(searchHit.getSourceAsMap(), t1Alias); + results.putAll(mapWithAliases(secondTableHitSource, t2Alias)); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(results); + } + + protected Map mapWithAliases(Map source, String alias) { + Map mapWithAliases = new HashMap<>(); + for (Map.Entry fieldNameToValue : source.entrySet()) { + if (!aliasesOnReturn.contains(fieldNameToValue.getKey())) { + mapWithAliases.put(alias + "." 
+ fieldNameToValue.getKey(), fieldNameToValue.getValue()); + } else { + mapWithAliases.put(fieldNameToValue.getKey(), fieldNameToValue.getValue()); + } } - - protected void mergeSourceAndAddAliases(Map secondTableHitSource, SearchHit searchHit, - String t1Alias, String t2Alias) { - Map results = mapWithAliases(searchHit.getSourceAsMap(), t1Alias); - results.putAll(mapWithAliases(secondTableHitSource, t2Alias)); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(results); + return mapWithAliases; + } + + protected void onlyReturnedFields( + Map fieldsMap, List required, boolean allRequired) { + HashMap filteredMap = new HashMap<>(); + if (allFieldsReturn || allRequired) { + filteredMap.putAll(fieldsMap); + return; } - - protected Map mapWithAliases(Map source, String alias) { - Map mapWithAliases = new HashMap<>(); - for (Map.Entry fieldNameToValue : source.entrySet()) { - if (!aliasesOnReturn.contains(fieldNameToValue.getKey())) { - mapWithAliases.put(alias + "." + fieldNameToValue.getKey(), fieldNameToValue.getValue()); - } else { - mapWithAliases.put(fieldNameToValue.getKey(), fieldNameToValue.getValue()); - } - } - return mapWithAliases; + for (Field field : required) { + String name = field.getName(); + String returnName = name; + String alias = field.getAlias(); + if (alias != null && alias != "") { + returnName = alias; + aliasesOnReturn.add(alias); + } + filteredMap.put(returnName, deepSearchInMap(fieldsMap, name)); } - - protected void onlyReturnedFields(Map fieldsMap, List required, boolean allRequired) { - HashMap filteredMap = new HashMap<>(); - if (allFieldsReturn || allRequired) { - filteredMap.putAll(fieldsMap); - return; + fieldsMap.clear(); + fieldsMap.putAll(filteredMap); + } + + protected Object deepSearchInMap(Map fieldsMap, String name) { + if (name.contains(".")) { + String[] path = name.split("\\."); + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = 
currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; } - for (Field field : required) { - String name = field.getName(); - String returnName = name; - String alias = field.getAlias(); - if (alias != null && alias != "") { - returnName = alias; - aliasesOnReturn.add(alias); - } - filteredMap.put(returnName, deepSearchInMap(fieldsMap, name)); + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; } - fieldsMap.clear(); - fieldsMap.putAll(filteredMap); - + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); } - protected Object deepSearchInMap(Map fieldsMap, String name) { - if (name.contains(".")) { - String[] path = name.split("\\."); - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + return fieldsMap.get(name); + } + + protected void addUnmatchedResults( + List combinedResults, + Collection firstTableSearchHits, + List secondTableReturnedFields, + int currentNumOfIds, + int totalLimit, + String t1Alias, + String t2Alias) { + boolean limitReached = false; + for (SearchHitsResult hitsResult : firstTableSearchHits) { + if (!hitsResult.isMatchedWithOtherTable()) { + for (SearchHit hit : hitsResult.getSearchHits()) { + + // todo: decide which id to put or type. or maby its ok this way. just need to doc. 
+ SearchHit unmachedResult = + createUnmachedResult(secondTableReturnedFields, hit.docId(), t1Alias, t2Alias, hit); + combinedResults.add(unmachedResult); + currentNumOfIds++; + if (currentNumOfIds >= totalLimit) { + limitReached = true; + break; + } } - - return fieldsMap.get(name); + } + if (limitReached) { + break; + } } - - - protected void addUnmatchedResults(List combinedResults, - Collection firstTableSearchHits, - List secondTableReturnedFields, int currentNumOfIds, int totalLimit, - String t1Alias, String t2Alias) { - boolean limitReached = false; - for (SearchHitsResult hitsResult : firstTableSearchHits) { - if (!hitsResult.isMatchedWithOtherTable()) { - for (SearchHit hit : hitsResult.getSearchHits()) { - - //todo: decide which id to put or type. or maby its ok this way. just need to doc. - SearchHit unmachedResult = createUnmachedResult(secondTableReturnedFields, hit.docId(), - t1Alias, t2Alias, hit); - combinedResults.add(unmachedResult); - currentNumOfIds++; - if (currentNumOfIds >= totalLimit) { - limitReached = true; - break; - } - - } - } - if (limitReached) { - break; - } - } + } + + protected SearchHit createUnmachedResult( + List secondTableReturnedFields, + int docId, + String t1Alias, + String t2Alias, + SearchHit hit) { + String unmatchedId = hit.getId() + "|0"; + + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(docId, unmatchedId, documentFields, metaFields); + + searchHit.sourceRef(hit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(hit.getSourceAsMap()); + Map emptySecondTableHitSource = createNullsSource(secondTableReturnedFields); + + mergeSourceAndAddAliases(emptySecondTableHitSource, searchHit, t1Alias, t2Alias); + + return searchHit; + } + + protected Map createNullsSource(List secondTableReturnedFields) { + Map nulledSource = new HashMap<>(); + for (Field field : secondTableReturnedFields) { + if (!field.getName().equals("*")) { + nulledSource.put(field.getName(), null); + } } - - protected SearchHit createUnmachedResult(List secondTableReturnedFields, int docId, String t1Alias, - String t2Alias, SearchHit hit) { - String unmatchedId = hit.getId() + "|0"; - - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(docId, unmatchedId, documentFields, metaFields); - - searchHit.sourceRef(hit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(hit.getSourceAsMap()); - Map emptySecondTableHitSource = createNullsSource(secondTableReturnedFields); - - mergeSourceAndAddAliases(emptySecondTableHitSource, searchHit, t1Alias, t2Alias); - - return searchHit; + return nulledSource; + } + + protected void updateMetaSearchResults(SearchResponse searchResponse) { + this.metaResults.addSuccessfulShards(searchResponse.getSuccessfulShards()); + this.metaResults.addFailedShards(searchResponse.getFailedShards()); + this.metaResults.addTotalNumOfShards(searchResponse.getTotalShards()); + this.metaResults.updateTimeOut(searchResponse.isTimedOut()); + } + + protected SearchResponse scrollOneTimeWithMax( + Client client, TableInJoinRequestBuilder tableRequest) { + SearchRequestBuilder scrollRequest = + tableRequest + .getRequestBuilder() + .setScroll(new TimeValue(60000)) + .setSize(MAX_RESULTS_ON_ONE_FETCH); + boolean ordered = tableRequest.getOriginalSelect().isOrderdSelect(); + if (!ordered) { + scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); } - - protected Map createNullsSource(List secondTableReturnedFields) { - Map nulledSource = new HashMap<>(); - for (Field field : secondTableReturnedFields) { - if (!field.getName().equals("*")) { - nulledSource.put(field.getName(), null); - } - } - return nulledSource; - } - - protected void updateMetaSearchResults(SearchResponse searchResponse) { - this.metaResults.addSuccessfulShards(searchResponse.getSuccessfulShards()); - this.metaResults.addFailedShards(searchResponse.getFailedShards()); - this.metaResults.addTotalNumOfShards(searchResponse.getTotalShards()); - this.metaResults.updateTimeOut(searchResponse.isTimedOut()); - } - - protected SearchResponse scrollOneTimeWithMax(Client client, 
TableInJoinRequestBuilder tableRequest) { - SearchRequestBuilder scrollRequest = tableRequest.getRequestBuilder() - .setScroll(new TimeValue(60000)).setSize(MAX_RESULTS_ON_ONE_FETCH); - boolean ordered = tableRequest.getOriginalSelect().isOrderdSelect(); - if (!ordered) { - scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); - } - SearchResponse responseWithHits = scrollRequest.get(); - //on ordered select - not using SCAN , elastic returns hits on first scroll - //es5.0 elastic always return docs on scan - // if(!ordered) - // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) - // .setScroll(new TimeValue(600000)).get(); - return responseWithHits; - } - - + SearchResponse responseWithHits = scrollRequest.get(); + // on ordered select - not using SCAN , elastic returns hits on first scroll + // es5.0 elastic always return docs on scan + // if(!ordered) + // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + return responseWithHits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java index 766ecd3692..fa3514600b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import com.google.common.base.Joiner; @@ -14,72 +13,70 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 9/9/2016. - */ +/** Created by Eliran on 9/9/2016. 
*/ public class ComperableHitResult { - private SearchHit hit; - private String comperator; - private boolean isAllNull; - private Map flattenMap; + private SearchHit hit; + private String comperator; + private boolean isAllNull; + private Map flattenMap; - public ComperableHitResult(SearchHit hit, String[] fieldsOrder, String seperator) { - this.hit = hit; - Map hitAsMap = hit.getSourceAsMap(); - this.flattenMap = new HashMap<>(); - List results = new ArrayList<>(); - this.isAllNull = true; + public ComperableHitResult(SearchHit hit, String[] fieldsOrder, String seperator) { + this.hit = hit; + Map hitAsMap = hit.getSourceAsMap(); + this.flattenMap = new HashMap<>(); + List results = new ArrayList<>(); + this.isAllNull = true; - for (int i = 0; i < fieldsOrder.length; i++) { - String field = fieldsOrder[i]; - Object result = Util.deepSearchInMap(hitAsMap, field); - if (result == null) { - results.add(""); - } else { - this.isAllNull = false; - results.add(result.toString()); - this.flattenMap.put(field, result); - } - } - this.comperator = Joiner.on(seperator).join(results); + for (int i = 0; i < fieldsOrder.length; i++) { + String field = fieldsOrder[i]; + Object result = Util.deepSearchInMap(hitAsMap, field); + if (result == null) { + results.add(""); + } else { + this.isAllNull = false; + results.add(result.toString()); + this.flattenMap.put(field, result); + } } + this.comperator = Joiner.on(seperator).join(results); + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ComperableHitResult that = (ComperableHitResult) o; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } - if (!comperator.equals(that.comperator)) { - return false; - } + ComperableHitResult that = (ComperableHitResult) o; - return true; + if (!comperator.equals(that.comperator)) { + 
return false; } - public boolean isAllNull() { - return isAllNull; - } + return true; + } - @Override - public int hashCode() { - return comperator.hashCode(); - } + public boolean isAllNull() { + return isAllNull; + } - public String getComperator() { - return comperator; - } + @Override + public int hashCode() { + return comperator.hashCode(); + } - public Map getFlattenMap() { - return flattenMap; - } + public String getComperator() { + return comperator; + } - public SearchHit getOriginalHit() { - return hit; - } + public Map getFlattenMap() { + return flattenMap; + } + + public SearchHit getOriginalHit() { + return hit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java index afa6f6c439..c1de63fe88 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import org.opensearch.sql.legacy.expression.core.operator.BinaryScalarOperator; @@ -12,205 +11,130 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.core.operator.UnaryScalarOperator; -/** - * The definition of arithmetic function builder factory. - */ +/** The definition of arithmetic function builder factory. 
*/ public class ArithmeticFunctionFactory { - public static ExpressionBuilder add() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.ADD, - Math::addExact, - Math::addExact, - Double::sum, - Float::sum)); - } - - public static ExpressionBuilder subtract() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.ADD, - Math::subtractExact, - Math::subtractExact, - (v1, v2) -> v1 - v2, - (v1, v2) -> v1 - v2)); - } - - public static ExpressionBuilder multiply() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.MULTIPLY, - Math::multiplyExact, - Math::multiplyExact, - (v1, v2) -> v1 * v2, - (v1, v2) -> v1 * v2 - )); - } - - public static ExpressionBuilder divide() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.DIVIDE, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2 - )); - } - - public static ExpressionBuilder modules() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.MODULES, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2 - )); - } - - public static ExpressionBuilder abs() { - return new UnaryExpressionBuilder( - new UnaryScalarOperator( - ScalarOperation.ABS, - Math::abs, - Math::abs, - Math::abs, - Math::abs - )); - } - - public static ExpressionBuilder acos() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ACOS, - Math::acos - )); - } - - public static ExpressionBuilder asin() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ASIN, - Math::asin - ) - ); - } - - public static ExpressionBuilder atan() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ATAN, - Math::atan - ) - ); - } - - public static ExpressionBuilder atan2() { - return new BinaryExpressionBuilder( - new 
DoubleBinaryScalarOperator( - ScalarOperation.ATAN2, - Math::atan2 - ) - ); - } - - public static ExpressionBuilder tan() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.TAN, - Math::tan - ) - ); - } - - public static ExpressionBuilder cbrt() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.CBRT, - Math::cbrt - ) - ); - } - - public static ExpressionBuilder ceil() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.CEIL, - Math::ceil - ) - ); - } - - public static ExpressionBuilder cos() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.COS, - Math::cos - ) - ); - } - - public static ExpressionBuilder cosh() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.COSH, - Math::cosh - ) - ); - } - - public static ExpressionBuilder exp() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.EXP, - Math::exp - ) - ); - } - - public static ExpressionBuilder floor() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.FLOOR, - Math::floor - ) - ); - } - - public static ExpressionBuilder ln() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LN, - Math::log - ) - ); - } - - public static ExpressionBuilder log() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG, - Math::log - ) - ); - } - - public static ExpressionBuilder log2() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG2, - (x) -> Math.log(x) / Math.log(2d) - ) - ); - } - - public static ExpressionBuilder log10() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG10, - Math::log10 - ) - ); - } + public static ExpressionBuilder add() { + return new BinaryExpressionBuilder( + new 
BinaryScalarOperator( + ScalarOperation.ADD, Math::addExact, Math::addExact, Double::sum, Float::sum)); + } + + public static ExpressionBuilder subtract() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.ADD, + Math::subtractExact, + Math::subtractExact, + (v1, v2) -> v1 - v2, + (v1, v2) -> v1 - v2)); + } + + public static ExpressionBuilder multiply() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.MULTIPLY, + Math::multiplyExact, + Math::multiplyExact, + (v1, v2) -> v1 * v2, + (v1, v2) -> v1 * v2)); + } + + public static ExpressionBuilder divide() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.DIVIDE, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2)); + } + + public static ExpressionBuilder modules() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.MODULES, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2)); + } + + public static ExpressionBuilder abs() { + return new UnaryExpressionBuilder( + new UnaryScalarOperator(ScalarOperation.ABS, Math::abs, Math::abs, Math::abs, Math::abs)); + } + + public static ExpressionBuilder acos() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ACOS, Math::acos)); + } + + public static ExpressionBuilder asin() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ASIN, Math::asin)); + } + + public static ExpressionBuilder atan() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ATAN, Math::atan)); + } + + public static ExpressionBuilder atan2() { + return new BinaryExpressionBuilder( + new DoubleBinaryScalarOperator(ScalarOperation.ATAN2, Math::atan2)); + } + + public static ExpressionBuilder tan() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.TAN, 
Math::tan)); + } + + public static ExpressionBuilder cbrt() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.CBRT, Math::cbrt)); + } + + public static ExpressionBuilder ceil() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.CEIL, Math::ceil)); + } + + public static ExpressionBuilder cos() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.COS, Math::cos)); + } + + public static ExpressionBuilder cosh() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.COSH, Math::cosh)); + } + + public static ExpressionBuilder exp() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.EXP, Math::exp)); + } + + public static ExpressionBuilder floor() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.FLOOR, Math::floor)); + } + + public static ExpressionBuilder ln() { + return new UnaryExpressionBuilder(new DoubleUnaryScalarOperator(ScalarOperation.LN, Math::log)); + } + + public static ExpressionBuilder log() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG, Math::log)); + } + + public static ExpressionBuilder log2() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG2, (x) -> Math.log(x) / Math.log(2d))); + } + + public static ExpressionBuilder log10() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG10, Math::log10)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java index 99ddd50248..fcf08180a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.Arrays; @@ -14,33 +13,32 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression Builder which has two arguments. - */ +/** The definition of the Expression Builder which has two arguments. */ @RequiredArgsConstructor public class BinaryExpressionBuilder implements ExpressionBuilder { - private final ScalarOperator op; + private final ScalarOperator op; - /** - * Build the expression with two {@link Expression} as arguments. - * @param expressionList expression list. - * @return expression. - */ - @Override - public Expression build(List expressionList) { - Expression e1 = expressionList.get(0); - Expression e2 = expressionList.get(1); + /** + * Build the expression with two {@link Expression} as arguments. + * + * @param expressionList expression list. + * @return expression. 
+ */ + @Override + public Expression build(List expressionList) { + Expression e1 = expressionList.get(0); + Expression e2 = expressionList.get(1); - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return op.apply(Arrays.asList(e1.valueOf(tuple), e2.valueOf(tuple))); - } + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return op.apply(Arrays.asList(e1.valueOf(tuple), e2.valueOf(tuple))); + } - @Override - public String toString() { - return String.format("%s(%s,%s)", op.name(), e1, e2); - } - }; - } + @Override + public String toString() { + return String.format("%s(%s,%s)", op.name(), e1, e2); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java index 70d47a3e83..02d29e1ed9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.DOUBLE_VALUE; @@ -24,54 +23,53 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Binary Scalar Operator take two {@link ExprValue} as arguments ans return one {@link ExprValue} as result. + * Binary Scalar Operator take two {@link ExprValue} as arguments ans return one {@link ExprValue} + * as result. 
*/ @RequiredArgsConstructor public class BinaryScalarOperator implements ScalarOperator { - private static final Map numberTypeOrder = - new ImmutableMap.Builder() - .put(INTEGER_VALUE, 0) - .put(LONG_VALUE, 1) - .put(DOUBLE_VALUE, 2) - .put(FLOAT_VALUE, 3) - .build(); + private static final Map numberTypeOrder = + new ImmutableMap.Builder() + .put(INTEGER_VALUE, 0) + .put(LONG_VALUE, 1) + .put(DOUBLE_VALUE, 2) + .put(FLOAT_VALUE, 3) + .build(); - private final ScalarOperation op; - private final BiFunction integerFunc; - private final BiFunction longFunc; - private final BiFunction doubleFunc; - private final BiFunction floatFunc; + private final ScalarOperation op; + private final BiFunction integerFunc; + private final BiFunction longFunc; + private final BiFunction doubleFunc; + private final BiFunction floatFunc; - @Override - public ExprValue apply(List valueList) { - ExprValue v1 = valueList.get(0); - ExprValue v2 = valueList.get(1); - if (!numberTypeOrder.containsKey(v1.kind()) || !numberTypeOrder.containsKey(v2.kind())) { - throw new RuntimeException( - String.format("unexpected operation type: %s(%s, %s) ", op.name(), v1.kind(), v2.kind())); - } - ExprValue.ExprValueKind expectedType = numberTypeOrder.get(v1.kind()) > numberTypeOrder.get(v2.kind()) - ? 
v1.kind() : v2.kind(); - switch (expectedType) { - case DOUBLE_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(v1), getDoubleValue(v2))); - case INTEGER_VALUE: - return ExprValueFactory - .from(integerFunc.apply(getIntegerValue(v1), getIntegerValue(v2))); - case LONG_VALUE: - return ExprValueFactory - .from(longFunc.apply(getLongValue(v1), getLongValue(v2))); - case FLOAT_VALUE: - return ExprValueFactory - .from(floatFunc.apply(getFloatValue(v1), getFloatValue(v2))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s, %s)", op.name(), v1.kind(), - v2.kind())); - } + @Override + public ExprValue apply(List valueList) { + ExprValue v1 = valueList.get(0); + ExprValue v2 = valueList.get(1); + if (!numberTypeOrder.containsKey(v1.kind()) || !numberTypeOrder.containsKey(v2.kind())) { + throw new RuntimeException( + String.format("unexpected operation type: %s(%s, %s) ", op.name(), v1.kind(), v2.kind())); } - - @Override - public String name() { - return op.name(); + ExprValue.ExprValueKind expectedType = + numberTypeOrder.get(v1.kind()) > numberTypeOrder.get(v2.kind()) ? 
v1.kind() : v2.kind(); + switch (expectedType) { + case DOUBLE_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(v1), getDoubleValue(v2))); + case INTEGER_VALUE: + return ExprValueFactory.from(integerFunc.apply(getIntegerValue(v1), getIntegerValue(v2))); + case LONG_VALUE: + return ExprValueFactory.from(longFunc.apply(getLongValue(v1), getLongValue(v2))); + case FLOAT_VALUE: + return ExprValueFactory.from(floatFunc.apply(getFloatValue(v1), getFloatValue(v2))); + default: + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s, %s)", op.name(), v1.kind(), v2.kind())); } + } + + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java index 2555b2a53c..12e7aacbaa 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -16,37 +15,41 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Double Binary Scalar Operator take two {@link ExprValue} which have double value as arguments ans return one - * {@link ExprDoubleValue} as result. + * Double Binary Scalar Operator take two {@link ExprValue} which have double value as arguments ans + * return one {@link ExprDoubleValue} as result. 
*/ @RequiredArgsConstructor public class DoubleBinaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final BiFunction doubleFunc; + private final ScalarOperation op; + private final BiFunction doubleFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue1 = exprValues.get(0); - ExprValue exprValue2 = exprValues.get(1); - if (exprValue1.kind() != exprValue2.kind()) { - throw new RuntimeException(String.format("unexpected operation type: %s(%s,%s)", op.name(), - exprValue1.kind(), exprValue2.kind())); - } - switch (exprValue1.kind()) { - case DOUBLE_VALUE: - case INTEGER_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue1), - getDoubleValue(exprValue2))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s,%s)", op.name(), - exprValue1.kind(), exprValue2.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue1 = exprValues.get(0); + ExprValue exprValue2 = exprValues.get(1); + if (exprValue1.kind() != exprValue2.kind()) { + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s,%s)", + op.name(), exprValue1.kind(), exprValue2.kind())); } - - @Override - public String name() { - return op.name(); + switch (exprValue1.kind()) { + case DOUBLE_VALUE: + case INTEGER_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return ExprValueFactory.from( + doubleFunc.apply(getDoubleValue(exprValue1), getDoubleValue(exprValue2))); + default: + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s,%s)", + op.name(), exprValue1.kind(), exprValue2.kind())); } + } + + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java index 736216472f..8242eee8a6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -16,31 +15,31 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Unary Binary Scalar Operator take one {@link ExprValue} which have double value as arguments ans return one - * {@link ExprDoubleValue} as result. + * Unary Binary Scalar Operator take one {@link ExprValue} which have double value as arguments ans + * return one {@link ExprDoubleValue} as result. */ @RequiredArgsConstructor public class DoubleUnaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final Function doubleFunc; + private final ScalarOperation op; + private final Function doubleFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue = exprValues.get(0); - switch (exprValue.kind()) { - case DOUBLE_VALUE: - case INTEGER_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s)", - op.name(), exprValue.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue = exprValues.get(0); + switch (exprValue.kind()) { + case DOUBLE_VALUE: + case INTEGER_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); + default: + throw new RuntimeException( + String.format("unexpected operation type: %s(%s)", 
op.name(), exprValue.kind())); } + } - @Override - public String name() { - return op.name(); - } + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java index badc7c8355..328f63b7ca 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.domain; - import java.util.Map; import java.util.stream.Collectors; import lombok.Builder; @@ -19,42 +17,41 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * BindingTuple represents the a relationship between bindingName and ExprValue. - * e.g. The operation output column name is bindingName, the value is the ExprValue. + * BindingTuple represents the a relationship between bindingName and ExprValue. e.g. The operation + * output column name is bindingName, the value is the ExprValue. */ @Builder @Getter @EqualsAndHashCode public class BindingTuple { - @Singular("binding") - private final Map bindingMap; - - /** - * Resolve the Binding Name in BindingTuple context. - * - * @param bindingName binding name. - * @return binding value. 
- */ - public ExprValue resolve(String bindingName) { - return bindingMap.getOrDefault(bindingName, new ExprMissingValue()); - } - - @Override - public String toString() { - return bindingMap.entrySet() - .stream() - .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) - .collect(Collectors.joining(",", "<", ">")); - } - - public static BindingTuple from(Map map) { - return from(new JSONObject(map)); - } - - public static BindingTuple from(JSONObject json) { - Map map = json.toMap(); - BindingTupleBuilder bindingTupleBuilder = BindingTuple.builder(); - map.forEach((key, value) -> bindingTupleBuilder.binding(key, ExprValueFactory.from(value))); - return bindingTupleBuilder.build(); - } + @Singular("binding") + private final Map bindingMap; + + /** + * Resolve the Binding Name in BindingTuple context. + * + * @param bindingName binding name. + * @return binding value. + */ + public ExprValue resolve(String bindingName) { + return bindingMap.getOrDefault(bindingName, new ExprMissingValue()); + } + + @Override + public String toString() { + return bindingMap.entrySet().stream() + .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",", "<", ">")); + } + + public static BindingTuple from(Map map) { + return from(new JSONObject(map)); + } + + public static BindingTuple from(JSONObject json) { + Map map = json.toMap(); + BindingTupleBuilder bindingTupleBuilder = BindingTuple.builder(); + map.forEach((key, value) -> bindingTupleBuilder.binding(key, ExprValueFactory.from(value))); + return bindingTupleBuilder.build(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java index 8bb15eeb74..88d5f817e8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java @@ -3,32 +3,31 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.concurrent.atomic.LongAdder; public class BasicCounter implements Counter { - private LongAdder count = new LongAdder(); + private LongAdder count = new LongAdder(); - @Override - public void increment() { - count.increment(); - } + @Override + public void increment() { + count.increment(); + } - @Override - public void add(long n) { - count.add(n); - } + @Override + public void add(long n) { + count.add(n); + } - @Override - public Long getValue() { - return count.longValue(); - } + @Override + public Long getValue() { + return count.longValue(); + } - @Override - public void reset() { - count.reset(); - } + @Override + public void reset() { + count.reset(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java index 7d490704e8..f91731ab0e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java @@ -3,16 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public interface Counter { - void increment(); + void increment(); - void add(long n); + void add(long n); - T getValue(); + T getValue(); - void reset(); + void reset(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java index c711ee2929..d55ee64601 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -19,101 +18,119 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import 
org.opensearch.sql.legacy.utils.Util; -/** - * Created by allwefantasy on 9/3/16. - */ +/** Created by allwefantasy on 9/3/16. */ public class CaseWhenParser { - private SQLCaseExpr caseExpr; - private String alias; - private String tableAlias; - - public CaseWhenParser(SQLCaseExpr caseExpr, String alias, String tableAlias) { - this.alias = alias; - this.tableAlias = tableAlias; - this.caseExpr = caseExpr; + private SQLCaseExpr caseExpr; + private String alias; + private String tableAlias; + + public CaseWhenParser(SQLCaseExpr caseExpr, String alias, String tableAlias) { + this.alias = alias; + this.tableAlias = tableAlias; + this.caseExpr = caseExpr; + } + + public String parse() throws SqlParseException { + List result = new ArrayList<>(); + + if (caseExpr.getValueExpr() != null) { + for (SQLCaseExpr.Item item : caseExpr.getItems()) { + SQLExpr left = caseExpr.getValueExpr(); + SQLExpr right = item.getConditionExpr(); + SQLBinaryOpExpr conditionExpr = + new SQLBinaryOpExpr(left, SQLBinaryOperator.Equality, right); + item.setConditionExpr(conditionExpr); + } + caseExpr.setValueExpr(null); } - public String parse() throws SqlParseException { - List result = new ArrayList<>(); - - if (caseExpr.getValueExpr() != null) { - for (SQLCaseExpr.Item item : caseExpr.getItems()) { - SQLExpr left = caseExpr.getValueExpr(); - SQLExpr right = item.getConditionExpr(); - SQLBinaryOpExpr conditionExpr = new SQLBinaryOpExpr(left, SQLBinaryOperator.Equality, right); - item.setConditionExpr(conditionExpr); - } - caseExpr.setValueExpr(null); - } - - for (SQLCaseExpr.Item item : caseExpr.getItems()) { - SQLExpr conditionExpr = item.getConditionExpr(); - - WhereParser parser = new WhereParser(new SqlParser(), conditionExpr); - String scriptCode = explain(parser.findWhere()); - if (scriptCode.startsWith(" &&")) { - scriptCode = scriptCode.substring(3); - } - if (result.size() == 0) { - result.add("if(" + scriptCode + ")" + "{" + Util.getScriptValueWithQuote(item.getValueExpr(), - "'") + 
"}"); - } else { - result.add("else if(" + scriptCode + ")" + "{" + Util.getScriptValueWithQuote(item.getValueExpr(), - "'") + "}"); - } - - } - SQLExpr elseExpr = caseExpr.getElseExpr(); - if (elseExpr == null) { - result.add("else { null }"); - } else { - result.add("else {" + Util.getScriptValueWithQuote(elseExpr, "'") + "}"); - } - - - return Joiner.on(" ").join(result); + for (SQLCaseExpr.Item item : caseExpr.getItems()) { + SQLExpr conditionExpr = item.getConditionExpr(); + + WhereParser parser = new WhereParser(new SqlParser(), conditionExpr); + String scriptCode = explain(parser.findWhere()); + if (scriptCode.startsWith(" &&")) { + scriptCode = scriptCode.substring(3); + } + if (result.size() == 0) { + result.add( + "if(" + + scriptCode + + ")" + + "{" + + Util.getScriptValueWithQuote(item.getValueExpr(), "'") + + "}"); + } else { + result.add( + "else if(" + + scriptCode + + ")" + + "{" + + Util.getScriptValueWithQuote(item.getValueExpr(), "'") + + "}"); + } } - - public String explain(Where where) throws SqlParseException { - List codes = new ArrayList<>(); - while (where.getWheres().size() == 1) { - where = where.getWheres().getFirst(); - } - explainWhere(codes, where); - String relation = where.getConn().name().equals("AND") ? 
" && " : " || "; - return Joiner.on(relation).join(codes); + SQLExpr elseExpr = caseExpr.getElseExpr(); + if (elseExpr == null) { + result.add("else { null }"); + } else { + result.add("else {" + Util.getScriptValueWithQuote(elseExpr, "'") + "}"); } + return Joiner.on(" ").join(result); + } - private void explainWhere(List codes, Where where) throws SqlParseException { - if (where instanceof Condition) { - Condition condition = (Condition) where; - - if (condition.getValue() instanceof ScriptFilter) { - codes.add("(" + ((ScriptFilter) condition.getValue()).getScript() + ")"); - } else if (condition.getOPERATOR() == Condition.OPERATOR.BETWEEN) { - Object[] objs = (Object[]) condition.getValue(); - codes.add("(" + "doc['" + condition.getName() + "'].value >= " + objs[0] + " && doc['" - + condition.getName() + "'].value <=" + objs[1] + ")"); - } else { - SQLExpr nameExpr = condition.getNameExpr(); - SQLExpr valueExpr = condition.getValueExpr(); - if (valueExpr instanceof SQLNullExpr) { - codes.add("(" + "doc['" + nameExpr.toString() + "']" + ".empty)"); - } else { - codes.add("(" + Util.getScriptValueWithQuote(nameExpr, "'") + condition.getOpertatorSymbol() - + Util.getScriptValueWithQuote(valueExpr, "'") + ")"); - } - } + public String explain(Where where) throws SqlParseException { + List codes = new ArrayList<>(); + while (where.getWheres().size() == 1) { + where = where.getWheres().getFirst(); + } + explainWhere(codes, where); + String relation = where.getConn().name().equals("AND") ? 
" && " : " || "; + return Joiner.on(relation).join(codes); + } + + private void explainWhere(List codes, Where where) throws SqlParseException { + if (where instanceof Condition) { + Condition condition = (Condition) where; + + if (condition.getValue() instanceof ScriptFilter) { + codes.add("(" + ((ScriptFilter) condition.getValue()).getScript() + ")"); + } else if (condition.getOPERATOR() == Condition.OPERATOR.BETWEEN) { + Object[] objs = (Object[]) condition.getValue(); + codes.add( + "(" + + "doc['" + + condition.getName() + + "'].value >= " + + objs[0] + + " && doc['" + + condition.getName() + + "'].value <=" + + objs[1] + + ")"); + } else { + SQLExpr nameExpr = condition.getNameExpr(); + SQLExpr valueExpr = condition.getValueExpr(); + if (valueExpr instanceof SQLNullExpr) { + codes.add("(" + "doc['" + nameExpr.toString() + "']" + ".empty)"); } else { - for (Where subWhere : where.getWheres()) { - List subCodes = new ArrayList<>(); - explainWhere(subCodes, subWhere); - String relation = subWhere.getConn().name().equals("AND") ? "&&" : "||"; - codes.add(Joiner.on(relation).join(subCodes)); - } + codes.add( + "(" + + Util.getScriptValueWithQuote(nameExpr, "'") + + condition.getOpertatorSymbol() + + Util.getScriptValueWithQuote(valueExpr, "'") + + ")"); } + } + } else { + for (Where subWhere : where.getWheres()) { + List subCodes = new ArrayList<>(); + explainWhere(subCodes, subWhere); + String relation = subWhere.getConn().name().equals("AND") ? 
"&&" : "||"; + codes.add(Joiner.on(relation).join(subCodes)); + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java index 74945cb94f..27374849df 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -16,56 +15,55 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Razma Tazz on 14/04/2016. - */ +/** Created by Razma Tazz on 14/04/2016. */ public class ChildrenType { - public String field; - public String childType; - public Where where; - private boolean simple; + public String field; + public String childType; + public Where where; + private boolean simple; - public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { - if (!(expr instanceof SQLMethodInvokeExpr)) { - return false; - } - SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - - String methodName = method.getMethodName(); + public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { + if (!(expr instanceof SQLMethodInvokeExpr)) { + return false; + } + SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - if (!methodName.toLowerCase().equals("children")) { - return false; - } + String methodName = method.getMethodName(); - List parameters = method.getParameters(); + if (!methodName.toLowerCase().equals("children")) { + return false; + } - if (parameters.size() != 2) { - throw new SqlParseException( - "on children object only allowed 2 parameters (type, field)/(type, conditions...) 
"); - } + List parameters = method.getParameters(); - String type = Util.extendedToString(parameters.get(0)); - this.childType = type; + if (parameters.size() != 2) { + throw new SqlParseException( + "on children object only allowed 2 parameters (type, field)/(type, conditions...) "); + } - SQLExpr secondParameter = parameters.get(1); - if (secondParameter instanceof SQLTextLiteralExpr || secondParameter instanceof SQLIdentifierExpr - || secondParameter instanceof SQLPropertyExpr) { - this.field = Util.extendedToString(secondParameter); - this.simple = true; - } else { - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(secondParameter, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - this.where = where; - simple = false; - } + String type = Util.extendedToString(parameters.get(0)); + this.childType = type; - return true; + SQLExpr secondParameter = parameters.get(1); + if (secondParameter instanceof SQLTextLiteralExpr + || secondParameter instanceof SQLIdentifierExpr + || secondParameter instanceof SQLPropertyExpr) { + this.field = Util.extendedToString(secondParameter); + this.simple = true; + } else { + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(secondParameter, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + this.where = where; + simple = false; } - public boolean isSimple() { - return simple; - } + return true; + } + + public boolean isSimple() { + return simple; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java index 8720c3ba85..67b49fb4ad 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static com.alibaba.druid.sql.parser.CharTypes.isFirstIdentifierChar; @@ -14,86 +13,82 @@ import com.alibaba.druid.sql.parser.ParserException; import com.alibaba.druid.sql.parser.Token; -/** - * Created by Eliran on 18/8/2015. - */ +/** Created by Eliran on 18/8/2015. */ public class ElasticLexer extends MySqlLexer { - public ElasticLexer(String input) { - super(input); - } + public ElasticLexer(String input) { + super(input); + } + public ElasticLexer(char[] input, int inputLength, boolean skipComment) { + super(input, inputLength, skipComment); + } - public ElasticLexer(char[] input, int inputLength, boolean skipComment) { - super(input, inputLength, skipComment); - } + public void scanIdentifier() { + final char first = ch; + + if (ch == '`') { - public void scanIdentifier() { - final char first = ch; + mark = pos; + bufPos = 1; + char ch; + for (; ; ) { + ch = charAt(++pos); if (ch == '`') { + bufPos++; + ch = charAt(++pos); + break; + } else if (ch == EOI) { + throw new ParserException("illegal identifier"); + } - mark = pos; - bufPos = 1; - char ch; - for (; ; ) { - ch = charAt(++pos); - - if (ch == '`') { - bufPos++; - ch = charAt(++pos); - break; - } else if (ch == EOI) { - throw new ParserException("illegal identifier"); - } - - bufPos++; - continue; - } - - this.ch = charAt(pos); - - stringVal = subString(mark, bufPos); - Token tok = keywods.getKeyword(stringVal); - if (tok != null) { - token = tok; - } else { - token = Token.IDENTIFIER; - } - } else { - - final boolean firstFlag = isFirstIdentifierChar(first); - if (!firstFlag) { - throw new ParserException("illegal identifier"); - } - - mark = pos; - bufPos = 1; - char ch; - for (; ; ) { - ch = charAt(++pos); - - if (!isElasticIdentifierChar(ch)) { - break; - } - - bufPos++; - continue; - } - - this.ch = charAt(pos); - - stringVal = addSymbol(); - Token tok = keywods.getKeyword(stringVal); - if (tok != null) { - 
token = tok; - } else { - token = Token.IDENTIFIER; - } + bufPos++; + continue; + } + + this.ch = charAt(pos); + + stringVal = subString(mark, bufPos); + Token tok = keywods.getKeyword(stringVal); + if (tok != null) { + token = tok; + } else { + token = Token.IDENTIFIER; + } + } else { + + final boolean firstFlag = isFirstIdentifierChar(first); + if (!firstFlag) { + throw new ParserException("illegal identifier"); + } + + mark = pos; + bufPos = 1; + char ch; + for (; ; ) { + ch = charAt(++pos); + + if (!isElasticIdentifierChar(ch)) { + break; } - } + bufPos++; + continue; + } - private boolean isElasticIdentifierChar(char ch) { - return ch == '*' || ch == ':' || ch == '-' || ch == '.' || ch == ';' || isIdentifierChar(ch); + this.ch = charAt(pos); + + stringVal = addSymbol(); + Token tok = keywods.getKeyword(stringVal); + if (tok != null) { + token = tok; + } else { + token = Token.IDENTIFIER; + } } + } + + private boolean isElasticIdentifierChar(char ch) { + return ch == '*' || ch == ':' || ch == '-' || ch == '.' 
|| ch == ';' || isIdentifierChar(ch); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java index 24194e8de5..57af269001 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.alibaba.druid.sql.ast.SQLExpr; @@ -38,457 +37,462 @@ import org.opensearch.sql.legacy.query.maker.AggMaker; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Transform SQL query to OpenSearch aggregations query - */ +/** Transform SQL query to OpenSearch aggregations query */ public class AggregationQueryAction extends QueryAction { - private final Select select; - private AggMaker aggMaker = new AggMaker(); - private SearchRequestBuilder request; - - public AggregationQueryAction(Client client, Select select) { - super(client, select); - this.select = select; - } - - @Override - public SqlOpenSearchRequestBuilder explain() throws SqlParseException { - this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - - if (select.getRowCount() == null) { - select.setRowCount(Select.DEFAULT_LIMIT); - } - - setIndicesAndTypes(); - - setWhere(select.getWhere()); - AggregationBuilder lastAgg = null; - - for (List groupBy : select.getGroupBys()) { - if (!groupBy.isEmpty()) { - Field field = groupBy.get(0); - - //make groupby can reference to field alias - lastAgg = getGroupAgg(field, select); - - if (lastAgg instanceof TermsAggregationBuilder) { - - // TODO: Consider removing that condition - // in theory we should be able to apply this for all types of fields, but - // this change requires too much of related integration tests (e.g. 
there are comparisons against - // raw javascript dsl, so I'd like to scope the changes as of now to one particular fix for - // scripted functions - - // the condition `field.getName().equals("script")` is to include the CAST cases, since the cast - // method is instance of MethodField with script. => corrects the shard size of CASTs - if (!(field instanceof MethodField) || field instanceof ScriptMethodField - || field.getName().equals("script")) { - //if limit size is too small, increasing shard size is required - if (select.getRowCount() < 200) { - ((TermsAggregationBuilder) lastAgg).shardSize(2000); - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.SHARD_SIZE) { - if (hint.getParams() != null && hint.getParams().length != 0 - && hint.getParams()[0] != null) { - ((TermsAggregationBuilder) lastAgg).shardSize((Integer) hint.getParams()[0]); - } - } - } - } - - if (select.getRowCount() > 0) { - ((TermsAggregationBuilder) lastAgg).size(select.getRowCount()); - } - } - } - - if (field.isNested()) { - AggregationBuilder nestedBuilder = createNestedAggregation(field); - - if (insertFilterIfExistsAfter(lastAgg, groupBy, nestedBuilder, 1)) { - groupBy.remove(1); - } else { - nestedBuilder.subAggregation(lastAgg); - } + private final Select select; + private AggMaker aggMaker = new AggMaker(); + private SearchRequestBuilder request; - request.addAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); - } else if (field.isChildren()) { - AggregationBuilder childrenBuilder = createChildrenAggregation(field); + public AggregationQueryAction(Client client, Select select) { + super(client, select); + this.select = select; + } - if (insertFilterIfExistsAfter(lastAgg, groupBy, childrenBuilder, 1)) { - groupBy.remove(1); - } else { - childrenBuilder.subAggregation(lastAgg); - } + @Override + public SqlOpenSearchRequestBuilder explain() throws SqlParseException { + this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - 
request.addAggregation(childrenBuilder); - } else { - request.addAggregation(lastAgg); - } + if (select.getRowCount() == null) { + select.setRowCount(Select.DEFAULT_LIMIT); + } - for (int i = 1; i < groupBy.size(); i++) { - field = groupBy.get(i); - AggregationBuilder subAgg = getGroupAgg(field, select); - //ES5.0 termsaggregation with size = 0 not supported anymore -// if (subAgg instanceof TermsAggregationBuilder && !(field instanceof MethodField)) { - -// //((TermsAggregationBuilder) subAgg).size(0); -// } - - if (field.isNested()) { - AggregationBuilder nestedBuilder = createNestedAggregation(field); - - if (insertFilterIfExistsAfter(subAgg, groupBy, nestedBuilder, i + 1)) { - groupBy.remove(i + 1); - i++; - } else { - nestedBuilder.subAggregation(subAgg); - } - - lastAgg.subAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); - } else if (field.isChildren()) { - AggregationBuilder childrenBuilder = createChildrenAggregation(field); - - if (insertFilterIfExistsAfter(subAgg, groupBy, childrenBuilder, i + 1)) { - groupBy.remove(i + 1); - i++; - } else { - childrenBuilder.subAggregation(subAgg); - } - - lastAgg.subAggregation(childrenBuilder); - } else { - lastAgg.subAggregation(subAgg); - } - - lastAgg = subAgg; + setIndicesAndTypes(); + + setWhere(select.getWhere()); + AggregationBuilder lastAgg = null; + + for (List groupBy : select.getGroupBys()) { + if (!groupBy.isEmpty()) { + Field field = groupBy.get(0); + + // make groupby can reference to field alias + lastAgg = getGroupAgg(field, select); + + if (lastAgg instanceof TermsAggregationBuilder) { + + // TODO: Consider removing that condition + // in theory we should be able to apply this for all types of fields, but + // this change requires too much of related integration tests (e.g. 
there are comparisons + // against + // raw javascript dsl, so I'd like to scope the changes as of now to one particular fix + // for + // scripted functions + + // the condition `field.getName().equals("script")` is to include the CAST cases, since + // the cast + // method is instance of MethodField with script. => corrects the shard size of CASTs + if (!(field instanceof MethodField) + || field instanceof ScriptMethodField + || field.getName().equals("script")) { + // if limit size is too small, increasing shard size is required + if (select.getRowCount() < 200) { + ((TermsAggregationBuilder) lastAgg).shardSize(2000); + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.SHARD_SIZE) { + if (hint.getParams() != null + && hint.getParams().length != 0 + && hint.getParams()[0] != null) { + ((TermsAggregationBuilder) lastAgg).shardSize((Integer) hint.getParams()[0]); + } } + } } - // explain the field from SELECT and HAVING clause - List combinedList = new ArrayList<>(); - combinedList.addAll(select.getFields()); - if (select.getHaving() != null) { - combinedList.addAll(select.getHaving().getHavingFields()); + if (select.getRowCount() > 0) { + ((TermsAggregationBuilder) lastAgg).size(select.getRowCount()); } - // add aggregation function to each groupBy - explanFields(request, combinedList, lastAgg); - - explainHaving(lastAgg); + } } - if (select.getGroupBys().size() < 1) { - //add aggregation when having no groupBy script - explanFields(request, select.getFields(), lastAgg); + if (field.isNested()) { + AggregationBuilder nestedBuilder = createNestedAggregation(field); - } + if (insertFilterIfExistsAfter(lastAgg, groupBy, nestedBuilder, 1)) { + groupBy.remove(1); + } else { + nestedBuilder.subAggregation(lastAgg); + } - Map groupMap = aggMaker.getGroupMap(); - // add field - if (select.getFields().size() > 0) { - setFields(select.getFields()); -// explanFields(request, select.getFields(), lastAgg); - } + 
request.addAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); + } else if (field.isChildren()) { + AggregationBuilder childrenBuilder = createChildrenAggregation(field); - // add order - if (lastAgg != null && select.getOrderBys().size() > 0) { - for (Order order : select.getOrderBys()) { - - // check "standard" fields - KVValue temp = groupMap.get(order.getName()); - if (temp != null) { - TermsAggregationBuilder termsBuilder = (TermsAggregationBuilder) temp.value; - switch (temp.key) { - case "COUNT": - termsBuilder.order(BucketOrder.count(isASC(order))); - break; - case "KEY": - termsBuilder.order(BucketOrder.key(isASC(order))); - break; - case "FIELD": - termsBuilder.order(BucketOrder.aggregation(order.getName(), isASC(order))); - break; - default: - throw new SqlParseException(order.getName() + " can not to order"); - } - } else if (order.isScript()) { - // Do not add scripted fields into sort, they must be sorted inside of aggregation - } else { - // TODO: Is there a legit case when we want to add field into sort for aggregation queries? 
- request.addSort(order.getName(), SortOrder.valueOf(order.getType())); - } - } + if (insertFilterIfExistsAfter(lastAgg, groupBy, childrenBuilder, 1)) { + groupBy.remove(1); + } else { + childrenBuilder.subAggregation(lastAgg); + } + + request.addAggregation(childrenBuilder); + } else { + request.addAggregation(lastAgg); } - setLimitFromHint(this.select.getHints()); + for (int i = 1; i < groupBy.size(); i++) { + field = groupBy.get(i); + AggregationBuilder subAgg = getGroupAgg(field, select); + // ES5.0 termsaggregation with size = 0 not supported anymore + // if (subAgg instanceof TermsAggregationBuilder && !(field instanceof + // MethodField)) { - request.setSearchType(SearchType.DEFAULT); - updateRequestWithIndexAndRoutingOptions(select, request); - updateRequestWithHighlight(select, request); - updateRequestWithCollapse(select, request); - updateRequestWithPostFilter(select, request); - return new SqlOpenSearchRequestBuilder(request); - } + // //((TermsAggregationBuilder) subAgg).size(0); + // } - private AggregationBuilder getGroupAgg(Field groupByField, Select select) throws SqlParseException { - AggregationBuilder lastAgg = null; - Field shadowField = null; - - for (Field selectField : select.getFields()) { - if (selectField instanceof MethodField && selectField.getName().equals("script")) { - MethodField scriptField = (MethodField) selectField; - for (KVValue kv : scriptField.getParams()) { - if (kv.value.equals(groupByField.getName())) { - shadowField = scriptField; - break; - } - } - } - } + if (field.isNested()) { + AggregationBuilder nestedBuilder = createNestedAggregation(field); - if (shadowField == null) { - for (Field selectField: select.getFields()) { - if (selectField.getAlias() != null - && (groupByField.getName().equals(selectField.getAlias()) - || groupByField.getExpression().equals(selectField.getExpression()))) { - shadowField = selectField; - } + if (insertFilterIfExistsAfter(subAgg, groupBy, nestedBuilder, i + 1)) { + groupBy.remove(i + 1); 
+ i++; + } else { + nestedBuilder.subAggregation(subAgg); } - } + lastAgg.subAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); + } else if (field.isChildren()) { + AggregationBuilder childrenBuilder = createChildrenAggregation(field); - if (null != shadowField) { - groupByField.setAlias(shadowField.getAlias()); - groupByField = shadowField; - } + if (insertFilterIfExistsAfter(subAgg, groupBy, childrenBuilder, i + 1)) { + groupBy.remove(i + 1); + i++; + } else { + childrenBuilder.subAggregation(subAgg); + } - lastAgg = aggMaker.makeGroupAgg(groupByField); + lastAgg.subAggregation(childrenBuilder); + } else { + lastAgg.subAggregation(subAgg); + } - // find if we have order for that aggregation. As of now only special case for script fields - if (groupByField.isScriptField()) { - addOrderByScriptFieldIfPresent(select, (TermsAggregationBuilder) lastAgg, groupByField.getExpression()); + lastAgg = subAgg; } + } + + // explain the field from SELECT and HAVING clause + List combinedList = new ArrayList<>(); + combinedList.addAll(select.getFields()); + if (select.getHaving() != null) { + combinedList.addAll(select.getHaving().getHavingFields()); + } + // add aggregation function to each groupBy + explanFields(request, combinedList, lastAgg); + + explainHaving(lastAgg); + } - return lastAgg; + if (select.getGroupBys().size() < 1) { + // add aggregation when having no groupBy script + explanFields(request, select.getFields(), lastAgg); } - private void addOrderByScriptFieldIfPresent(Select select, TermsAggregationBuilder groupByAggregation, - SQLExpr groupByExpression) { - // TODO: Explore other ways to correlate different fields/functions in the query (params?) - // This feels like a hacky way, but it's the best that could be done now. 
- select - .getOrderBys() - .stream() - .filter(order -> groupByExpression.equals(order.getSortField().getExpression())) - .findFirst() - .ifPresent(orderForGroupBy -> groupByAggregation.order(BucketOrder.key(isASC(orderForGroupBy)))); + Map groupMap = aggMaker.getGroupMap(); + // add field + if (select.getFields().size() > 0) { + setFields(select.getFields()); + // explanFields(request, select.getFields(), lastAgg); } - private AggregationBuilder wrapNestedIfNeeded(AggregationBuilder nestedBuilder, boolean reverseNested) { - if (!reverseNested) { - return nestedBuilder; + // add order + if (lastAgg != null && select.getOrderBys().size() > 0) { + for (Order order : select.getOrderBys()) { + + // check "standard" fields + KVValue temp = groupMap.get(order.getName()); + if (temp != null) { + TermsAggregationBuilder termsBuilder = (TermsAggregationBuilder) temp.value; + switch (temp.key) { + case "COUNT": + termsBuilder.order(BucketOrder.count(isASC(order))); + break; + case "KEY": + termsBuilder.order(BucketOrder.key(isASC(order))); + break; + case "FIELD": + termsBuilder.order(BucketOrder.aggregation(order.getName(), isASC(order))); + break; + default: + throw new SqlParseException(order.getName() + " can not to order"); + } + } else if (order.isScript()) { + // Do not add scripted fields into sort, they must be sorted inside of aggregation + } else { + // TODO: Is there a legit case when we want to add field into sort for aggregation + // queries? 
+ request.addSort(order.getName(), SortOrder.valueOf(order.getType())); } - if (reverseNested && !(nestedBuilder instanceof NestedAggregationBuilder)) { - return nestedBuilder; + } + } + + setLimitFromHint(this.select.getHints()); + + request.setSearchType(SearchType.DEFAULT); + updateRequestWithIndexAndRoutingOptions(select, request); + updateRequestWithHighlight(select, request); + updateRequestWithCollapse(select, request); + updateRequestWithPostFilter(select, request); + return new SqlOpenSearchRequestBuilder(request); + } + + private AggregationBuilder getGroupAgg(Field groupByField, Select select) + throws SqlParseException { + AggregationBuilder lastAgg = null; + Field shadowField = null; + + for (Field selectField : select.getFields()) { + if (selectField instanceof MethodField && selectField.getName().equals("script")) { + MethodField scriptField = (MethodField) selectField; + for (KVValue kv : scriptField.getParams()) { + if (kv.value.equals(groupByField.getName())) { + shadowField = scriptField; + break; + } } - //we need to jump back to root - return AggregationBuilders.reverseNested(nestedBuilder.getName() + "_REVERSED").subAggregation(nestedBuilder); + } } - private AggregationBuilder createNestedAggregation(Field field) { - AggregationBuilder nestedBuilder; + if (shadowField == null) { + for (Field selectField : select.getFields()) { + if (selectField.getAlias() != null + && (groupByField.getName().equals(selectField.getAlias()) + || groupByField.getExpression().equals(selectField.getExpression()))) { + shadowField = selectField; + } + } + } - String nestedPath = field.getNestedPath(); + if (null != shadowField) { + groupByField.setAlias(shadowField.getAlias()); + groupByField = shadowField; + } - if (field.isReverseNested()) { - if (nestedPath == null || !nestedPath.startsWith("~")) { - ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = - AggregationBuilders.reverseNested(getNestedAggName(field)); - if (nestedPath != null) { - 
reverseNestedAggregationBuilder.path(nestedPath); - } - return reverseNestedAggregationBuilder; - } - nestedPath = nestedPath.substring(1); - } + lastAgg = aggMaker.makeGroupAgg(groupByField); - nestedBuilder = AggregationBuilders.nested(getNestedAggName(field), nestedPath); + // find if we have order for that aggregation. As of now only special case for script fields + if (groupByField.isScriptField()) { + addOrderByScriptFieldIfPresent( + select, (TermsAggregationBuilder) lastAgg, groupByField.getExpression()); + } - return nestedBuilder; + return lastAgg; + } + + private void addOrderByScriptFieldIfPresent( + Select select, TermsAggregationBuilder groupByAggregation, SQLExpr groupByExpression) { + // TODO: Explore other ways to correlate different fields/functions in the query (params?) + // This feels like a hacky way, but it's the best that could be done now. + select.getOrderBys().stream() + .filter(order -> groupByExpression.equals(order.getSortField().getExpression())) + .findFirst() + .ifPresent( + orderForGroupBy -> groupByAggregation.order(BucketOrder.key(isASC(orderForGroupBy)))); + } + + private AggregationBuilder wrapNestedIfNeeded( + AggregationBuilder nestedBuilder, boolean reverseNested) { + if (!reverseNested) { + return nestedBuilder; + } + if (reverseNested && !(nestedBuilder instanceof NestedAggregationBuilder)) { + return nestedBuilder; + } + // we need to jump back to root + return AggregationBuilders.reverseNested(nestedBuilder.getName() + "_REVERSED") + .subAggregation(nestedBuilder); + } + + private AggregationBuilder createNestedAggregation(Field field) { + AggregationBuilder nestedBuilder; + + String nestedPath = field.getNestedPath(); + + if (field.isReverseNested()) { + if (nestedPath == null || !nestedPath.startsWith("~")) { + ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = + AggregationBuilders.reverseNested(getNestedAggName(field)); + if (nestedPath != null) { + reverseNestedAggregationBuilder.path(nestedPath); + } 
+ return reverseNestedAggregationBuilder; + } + nestedPath = nestedPath.substring(1); } - private AggregationBuilder createChildrenAggregation(Field field) { - AggregationBuilder childrenBuilder; + nestedBuilder = AggregationBuilders.nested(getNestedAggName(field), nestedPath); - String childType = field.getChildType(); + return nestedBuilder; + } - childrenBuilder = JoinAggregationBuilders.children(getChildrenAggName(field), childType); + private AggregationBuilder createChildrenAggregation(Field field) { + AggregationBuilder childrenBuilder; - return childrenBuilder; - } + String childType = field.getChildType(); - private String getNestedAggName(Field field) { - String prefix; + childrenBuilder = JoinAggregationBuilders.children(getChildrenAggName(field), childType); - if (field instanceof MethodField) { - String nestedPath = field.getNestedPath(); - if (nestedPath != null) { - prefix = nestedPath; - } else { - prefix = field.getAlias(); - } - } else { - prefix = field.getName(); - } - return prefix + "@NESTED"; - } + return childrenBuilder; + } - private String getChildrenAggName(Field field) { - String prefix; + private String getNestedAggName(Field field) { + String prefix; - if (field instanceof MethodField) { - String childType = field.getChildType(); + if (field instanceof MethodField) { + String nestedPath = field.getNestedPath(); + if (nestedPath != null) { + prefix = nestedPath; + } else { + prefix = field.getAlias(); + } + } else { + prefix = field.getName(); + } + return prefix + "@NESTED"; + } + + private String getChildrenAggName(Field field) { + String prefix; + + if (field instanceof MethodField) { + String childType = field.getChildType(); + + if (childType != null) { + prefix = childType; + } else { + prefix = field.getAlias(); + } + } else { + prefix = field.getName(); + } - if (childType != null) { - prefix = childType; - } else { - prefix = field.getAlias(); - } - } else { - prefix = field.getName(); - } + return prefix + "@CHILDREN"; + } - 
return prefix + "@CHILDREN"; + private boolean insertFilterIfExistsAfter( + AggregationBuilder agg, List groupBy, AggregationBuilder builder, int nextPosition) + throws SqlParseException { + if (groupBy.size() <= nextPosition) { + return false; } - - private boolean insertFilterIfExistsAfter(AggregationBuilder agg, List groupBy, AggregationBuilder builder, - int nextPosition) throws SqlParseException { - if (groupBy.size() <= nextPosition) { - return false; - } - Field filterFieldCandidate = groupBy.get(nextPosition); - if (!(filterFieldCandidate instanceof MethodField)) { - return false; - } - MethodField methodField = (MethodField) filterFieldCandidate; - if (!methodField.getName().toLowerCase().equals("filter")) { - return false; - } - builder.subAggregation(aggMaker.makeGroupAgg(filterFieldCandidate).subAggregation(agg)); - return true; + Field filterFieldCandidate = groupBy.get(nextPosition); + if (!(filterFieldCandidate instanceof MethodField)) { + return false; } - - private AggregationBuilder updateAggIfNested(AggregationBuilder lastAgg, Field field) { - if (field.isNested()) { - lastAgg = AggregationBuilders.nested(field.getName() + "Nested", field.getNestedPath()) - .subAggregation(lastAgg); - } - return lastAgg; + MethodField methodField = (MethodField) filterFieldCandidate; + if (!methodField.getName().toLowerCase().equals("filter")) { + return false; } - - private boolean isASC(Order order) { - return "ASC".equals(order.getType()); + builder.subAggregation(aggMaker.makeGroupAgg(filterFieldCandidate).subAggregation(agg)); + return true; + } + + private AggregationBuilder updateAggIfNested(AggregationBuilder lastAgg, Field field) { + if (field.isNested()) { + lastAgg = + AggregationBuilders.nested(field.getName() + "Nested", field.getNestedPath()) + .subAggregation(lastAgg); } + return lastAgg; + } - private void setFields(List fields) { - if (select.getFields().size() > 0) { - ArrayList includeFields = new ArrayList<>(); + private boolean isASC(Order 
order) { + return "ASC".equals(order.getType()); + } - for (Field field : fields) { - if (field != null) { - includeFields.add(field.getName()); - } - } + private void setFields(List fields) { + if (select.getFields().size() > 0) { + ArrayList includeFields = new ArrayList<>(); - request.setFetchSource(includeFields.toArray(new String[0]), null); + for (Field field : fields) { + if (field != null) { + includeFields.add(field.getName()); } - } + } - private void explanFields(SearchRequestBuilder request, List fields, AggregationBuilder groupByAgg) - throws SqlParseException { - for (Field field : fields) { - if (field instanceof MethodField) { - - if (field.getName().equals("script")) { - request.addStoredField(field.getAlias()); - DefaultQueryAction defaultQueryAction = new DefaultQueryAction(client, select); - defaultQueryAction.initialize(request); - List tempFields = Lists.newArrayList(field); - defaultQueryAction.setFields(tempFields); - continue; - } - - AggregationBuilder makeAgg = aggMaker - .withWhere(select.getWhere()) - .makeFieldAgg((MethodField) field, groupByAgg); - if (groupByAgg != null) { - groupByAgg.subAggregation(makeAgg); - } else { - request.addAggregation(makeAgg); - } - } else if (field != null) { - request.addStoredField(field.getName()); - } else { - throw new SqlParseException("it did not support this field method " + field); - } - } + request.setFetchSource(includeFields.toArray(new String[0]), null); } - - private void explainHaving(AggregationBuilder lastAgg) throws SqlParseException { - Having having = select.getHaving(); - if (having != null) { - having.explain(lastAgg, select.getFields()); + } + + private void explanFields( + SearchRequestBuilder request, List fields, AggregationBuilder groupByAgg) + throws SqlParseException { + for (Field field : fields) { + if (field instanceof MethodField) { + + if (field.getName().equals("script")) { + request.addStoredField(field.getAlias()); + DefaultQueryAction defaultQueryAction = new 
DefaultQueryAction(client, select); + defaultQueryAction.initialize(request); + List tempFields = Lists.newArrayList(field); + defaultQueryAction.setFields(tempFields); + continue; } - } - /** - * Create filters based on - * the Where clause. - * - * @param where the 'WHERE' part of the SQL query. - * @throws SqlParseException - */ - private void setWhere(Where where) throws SqlParseException { - BoolQueryBuilder boolQuery = null; - if (where != null) { - boolQuery = QueryMaker.explain(where, this.select.isQuery); - } - // Used to prevent NullPointerException in old tests as they do not set sqlRequest in QueryAction - if (sqlRequest != null) { - boolQuery = sqlRequest.checkAndAddFilter(boolQuery); + AggregationBuilder makeAgg = + aggMaker.withWhere(select.getWhere()).makeFieldAgg((MethodField) field, groupByAgg); + if (groupByAgg != null) { + groupByAgg.subAggregation(makeAgg); + } else { + request.addAggregation(makeAgg); } - request.setQuery(boolQuery); + } else if (field != null) { + request.addStoredField(field.getName()); + } else { + throw new SqlParseException("it did not support this field method " + field); + } } + } - - /** - * Set indices and types to the search request. - */ - private void setIndicesAndTypes() { - request.setIndices(query.getIndexArr()); + private void explainHaving(AggregationBuilder lastAgg) throws SqlParseException { + Having having = select.getHaving(); + if (having != null) { + having.explain(lastAgg, select.getFields()); } - - private void setLimitFromHint(List hints) { - int from = 0; - int size = 0; - for (Hint hint : hints) { - if (hint.getType() == HintType.DOCS_WITH_AGGREGATION) { - Integer[] params = (Integer[]) hint.getParams(); - if (params.length > 1) { - // if 2 or more are given, use the first as the from and the second as the size - // so it is the same as LIMIT from,size - // except written as /*! 
DOCS_WITH_AGGREGATION(from,size) */ - from = params[0]; - size = params[1]; - } else if (params.length == 1) { - // if only 1 parameter is given, use it as the size with a from of 0 - size = params[0]; - } - break; - } + } + + /** + * Create filters based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException + */ + private void setWhere(Where where) throws SqlParseException { + BoolQueryBuilder boolQuery = null; + if (where != null) { + boolQuery = QueryMaker.explain(where, this.select.isQuery); + } + // Used to prevent NullPointerException in old tests as they do not set sqlRequest in + // QueryAction + if (sqlRequest != null) { + boolQuery = sqlRequest.checkAndAddFilter(boolQuery); + } + request.setQuery(boolQuery); + } + + /** Set indices and types to the search request. */ + private void setIndicesAndTypes() { + request.setIndices(query.getIndexArr()); + } + + private void setLimitFromHint(List hints) { + int from = 0; + int size = 0; + for (Hint hint : hints) { + if (hint.getType() == HintType.DOCS_WITH_AGGREGATION) { + Integer[] params = (Integer[]) hint.getParams(); + if (params.length > 1) { + // if 2 or more are given, use the first as the from and the second as the size + // so it is the same as LIMIT from,size + // except written as /*! 
DOCS_WITH_AGGREGATION(from,size) */ + from = params[0]; + size = params[1]; + } else if (params.length == 1) { + // if only 1 parameter is given, use it as the size with a from of 0 + size = params[0]; } - request.setFrom(from); - request.setSize(size); + break; + } } + request.setFrom(from); + request.setSize(size); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java index 0ed5043ac8..18c9708df8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.alibaba.druid.sql.ast.SQLExpr; @@ -50,264 +49,268 @@ import org.opensearch.sql.legacy.rewriter.nestedfield.NestedFieldProjection; import org.opensearch.sql.legacy.utils.SQLFunctions; -/** - * Transform SQL query to standard OpenSearch search query - */ +/** Transform SQL query to standard OpenSearch search query */ public class DefaultQueryAction extends QueryAction { - private final Select select; - private SearchRequestBuilder request; - - private final List fieldNames = new LinkedList<>(); - - public DefaultQueryAction(Client client, Select select) { - super(client, select); - this.select = select; - } - - public void initialize(SearchRequestBuilder request) { - this.request = request; + private final Select select; + private SearchRequestBuilder request; + + private final List fieldNames = new LinkedList<>(); + + public DefaultQueryAction(Client client, Select select) { + super(client, select); + this.select = select; + } + + public void initialize(SearchRequestBuilder request) { + this.request = request; + } + + @Override + public SqlOpenSearchRequestBuilder explain() throws SqlParseException { + Objects.requireNonNull(this.sqlRequest, "SqlRequest is required for 
OpenSearch request build"); + buildRequest(); + checkAndSetScroll(); + return new SqlOpenSearchRequestBuilder(request); + } + + private void buildRequest() throws SqlParseException { + this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); + setIndicesAndTypes(); + setFields(select.getFields()); + setWhere(select.getWhere()); + setSorts(select.getOrderBys()); + updateRequestWithIndexAndRoutingOptions(select, request); + updateRequestWithHighlight(select, request); + updateRequestWithCollapse(select, request); + updateRequestWithPostFilter(select, request); + updateRequestWithInnerHits(select, request); + } + + @VisibleForTesting + public void checkAndSetScroll() { + LocalClusterState clusterState = LocalClusterState.state(); + + Integer fetchSize = sqlRequest.fetchSize(); + TimeValue timeValue = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + Integer rowCount = select.getRowCount(); + + if (checkIfScrollNeeded(fetchSize, rowCount)) { + Metrics.getInstance() + .getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_COUNT_TOTAL) + .increment(); + Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_TOTAL).increment(); + request.setSize(fetchSize).setScroll(timeValue); + } else { + request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); + setLimit(select.getOffset(), rowCount != null ? 
rowCount : Select.DEFAULT_LIMIT); } - - @Override - public SqlOpenSearchRequestBuilder explain() throws SqlParseException { - Objects.requireNonNull(this.sqlRequest, "SqlRequest is required for OpenSearch request build"); - buildRequest(); - checkAndSetScroll(); - return new SqlOpenSearchRequestBuilder(request); - } - - private void buildRequest() throws SqlParseException { - this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - setIndicesAndTypes(); - setFields(select.getFields()); - setWhere(select.getWhere()); - setSorts(select.getOrderBys()); - updateRequestWithIndexAndRoutingOptions(select, request); - updateRequestWithHighlight(select, request); - updateRequestWithCollapse(select, request); - updateRequestWithPostFilter(select, request); - updateRequestWithInnerHits(select, request); - } - - @VisibleForTesting - public void checkAndSetScroll() { - LocalClusterState clusterState = LocalClusterState.state(); - - Integer fetchSize = sqlRequest.fetchSize(); - TimeValue timeValue = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - Integer rowCount = select.getRowCount(); - - if (checkIfScrollNeeded(fetchSize, rowCount)) { - Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_COUNT_TOTAL).increment(); - Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_TOTAL).increment(); - request.setSize(fetchSize).setScroll(timeValue); - } else { - request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); - setLimit(select.getOffset(), rowCount != null ? rowCount : Select.DEFAULT_LIMIT); + } + + private boolean checkIfScrollNeeded(Integer fetchSize, Integer rowCount) { + return (format != null && format.equals(Format.JDBC)) + && fetchSize > 0 + && (rowCount == null || (rowCount > fetchSize)); + } + + @Override + public Optional> getFieldNames() { + return Optional.of(fieldNames); + } + + public Select getSelect() { + return select; + } + + /** Set indices and types to the search request. 
*/ + private void setIndicesAndTypes() { + request.setIndices(query.getIndexArr()); + } + + /** + * Set source filtering on a search request. + * + * @param fields list of fields to source filter. + */ + public void setFields(List fields) throws SqlParseException { + + if (!select.getFields().isEmpty() && !select.isSelectAll()) { + ArrayList includeFields = new ArrayList<>(); + ArrayList excludeFields = new ArrayList<>(); + + for (Field field : fields) { + if (field instanceof MethodField) { + MethodField method = (MethodField) field; + if (method.getName().toLowerCase().equals("script")) { + handleScriptField(method); + if (method.getExpression() instanceof SQLCastExpr) { + includeFields.add(method.getParams().get(0).toString()); + } + } else if (method.getName().equalsIgnoreCase("include")) { + for (KVValue kvValue : method.getParams()) { + includeFields.add(kvValue.value.toString()); + } + } else if (method.getName().equalsIgnoreCase("exclude")) { + for (KVValue kvValue : method.getParams()) { + excludeFields.add(kvValue.value.toString()); + } + } + } else if (field != null) { + if (isNotNested(field)) { + includeFields.add(field.getName()); + } } - } - - - private boolean checkIfScrollNeeded(Integer fetchSize, Integer rowCount) { - return (format != null && format.equals(Format.JDBC)) - && fetchSize > 0 - && (rowCount == null || (rowCount > fetchSize)); - } + } - @Override - public Optional> getFieldNames() { - return Optional.of(fieldNames); + fieldNames.addAll(includeFields); + request.setFetchSource( + includeFields.toArray(new String[0]), excludeFields.toArray(new String[0])); } + } + private void handleScriptField(final MethodField method) throws SqlParseException { - public Select getSelect() { - return select; - } - - /** - * Set indices and types to the search request. - */ - private void setIndicesAndTypes() { - request.setIndices(query.getIndexArr()); - } - - /** - * Set source filtering on a search request. 
- * - * @param fields list of fields to source filter. - */ - public void setFields(List fields) throws SqlParseException { - - if (!select.getFields().isEmpty() && !select.isSelectAll()) { - ArrayList includeFields = new ArrayList<>(); - ArrayList excludeFields = new ArrayList<>(); - - for (Field field : fields) { - if (field instanceof MethodField) { - MethodField method = (MethodField) field; - if (method.getName().toLowerCase().equals("script")) { - handleScriptField(method); - if (method.getExpression() instanceof SQLCastExpr) { - includeFields.add(method.getParams().get(0).toString()); - } - } else if (method.getName().equalsIgnoreCase("include")) { - for (KVValue kvValue : method.getParams()) { - includeFields.add(kvValue.value.toString()); - } - } else if (method.getName().equalsIgnoreCase("exclude")) { - for (KVValue kvValue : method.getParams()) { - excludeFields.add(kvValue.value.toString()); - } - } - } else if (field != null) { - if (isNotNested(field)) { - includeFields.add(field.getName()); - } - } - } + final List params = method.getParams(); + final int numOfParams = params.size(); - fieldNames.addAll(includeFields); - request.setFetchSource(includeFields.toArray(new String[0]), excludeFields.toArray(new String[0])); - } + if (2 != numOfParams && 3 != numOfParams) { + throw new SqlParseException( + "scripted_field only allows 'script(name,script)' " + "or 'script(name,lang,script)'"); } - private void handleScriptField(final MethodField method) throws SqlParseException { - - final List params = method.getParams(); - final int numOfParams = params.size(); - - if (2 != numOfParams && 3 != numOfParams) { - throw new SqlParseException("scripted_field only allows 'script(name,script)' " - + "or 'script(name,lang,script)'"); - } - - final String fieldName = params.get(0).value.toString(); - fieldNames.add(fieldName); - - final String secondParam = params.get(1).value.toString(); - final Script script = (2 == numOfParams) ? 
new Script(secondParam) : - new Script(ScriptType.INLINE, secondParam, params.get(2).value.toString(), Collections.emptyMap()); - request.addScriptField(fieldName, script); + final String fieldName = params.get(0).value.toString(); + fieldNames.add(fieldName); + + final String secondParam = params.get(1).value.toString(); + final Script script = + (2 == numOfParams) + ? new Script(secondParam) + : new Script( + ScriptType.INLINE, + secondParam, + params.get(2).value.toString(), + Collections.emptyMap()); + request.addScriptField(fieldName, script); + } + + /** + * Create filters or queries based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException if the where clause does not represent valid sql + */ + private void setWhere(Where where) throws SqlParseException { + BoolQueryBuilder boolQuery = null; + if (where != null) { + boolQuery = QueryMaker.explain(where, this.select.isQuery); } - - /** - * Create filters or queries based on the Where clause. - * - * @param where the 'WHERE' part of the SQL query. - * @throws SqlParseException if the where clause does not represent valid sql - */ - private void setWhere(Where where) throws SqlParseException { - BoolQueryBuilder boolQuery = null; - if (where != null) { - boolQuery = QueryMaker.explain(where, this.select.isQuery); - } - // Used to prevent NullPointerException in old tests as they do not set sqlRequest in QueryAction - if (sqlRequest != null) { - boolQuery = sqlRequest.checkAndAddFilter(boolQuery); - } - request.setQuery(boolQuery); + // Used to prevent NullPointerException in old tests as they do not set sqlRequest in + // QueryAction + if (sqlRequest != null) { + boolQuery = sqlRequest.checkAndAddFilter(boolQuery); } - - /** - * Add sorts to the OpenSearch query based on the 'ORDER BY' clause. 
- * - * @param orderBys list of Order object - */ - private void setSorts(List orderBys) { - Map sortBuilderMap = new HashMap<>(); - - for (Order order : orderBys) { - String orderByName = order.getName(); - SortOrder sortOrder = SortOrder.valueOf(order.getType()); - - if (order.getNestedPath() != null) { - request.addSort( - SortBuilders.fieldSort(orderByName) - .order(sortOrder) - .setNestedSort(new NestedSortBuilder(order.getNestedPath()))); - } else if (order.isScript()) { - // TODO: Investigate how to find the type of expression (string or number) - // As of now this shouldn't be a problem, because the support is for date_format function - request.addSort( - SortBuilders - .scriptSort(new Script(orderByName), getScriptSortType(order)) - .order(sortOrder)); - } else if (orderByName.equals(ScoreSortBuilder.NAME)) { - request.addSort(orderByName, sortOrder); - } else { - FieldSortBuilder fieldSortBuilder = sortBuilderMap.computeIfAbsent(orderByName, key -> { - FieldSortBuilder fs = SortBuilders.fieldSort(key); - request.addSort(fs); - return fs; + request.setQuery(boolQuery); + } + + /** + * Add sorts to the OpenSearch query based on the 'ORDER BY' clause. 
+ * + * @param orderBys list of Order object + */ + private void setSorts(List orderBys) { + Map sortBuilderMap = new HashMap<>(); + + for (Order order : orderBys) { + String orderByName = order.getName(); + SortOrder sortOrder = SortOrder.valueOf(order.getType()); + + if (order.getNestedPath() != null) { + request.addSort( + SortBuilders.fieldSort(orderByName) + .order(sortOrder) + .setNestedSort(new NestedSortBuilder(order.getNestedPath()))); + } else if (order.isScript()) { + // TODO: Investigate how to find the type of expression (string or number) + // As of now this shouldn't be a problem, because the support is for date_format function + request.addSort( + SortBuilders.scriptSort(new Script(orderByName), getScriptSortType(order)) + .order(sortOrder)); + } else if (orderByName.equals(ScoreSortBuilder.NAME)) { + request.addSort(orderByName, sortOrder); + } else { + FieldSortBuilder fieldSortBuilder = + sortBuilderMap.computeIfAbsent( + orderByName, + key -> { + FieldSortBuilder fs = SortBuilders.fieldSort(key); + request.addSort(fs); + return fs; }); - setSortParams(fieldSortBuilder, order); - } - } + setSortParams(fieldSortBuilder, order); + } } + } + private void setSortParams(FieldSortBuilder fieldSortBuilder, Order order) { + fieldSortBuilder.order(SortOrder.valueOf(order.getType())); - private void setSortParams(FieldSortBuilder fieldSortBuilder, Order order) { - fieldSortBuilder.order(SortOrder.valueOf(order.getType())); - - SQLExpr expr = order.getSortField().getExpression(); - if (expr instanceof SQLBinaryOpExpr) { - // we set SQLBinaryOpExpr in Field.setExpression() to support ORDER by IS NULL/IS NOT NULL - fieldSortBuilder.missing(getNullOrderString((SQLBinaryOpExpr) expr)); - } + SQLExpr expr = order.getSortField().getExpression(); + if (expr instanceof SQLBinaryOpExpr) { + // we set SQLBinaryOpExpr in Field.setExpression() to support ORDER by IS NULL/IS NOT NULL + fieldSortBuilder.missing(getNullOrderString((SQLBinaryOpExpr) expr)); } - - private 
String getNullOrderString(SQLBinaryOpExpr expr) { - SQLBinaryOperator operator = expr.getOperator(); - return operator == SQLBinaryOperator.IsNot ? "_first" : "_last"; + } + + private String getNullOrderString(SQLBinaryOpExpr expr) { + SQLBinaryOperator operator = expr.getOperator(); + return operator == SQLBinaryOperator.IsNot ? "_first" : "_last"; + } + + private ScriptSortType getScriptSortType(Order order) { + ScriptSortType scriptSortType; + Schema.Type scriptFunctionReturnType = SQLFunctions.getOrderByFieldType(order.getSortField()); + + // as of now script function return type returns only text and double + switch (scriptFunctionReturnType) { + case TEXT: + scriptSortType = ScriptSortType.STRING; + break; + + case DOUBLE: + case FLOAT: + case INTEGER: + case LONG: + scriptSortType = ScriptSortType.NUMBER; + break; + default: + throw new IllegalStateException("Unknown type: " + scriptFunctionReturnType); } - - private ScriptSortType getScriptSortType(Order order) { - ScriptSortType scriptSortType; - Schema.Type scriptFunctionReturnType = SQLFunctions.getOrderByFieldType(order.getSortField()); - - - // as of now script function return type returns only text and double - switch (scriptFunctionReturnType) { - case TEXT: - scriptSortType = ScriptSortType.STRING; - break; - - case DOUBLE: - case FLOAT: - case INTEGER: - case LONG: - scriptSortType = ScriptSortType.NUMBER; - break; - default: - throw new IllegalStateException("Unknown type: " + scriptFunctionReturnType); - } - return scriptSortType; + return scriptSortType; + } + + /** + * Add from and size to the OpenSearch query based on the 'LIMIT' clause + * + * @param from starts from document at position from + * @param size number of documents to return. 
+ */ + private void setLimit(int from, int size) { + request.setFrom(from); + + if (size > -1) { + request.setSize(size); } + } - /** - * Add from and size to the OpenSearch query based on the 'LIMIT' clause - * - * @param from starts from document at position from - * @param size number of documents to return. - */ - private void setLimit(int from, int size) { - request.setFrom(from); - - if (size > -1) { - request.setSize(size); - } - } + public SearchRequestBuilder getRequestBuilder() { + return request; + } - public SearchRequestBuilder getRequestBuilder() { - return request; - } + private boolean isNotNested(Field field) { + return !field.isNested() || field.isReverseNested(); + } - private boolean isNotNested(Field field) { - return !field.isNested() || field.isReverseNested(); - } - - private void updateRequestWithInnerHits(Select select, SearchRequestBuilder request) { - new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); - } + private void updateRequestWithInnerHits(Select select, SearchRequestBuilder request) { + new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java index 892c5aeb2d..331921345f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; - import org.opensearch.client.Client; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -20,50 +18,44 @@ public class DeleteQueryAction extends QueryAction { - private final Delete delete; - private DeleteByQueryRequestBuilder request; - - public DeleteQueryAction(Client client, Delete delete) { - 
super(client, delete); - this.delete = delete; - } - - @Override - public SqlElasticDeleteByQueryRequestBuilder explain() throws SqlParseException { - this.request = new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE); - - setIndicesAndTypes(); - setWhere(delete.getWhere()); - SqlElasticDeleteByQueryRequestBuilder deleteByQueryRequestBuilder = - new SqlElasticDeleteByQueryRequestBuilder(request); - return deleteByQueryRequestBuilder; - } - - - /** - * Set indices and types to the delete by query request. - */ - private void setIndicesAndTypes() { - - DeleteByQueryRequest innerRequest = request.request(); - innerRequest.indices(query.getIndexArr()); + private final Delete delete; + private DeleteByQueryRequestBuilder request; + + public DeleteQueryAction(Client client, Delete delete) { + super(client, delete); + this.delete = delete; + } + + @Override + public SqlElasticDeleteByQueryRequestBuilder explain() throws SqlParseException { + this.request = new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE); + + setIndicesAndTypes(); + setWhere(delete.getWhere()); + SqlElasticDeleteByQueryRequestBuilder deleteByQueryRequestBuilder = + new SqlElasticDeleteByQueryRequestBuilder(request); + return deleteByQueryRequestBuilder; + } + + /** Set indices and types to the delete by query request. */ + private void setIndicesAndTypes() { + + DeleteByQueryRequest innerRequest = request.request(); + innerRequest.indices(query.getIndexArr()); + } + + /** + * Create filters based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException + */ + private void setWhere(Where where) throws SqlParseException { + if (where != null) { + QueryBuilder whereQuery = QueryMaker.explain(where); + request.filter(whereQuery); + } else { + request.filter(QueryBuilders.matchAllQuery()); } - - - /** - * Create filters based on - * the Where clause. - * - * @param where the 'WHERE' part of the SQL query. 
- * @throws SqlParseException - */ - private void setWhere(Where where) throws SqlParseException { - if (where != null) { - QueryBuilder whereQuery = QueryMaker.explain(where); - request.filter(whereQuery); - } else { - request.filter(QueryBuilders.matchAllQuery()); - } - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java index 077d9c28b8..ffc9695d81 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.admin.indices.get.GetIndexRequestBuilder; @@ -14,22 +13,23 @@ public class DescribeQueryAction extends QueryAction { - private final IndexStatement statement; + private final IndexStatement statement; - public DescribeQueryAction(Client client, IndexStatement statement) { - super(client, null); - this.statement = statement; - } + public DescribeQueryAction(Client client, IndexStatement statement) { + super(client, null); + this.statement = statement; + } - @Override - public QueryStatement getQueryStatement() { - return statement; - } + @Override + public QueryStatement getQueryStatement() { + return statement; + } - @Override - public SqlOpenSearchRequestBuilder explain() { - final GetIndexRequestBuilder indexRequestBuilder = Util.prepareIndexRequestBuilder(client, statement); + @Override + public SqlOpenSearchRequestBuilder explain() { + final GetIndexRequestBuilder indexRequestBuilder = + Util.prepareIndexRequestBuilder(client, statement); - return new SqlOpenSearchRequestBuilder(indexRequestBuilder); - } + return new SqlOpenSearchRequestBuilder(indexRequestBuilder); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java index 06ec21247a..d767268cb1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.ArrayList; @@ -22,198 +21,198 @@ public class BackOffRetryStrategy { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Interval (ms) between each retry - */ - private static final long[] intervals = milliseconds(new double[]{4, 8 + 4, 16 + 4}); + private static final Logger LOG = LogManager.getLogger(); - /** - * Delta to randomize interval (ms) - */ - private static final long delta = 4 * 1000; + /** Interval (ms) between each retry */ + private static final long[] intervals = milliseconds(new double[] {4, 8 + 4, 16 + 4}); - private static final int threshold = 85; + /** Delta to randomize interval (ms) */ + private static final long delta = 4 * 1000; - private static IdentityHashMap> memUse = new IdentityHashMap<>(); + private static final int threshold = 85; - private static AtomicLong mem = new AtomicLong(0L); + private static IdentityHashMap> memUse = new IdentityHashMap<>(); - private static long lastTimeoutCleanTime = System.currentTimeMillis(); + private static AtomicLong mem = new AtomicLong(0L); - private static final long RELTIMEOUT = 1000 * 60 * 30; + private static long lastTimeoutCleanTime = System.currentTimeMillis(); - private static final int MAXRETRIES = 999; + private static final long RELTIMEOUT = 1000 * 60 * 30; - private static final Object obj = new Object(); + private static final int MAXRETRIES = 999; - public static final Supplier GET_CB_STATE = () -> isMemoryHealthy() ? 
0 : 1; + private static final Object obj = new Object(); - private BackOffRetryStrategy() { + public static final Supplier GET_CB_STATE = () -> isMemoryHealthy() ? 0 : 1; - } + private BackOffRetryStrategy() {} - private static boolean isMemoryHealthy() { - final long freeMemory = Runtime.getRuntime().freeMemory(); - final long totalMemory = Runtime.getRuntime().totalMemory(); - final int memoryUsage = (int) Math.round((double) (totalMemory - freeMemory + mem.get()) - / (double) totalMemory * 100); + private static boolean isMemoryHealthy() { + final long freeMemory = Runtime.getRuntime().freeMemory(); + final long totalMemory = Runtime.getRuntime().totalMemory(); + final int memoryUsage = + (int) + Math.round( + (double) (totalMemory - freeMemory + mem.get()) / (double) totalMemory * 100); - LOG.debug("[MCB1] Memory total, free, allocate: {}, {}, {}", totalMemory, freeMemory, mem.get()); - LOG.debug("[MCB1] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + LOG.debug( + "[MCB1] Memory total, free, allocate: {}, {}, {}", totalMemory, freeMemory, mem.get()); + LOG.debug("[MCB1] Memory usage and limit: {}%, {}%", memoryUsage, threshold); - return memoryUsage < threshold; - } + return memoryUsage < threshold; + } - public static boolean isHealthy() { - for (int i = 0; i < intervals.length; i++) { - if (isMemoryHealthy()) { - return true; - } - - LOG.warn("[MCB1] Memory monitor is unhealthy now, back off retrying: {} attempt, thread id = {}", - i, Thread.currentThread().getId()); - if (ThreadLocalRandom.current().nextBoolean()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); - LOG.warn("[MCB1] Directly abort on idx {}.", i); - return false; - } - backOffSleep(intervals[i]); - } - - boolean isHealthy = isMemoryHealthy(); - if (!isHealthy) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); - } + public static boolean isHealthy() { + for (int i = 0; i < intervals.length; i++) { + 
if (isMemoryHealthy()) { + return true; + } - return isHealthy; + LOG.warn( + "[MCB1] Memory monitor is unhealthy now, back off retrying: {} attempt, thread id = {}", + i, + Thread.currentThread().getId()); + if (ThreadLocalRandom.current().nextBoolean()) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); + LOG.warn("[MCB1] Directly abort on idx {}.", i); + return false; + } + backOffSleep(intervals[i]); } - private static boolean isMemoryHealthy(long allocateMemory, int idx, Object key) { - long logMem = mem.get(); - - releaseTimeoutMemory(); - if (idx == 0 && allocateMemory > 0) { - logMem = mem.addAndGet(allocateMemory); - synchronized (BackOffRetryStrategy.class) { - if (memUse.containsKey(key)) { - memUse.put(key, Tuple.tuple(memUse.get(key).v1(), memUse.get(key).v2() + allocateMemory)); - } else { - memUse.put(key, Tuple.tuple(System.currentTimeMillis(), allocateMemory)); - } - } - } - - final long freeMemory = Runtime.getRuntime().freeMemory(); - final long totalMemory = Runtime.getRuntime().totalMemory(); - final int memoryUsage = (int) Math.round((double) (totalMemory - freeMemory + logMem) - / (double) totalMemory * 100); + boolean isHealthy = isMemoryHealthy(); + if (!isHealthy) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); + } - LOG.debug("[MCB] Idx is {}", idx); - LOG.debug("[MCB] Memory total, free, allocate: {}, {}, {}, {}", totalMemory, freeMemory, - allocateMemory, logMem); - LOG.debug("[MCB] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + return isHealthy; + } - return memoryUsage < threshold; + private static boolean isMemoryHealthy(long allocateMemory, int idx, Object key) { + long logMem = mem.get(); + releaseTimeoutMemory(); + if (idx == 0 && allocateMemory > 0) { + logMem = mem.addAndGet(allocateMemory); + synchronized (BackOffRetryStrategy.class) { + if (memUse.containsKey(key)) { + memUse.put(key, Tuple.tuple(memUse.get(key).v1(), 
memUse.get(key).v2() + allocateMemory)); + } else { + memUse.put(key, Tuple.tuple(System.currentTimeMillis(), allocateMemory)); + } + } } - public static boolean isHealthy(long allocateMemory, Object key) { - if (key == null) { - key = obj; - } + final long freeMemory = Runtime.getRuntime().freeMemory(); + final long totalMemory = Runtime.getRuntime().totalMemory(); + final int memoryUsage = + (int) Math.round((double) (totalMemory - freeMemory + logMem) / (double) totalMemory * 100); + + LOG.debug("[MCB] Idx is {}", idx); + LOG.debug( + "[MCB] Memory total, free, allocate: {}, {}, {}, {}", + totalMemory, + freeMemory, + allocateMemory, + logMem); + LOG.debug("[MCB] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + + return memoryUsage < threshold; + } + + public static boolean isHealthy(long allocateMemory, Object key) { + if (key == null) { + key = obj; + } - for (int i = 0; i < intervals.length; i++) { - if (isMemoryHealthy(allocateMemory, i, key)) { - return true; - } - - LOG.warn("[MCB] Memory monitor is unhealthy now, back off retrying: {} attempt, " - + "executor = {}, thread id = {}", i, key, Thread.currentThread().getId()); - if (ThreadLocalRandom.current().nextBoolean()) { - LOG.warn("[MCB] Directly abort on idx {}, executor is {}.", i, key); - return false; - } - backOffSleep(intervals[i]); - } - return isMemoryHealthy(allocateMemory, MAXRETRIES, key); + for (int i = 0; i < intervals.length; i++) { + if (isMemoryHealthy(allocateMemory, i, key)) { + return true; + } + + LOG.warn( + "[MCB] Memory monitor is unhealthy now, back off retrying: {} attempt, " + + "executor = {}, thread id = {}", + i, + key, + Thread.currentThread().getId()); + if (ThreadLocalRandom.current().nextBoolean()) { + LOG.warn("[MCB] Directly abort on idx {}, executor is {}.", i, key); + return false; + } + backOffSleep(intervals[i]); } + return isMemoryHealthy(allocateMemory, MAXRETRIES, key); + } - public static void backOffSleep(long interval) { - try { - long millis = 
randomize(interval); + public static void backOffSleep(long interval) { + try { + long millis = randomize(interval); - LOG.info("[MCB] Back off sleeping: {} ms", millis); - Thread.sleep(millis); - } catch (InterruptedException e) { - LOG.error("[MCB] Sleep interrupted", e); - } + LOG.info("[MCB] Back off sleeping: {} ms", millis); + Thread.sleep(millis); + } catch (InterruptedException e) { + LOG.error("[MCB] Sleep interrupted", e); } - - /** - * Generate random interval in [interval-delta, interval+delta) - */ - private static long randomize(long interval) { - // Random number within range generator for JDK 7+ - return ThreadLocalRandom.current().nextLong( - lowerBound(interval), upperBound(interval) - ); + } + + /** Generate random interval in [interval-delta, interval+delta) */ + private static long randomize(long interval) { + // Random number within range generator for JDK 7+ + return ThreadLocalRandom.current().nextLong(lowerBound(interval), upperBound(interval)); + } + + private static long lowerBound(long interval) { + return Math.max(0, interval - delta); + } + + private static long upperBound(long interval) { + return interval + delta; + } + + private static long[] milliseconds(double[] seconds) { + return Arrays.stream(seconds).mapToLong((second) -> (long) (1000 * second)).toArray(); + } + + public static void releaseMem(Object key) { + LOG.debug("[MCB] mem is {} before release", mem); + long v = 0L; + synchronized (BackOffRetryStrategy.class) { + if (memUse.containsKey(key)) { + v = memUse.get(key).v2(); + memUse.remove(key); + } } - - private static long lowerBound(long interval) { - return Math.max(0, interval - delta); + if (v > 0) { + atomicMinusLowBoundZero(mem, v); } + LOG.debug("[MCB] mem is {} after release", mem); + } - private static long upperBound(long interval) { - return interval + delta; + private static void releaseTimeoutMemory() { + long cur = System.currentTimeMillis(); + if (cur - lastTimeoutCleanTime < RELTIMEOUT) { + return; } - 
private static long[] milliseconds(double[] seconds) { - return Arrays.stream(seconds). - mapToLong((second) -> (long) (1000 * second)). - toArray(); + List bulks = new ArrayList<>(); + Predicate> isTimeout = t -> cur - t.v1() > RELTIMEOUT; + synchronized (BackOffRetryStrategy.class) { + memUse.values().stream().filter(isTimeout).forEach(v -> bulks.add(v.v2())); + memUse.values().removeIf(isTimeout); } - public static void releaseMem(Object key) { - LOG.debug("[MCB] mem is {} before release", mem); - long v = 0L; - synchronized (BackOffRetryStrategy.class) { - if (memUse.containsKey(key)) { - v = memUse.get(key).v2(); - memUse.remove(key); - } - } - if (v > 0) { - atomicMinusLowBoundZero(mem, v); - } - LOG.debug("[MCB] mem is {} after release", mem); + for (long v : bulks) { + atomicMinusLowBoundZero(mem, v); } - private static void releaseTimeoutMemory() { - long cur = System.currentTimeMillis(); - if (cur - lastTimeoutCleanTime < RELTIMEOUT) { - return; - } + lastTimeoutCleanTime = cur; + } - List bulks = new ArrayList<>(); - Predicate> isTimeout = t -> cur - t.v1() > RELTIMEOUT; - synchronized (BackOffRetryStrategy.class) { - memUse.values().stream().filter(isTimeout).forEach(v -> bulks.add(v.v2())); - memUse.values().removeIf(isTimeout); - } - - for (long v : bulks) { - atomicMinusLowBoundZero(mem, v); - } - - lastTimeoutCleanTime = cur; - } - - private static void atomicMinusLowBoundZero(AtomicLong x, Long y) { - long memRes = x.addAndGet(-y); - if (memRes < 0) { - x.compareAndSet(memRes, 0L); - } + private static void atomicMinusLowBoundZero(AtomicLong x, Long y) { + long memRes = x.addAndGet(-y); + if (memRes < 0) { + x.compareAndSet(memRes, 0L); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java index 9c3f1104a7..dcb703cd33 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -65,758 +64,788 @@ public class AggMaker { - /** - * The mapping bettwen group fieldName or Alias to the KVValue. - */ - private Map groupMap = new HashMap<>(); - private Where where; - - /** - * - * - * @param field - * @return - * @throws SqlParseException - */ - public AggregationBuilder makeGroupAgg(Field field) throws SqlParseException { - - if (field instanceof MethodField && field.getName().equals("script")) { - MethodField methodField = (MethodField) field; - TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(methodField.getAlias()) - .script(new Script(methodField.getParams().get(1).value.toString())); - extendGroupMap(methodField, new KVValue("KEY", termsBuilder)); - return termsBuilder; - } - - - if (field instanceof MethodField) { - - MethodField methodField = (MethodField) field; - if (methodField.getName().equals("filter")) { - Map paramsAsMap = methodField.getParamsAsMap(); - Where where = (Where) paramsAsMap.get("where"); - return AggregationBuilders.filter(paramsAsMap.get("alias").toString(), - QueryMaker.explain(where)); - } - return makeRangeGroup(methodField); - } else { - String termName = (Strings.isNullOrEmpty(field.getAlias())) ? field.getName() : field.getAlias(); - TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(termName).field(field.getName()); - final KVValue kvValue = new KVValue("KEY", termsBuilder); - groupMap.put(termName, kvValue); - // map the field name with KVValue if it is not yet. The use case is when alias exist, - // the termName is different with fieldName, both of them should be included in the map. - groupMap.putIfAbsent(field.getName(), kvValue); - return termsBuilder; - } + /** The mapping bettwen group fieldName or Alias to the KVValue. 
*/ + private Map groupMap = new HashMap<>(); + + private Where where; + + /** + * @param field + * @return + * @throws SqlParseException + */ + public AggregationBuilder makeGroupAgg(Field field) throws SqlParseException { + + if (field instanceof MethodField && field.getName().equals("script")) { + MethodField methodField = (MethodField) field; + TermsAggregationBuilder termsBuilder = + AggregationBuilders.terms(methodField.getAlias()) + .script(new Script(methodField.getParams().get(1).value.toString())); + extendGroupMap(methodField, new KVValue("KEY", termsBuilder)); + return termsBuilder; } - - /** - * Create aggregation according to the SQL function. - * - * @param field SQL function - * @param parent parentAggregation - * @return AggregationBuilder represents the SQL function - * @throws SqlParseException in case of unrecognized function - */ - public AggregationBuilder makeFieldAgg(MethodField field, AggregationBuilder parent) throws SqlParseException { - extendGroupMap(field, new KVValue("FIELD", parent)); - ValuesSourceAggregationBuilder builder; - field.setAlias(fixAlias(field.getAlias())); - switch (field.getName().toUpperCase()) { - case "SUM": - builder = AggregationBuilders.sum(field.getAlias()); - return addFieldToAgg(field, builder); - case "MAX": - builder = AggregationBuilders.max(field.getAlias()); - return addFieldToAgg(field, builder); - case "MIN": - builder = AggregationBuilders.min(field.getAlias()); - return addFieldToAgg(field, builder); - case "AVG": - builder = AggregationBuilders.avg(field.getAlias()); - return addFieldToAgg(field, builder); - case "STATS": - builder = AggregationBuilders.stats(field.getAlias()); - return addFieldToAgg(field, builder); - case "EXTENDED_STATS": - builder = AggregationBuilders.extendedStats(field.getAlias()); - return addFieldToAgg(field, builder); - case "PERCENTILES": - builder = AggregationBuilders.percentiles(field.getAlias()); - addSpecificPercentiles((PercentilesAggregationBuilder) builder, 
field.getParams()); - return addFieldToAgg(field, builder); - case "TOPHITS": - return makeTopHitsAgg(field); - case "SCRIPTED_METRIC": - return scriptedMetric(field); - case "COUNT": - extendGroupMap(field, new KVValue("COUNT", parent)); - return addFieldToAgg(field, makeCountAgg(field)); - default: - throw new SqlParseException("the agg function not to define !"); - } + if (field instanceof MethodField) { + + MethodField methodField = (MethodField) field; + if (methodField.getName().equals("filter")) { + Map paramsAsMap = methodField.getParamsAsMap(); + Where where = (Where) paramsAsMap.get("where"); + return AggregationBuilders.filter( + paramsAsMap.get("alias").toString(), QueryMaker.explain(where)); + } + return makeRangeGroup(methodField); + } else { + String termName = + (Strings.isNullOrEmpty(field.getAlias())) ? field.getName() : field.getAlias(); + TermsAggregationBuilder termsBuilder = + AggregationBuilders.terms(termName).field(field.getName()); + final KVValue kvValue = new KVValue("KEY", termsBuilder); + groupMap.put(termName, kvValue); + // map the field name with KVValue if it is not yet. The use case is when alias exist, + // the termName is different with fieldName, both of them should be included in the map. + groupMap.putIfAbsent(field.getName(), kvValue); + return termsBuilder; } - - /** - * With {@link Where} Condition. - */ - public AggMaker withWhere(Where where) { - this.where = where; - return this; + } + + /** + * Create aggregation according to the SQL function. 
+ * + * @param field SQL function + * @param parent parentAggregation + * @return AggregationBuilder represents the SQL function + * @throws SqlParseException in case of unrecognized function + */ + public AggregationBuilder makeFieldAgg(MethodField field, AggregationBuilder parent) + throws SqlParseException { + extendGroupMap(field, new KVValue("FIELD", parent)); + ValuesSourceAggregationBuilder builder; + field.setAlias(fixAlias(field.getAlias())); + switch (field.getName().toUpperCase()) { + case "SUM": + builder = AggregationBuilders.sum(field.getAlias()); + return addFieldToAgg(field, builder); + case "MAX": + builder = AggregationBuilders.max(field.getAlias()); + return addFieldToAgg(field, builder); + case "MIN": + builder = AggregationBuilders.min(field.getAlias()); + return addFieldToAgg(field, builder); + case "AVG": + builder = AggregationBuilders.avg(field.getAlias()); + return addFieldToAgg(field, builder); + case "STATS": + builder = AggregationBuilders.stats(field.getAlias()); + return addFieldToAgg(field, builder); + case "EXTENDED_STATS": + builder = AggregationBuilders.extendedStats(field.getAlias()); + return addFieldToAgg(field, builder); + case "PERCENTILES": + builder = AggregationBuilders.percentiles(field.getAlias()); + addSpecificPercentiles((PercentilesAggregationBuilder) builder, field.getParams()); + return addFieldToAgg(field, builder); + case "TOPHITS": + return makeTopHitsAgg(field); + case "SCRIPTED_METRIC": + return scriptedMetric(field); + case "COUNT": + extendGroupMap(field, new KVValue("COUNT", parent)); + return addFieldToAgg(field, makeCountAgg(field)); + default: + throw new SqlParseException("the agg function not to define !"); } - - private void addSpecificPercentiles(PercentilesAggregationBuilder percentilesBuilder, List params) { - List percentiles = new ArrayList<>(); - for (KVValue kValue : params) { - if (kValue.value.getClass().equals(BigDecimal.class)) { - BigDecimal percentile = (BigDecimal) kValue.value; - 
percentiles.add(percentile.doubleValue()); - - } else if (kValue.value instanceof Integer) { - percentiles.add(((Integer) kValue.value).doubleValue()); - } - } - if (percentiles.size() > 0) { - double[] percentilesArr = new double[percentiles.size()]; - int i = 0; - for (Double percentile : percentiles) { - percentilesArr[i] = percentile; - i++; - } - percentilesBuilder.percentiles(percentilesArr); - } + } + + /** With {@link Where} Condition. */ + public AggMaker withWhere(Where where) { + this.where = where; + return this; + } + + private void addSpecificPercentiles( + PercentilesAggregationBuilder percentilesBuilder, List params) { + List percentiles = new ArrayList<>(); + for (KVValue kValue : params) { + if (kValue.value.getClass().equals(BigDecimal.class)) { + BigDecimal percentile = (BigDecimal) kValue.value; + percentiles.add(percentile.doubleValue()); + + } else if (kValue.value instanceof Integer) { + percentiles.add(((Integer) kValue.value).doubleValue()); + } } - - private String fixAlias(String alias) { - //because [ is not legal as alias - return alias.replaceAll("\\[", "(").replaceAll("\\]", ")"); + if (percentiles.size() > 0) { + double[] percentilesArr = new double[percentiles.size()]; + int i = 0; + for (Double percentile : percentiles) { + percentilesArr[i] = percentile; + i++; + } + percentilesBuilder.percentiles(percentilesArr); } + } + + private String fixAlias(String alias) { + // because [ is not legal as alias + return alias.replaceAll("\\[", "(").replaceAll("\\]", ")"); + } + + private AggregationBuilder addFieldToAgg( + MethodField field, ValuesSourceAggregationBuilder builder) throws SqlParseException { + KVValue kvValue = field.getParams().get(0); + if (kvValue.key != null && kvValue.key.equals("script")) { + if (kvValue.value instanceof MethodField) { + return builder.script( + new Script(((MethodField) kvValue.value).getParams().get(1).toString())); + } else { + return builder.script(new Script(kvValue.value.toString())); + } + + } 
else if (kvValue.key != null && kvValue.value.toString().trim().startsWith("def")) { + return builder.script(new Script(kvValue.value.toString())); + } else if (kvValue.key != null + && (kvValue.key.equals("nested") || kvValue.key.equals("reverse_nested"))) { + NestedType nestedType = (NestedType) kvValue.value; + nestedType.addBucketPath(Path.getMetricPath(builder.getName())); + + if (nestedType.isNestedField()) { + builder.field("_index"); + } else { + builder.field(nestedType.field); + } + + AggregationBuilder nestedBuilder; + + String nestedAggName = nestedType.getNestedAggName(); + + if (nestedType.isReverse()) { + if (nestedType.path != null && nestedType.path.startsWith("~")) { + String realPath = nestedType.path.substring(1); + nestedBuilder = AggregationBuilders.nested(nestedAggName, realPath); + nestedBuilder = nestedBuilder.subAggregation(builder); + return AggregationBuilders.reverseNested(nestedAggName + "_REVERSED") + .subAggregation(nestedBuilder); + } else { + ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = + AggregationBuilders.reverseNested(nestedAggName); + if (nestedType.path != null) { + reverseNestedAggregationBuilder.path(nestedType.path); + } + nestedBuilder = reverseNestedAggregationBuilder; + } + } else { + nestedBuilder = AggregationBuilders.nested(nestedAggName, nestedType.path); + } - private AggregationBuilder addFieldToAgg(MethodField field, ValuesSourceAggregationBuilder builder) - throws SqlParseException { - KVValue kvValue = field.getParams().get(0); - if (kvValue.key != null && kvValue.key.equals("script")) { - if (kvValue.value instanceof MethodField) { - return builder.script(new Script(((MethodField) kvValue.value).getParams().get(1).toString())); - } else { - return builder.script(new Script(kvValue.value.toString())); - } - - } else if (kvValue.key != null && kvValue.value.toString().trim().startsWith("def")) { - return builder.script(new Script(kvValue.value.toString())); - } else if (kvValue.key != null 
&& (kvValue.key.equals("nested") || kvValue.key.equals("reverse_nested"))) { - NestedType nestedType = (NestedType) kvValue.value; - nestedType.addBucketPath(Path.getMetricPath(builder.getName())); - - if (nestedType.isNestedField()) { - builder.field("_index"); - } else { - builder.field(nestedType.field); - } - - AggregationBuilder nestedBuilder; - - String nestedAggName = nestedType.getNestedAggName(); - - if (nestedType.isReverse()) { - if (nestedType.path != null && nestedType.path.startsWith("~")) { - String realPath = nestedType.path.substring(1); - nestedBuilder = AggregationBuilders.nested(nestedAggName, realPath); - nestedBuilder = nestedBuilder.subAggregation(builder); - return AggregationBuilders.reverseNested(nestedAggName + "_REVERSED") - .subAggregation(nestedBuilder); - } else { - ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = - AggregationBuilders.reverseNested(nestedAggName); - if (nestedType.path != null) { - reverseNestedAggregationBuilder.path(nestedType.path); - } - nestedBuilder = reverseNestedAggregationBuilder; - } - } else { - nestedBuilder = AggregationBuilders.nested(nestedAggName, nestedType.path); - } - - AggregationBuilder aggregation = nestedBuilder.subAggregation(wrapWithFilterAgg( - nestedType, - builder)); - nestedType.addBucketPath(Path.getAggPath(nestedBuilder.getName())); - return aggregation; - } else if (kvValue.key != null && (kvValue.key.equals("children"))) { - ChildrenType childrenType = (ChildrenType) kvValue.value; - - builder.field(childrenType.field); + AggregationBuilder aggregation = + nestedBuilder.subAggregation(wrapWithFilterAgg(nestedType, builder)); + nestedType.addBucketPath(Path.getAggPath(nestedBuilder.getName())); + return aggregation; + } else if (kvValue.key != null && (kvValue.key.equals("children"))) { + ChildrenType childrenType = (ChildrenType) kvValue.value; - AggregationBuilder childrenBuilder; + builder.field(childrenType.field); - String childrenAggName = childrenType.field + 
"@CHILDREN"; + AggregationBuilder childrenBuilder; - childrenBuilder = JoinAggregationBuilders.children(childrenAggName, childrenType.childType); + String childrenAggName = childrenType.field + "@CHILDREN"; - return childrenBuilder; - } + childrenBuilder = JoinAggregationBuilders.children(childrenAggName, childrenType.childType); - return builder.field(kvValue.toString()); + return childrenBuilder; } - private AggregationBuilder makeRangeGroup(MethodField field) throws SqlParseException { - switch (field.getName().toLowerCase()) { - case "range": - return rangeBuilder(field); - case "date_histogram": - return dateHistogram(field); - case "date_range": - case "month": - return dateRange(field); - case "histogram": - return histogram(field); - case "geohash_grid": - return geohashGrid(field); - case "geo_bounds": - return geoBounds(field); - case "terms": - return termsAgg(field); - default: - throw new SqlParseException("can define this method " + field); - } - + return builder.field(kvValue.toString()); + } + + private AggregationBuilder makeRangeGroup(MethodField field) throws SqlParseException { + switch (field.getName().toLowerCase()) { + case "range": + return rangeBuilder(field); + case "date_histogram": + return dateHistogram(field); + case "date_range": + case "month": + return dateRange(field); + case "histogram": + return histogram(field); + case "geohash_grid": + return geohashGrid(field); + case "geo_bounds": + return geoBounds(field); + case "terms": + return termsAgg(field); + default: + throw new SqlParseException("can define this method " + field); } - - private AggregationBuilder geoBounds(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - GeoBoundsAggregationBuilder boundsBuilder = new GeoBoundsAggregationBuilder(aggName); - String value; - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "field": - boundsBuilder.field(value); - break; 
- case "wrap_longitude": - boundsBuilder.wrapLongitude(Boolean.getBoolean(value)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("geo_bounds err or not define field " + kv.toString()); - } - } - return boundsBuilder; + } + + private AggregationBuilder geoBounds(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + GeoBoundsAggregationBuilder boundsBuilder = new GeoBoundsAggregationBuilder(aggName); + String value; + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "field": + boundsBuilder.field(value); + break; + case "wrap_longitude": + boundsBuilder.wrapLongitude(Boolean.getBoolean(value)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("geo_bounds err or not define field " + kv.toString()); + } } - - private AggregationBuilder termsAgg(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - TermsAggregationBuilder terms = AggregationBuilders.terms(aggName); - String value; - IncludeExclude include = null, exclude = null; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - terms.script(new Script(script)); + return boundsBuilder; + } + + private AggregationBuilder termsAgg(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + TermsAggregationBuilder terms = AggregationBuilders.terms(aggName); + String value; + IncludeExclude include = null, exclude = null; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + terms.script(new Script(script)); + } else { + value = kv.value.toString(); + switch 
(kv.key.toLowerCase()) { + case "field": + terms.field(value); + break; + case "size": + terms.size(Integer.parseInt(value)); + break; + case "shard_size": + terms.shardSize(Integer.parseInt(value)); + break; + case "min_doc_count": + terms.minDocCount(Integer.parseInt(value)); + break; + case "missing": + terms.missing(value); + break; + case "order": + if ("asc".equalsIgnoreCase(value)) { + terms.order(BucketOrder.key(true)); + } else if ("desc".equalsIgnoreCase(value)) { + terms.order(BucketOrder.key(false)); } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "field": - terms.field(value); - break; - case "size": - terms.size(Integer.parseInt(value)); - break; - case "shard_size": - terms.shardSize(Integer.parseInt(value)); - break; - case "min_doc_count": - terms.minDocCount(Integer.parseInt(value)); - break; - case "missing": - terms.missing(value); - break; - case "order": - if ("asc".equalsIgnoreCase(value)) { - terms.order(BucketOrder.key(true)); - } else if ("desc".equalsIgnoreCase(value)) { - terms.order(BucketOrder.key(false)); - } else { - List orderElements = new ArrayList<>(); - try (JsonXContentParser parser = new JsonXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, new JsonFactory().createParser(value))) { - XContentParser.Token currentToken = parser.nextToken(); - if (currentToken == XContentParser.Token.START_OBJECT) { - orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); - } else if (currentToken == XContentParser.Token.START_ARRAY) { - for (currentToken = parser.nextToken(); - currentToken != XContentParser.Token.END_ARRAY; - currentToken = parser.nextToken()) { - if (currentToken == XContentParser.Token.START_OBJECT) { - orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); - } else { - throw new ParsingException(parser.getTokenLocation(), - "Invalid token in order array"); - } - } - } - } catch (IOException e) { - throw new SqlParseException("couldn't parse 
order: " + e.getMessage()); - } - terms.order(orderElements); - } - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - case "execution_hint": - terms.executionHint(value); - break; - case "include": - try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, value)) { - parser.nextToken(); - include = IncludeExclude.parseInclude(parser); - } catch (IOException e) { - throw new SqlParseException("parse include[" + value + "] error: " + e.getMessage()); - } - break; - case "exclude": - try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, value)) { - parser.nextToken(); - exclude = IncludeExclude.parseExclude(parser); - } catch (IOException e) { - throw new SqlParseException("parse exclude[" + value + "] error: " + e.getMessage()); - } - break; - default: - throw new SqlParseException("terms aggregation err or not define field " + kv.toString()); + List orderElements = new ArrayList<>(); + try (JsonXContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(value))) { + XContentParser.Token currentToken = parser.nextToken(); + if (currentToken == XContentParser.Token.START_OBJECT) { + orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); + } else if (currentToken == XContentParser.Token.START_ARRAY) { + for (currentToken = parser.nextToken(); + currentToken != XContentParser.Token.END_ARRAY; + currentToken = parser.nextToken()) { + if (currentToken == XContentParser.Token.START_OBJECT) { + orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); + } else { + throw new ParsingException( + parser.getTokenLocation(), "Invalid token in order array"); + } + } } + } catch (IOException e) { + throw new SqlParseException("couldn't parse order: " + e.getMessage()); + } 
+ terms.order(orderElements); } - } - terms.includeExclude(IncludeExclude.merge(include, exclude)); - return terms; - } - - private AbstractAggregationBuilder scriptedMetric(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - ScriptedMetricAggregationBuilder scriptedMetricBuilder = AggregationBuilders.scriptedMetric(aggName); - Map scriptedMetricParams = field.getParamsAsMap(); - if (!scriptedMetricParams.containsKey("map_script") && !scriptedMetricParams.containsKey("map_script_id") - && !scriptedMetricParams.containsKey("map_script_file")) { - throw new SqlParseException( - "scripted metric parameters must contain map_script/map_script_id/map_script_file parameter"); - } - HashMap scriptAdditionalParams = new HashMap<>(); - HashMap reduceScriptAdditionalParams = new HashMap<>(); - for (Map.Entry param : scriptedMetricParams.entrySet()) { - String paramValue = param.getValue().toString(); - if (param.getKey().startsWith("@")) { - if (param.getKey().startsWith("@reduce_")) { - reduceScriptAdditionalParams.put(param.getKey().replace("@reduce_", ""), - param.getValue()); - } else { - scriptAdditionalParams.put(param.getKey().replace("@", ""), param.getValue()); - } - continue; + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + case "execution_hint": + terms.executionHint(value); + break; + case "include": + try (XContentParser parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, value)) { + parser.nextToken(); + include = IncludeExclude.parseInclude(parser); + } catch (IOException e) { + throw new SqlParseException("parse include[" + value + "] error: " + e.getMessage()); } - - switch (param.getKey().toLowerCase()) { - case "map_script": - scriptedMetricBuilder.mapScript(new Script(paramValue)); - break; - case "map_script_id": - scriptedMetricBuilder.mapScript(new Script(ScriptType.STORED, 
Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "init_script": - scriptedMetricBuilder.initScript(new Script(paramValue)); - break; - case "init_script_id": - scriptedMetricBuilder.initScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "combine_script": - scriptedMetricBuilder.combineScript(new Script(paramValue)); - break; - case "combine_script_id": - scriptedMetricBuilder.combineScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "reduce_script": - scriptedMetricBuilder.reduceScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, - paramValue, reduceScriptAdditionalParams)); - break; - case "reduce_script_id": - scriptedMetricBuilder.reduceScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, reduceScriptAdditionalParams)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("scripted_metric err or not define field " + param.getKey()); + break; + case "exclude": + try (XContentParser parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, value)) { + parser.nextToken(); + exclude = IncludeExclude.parseExclude(parser); + } catch (IOException e) { + throw new SqlParseException("parse exclude[" + value + "] error: " + e.getMessage()); } + break; + default: + throw new SqlParseException( + "terms aggregation err or not define field " + kv.toString()); } - if (scriptAdditionalParams.size() > 0) { - scriptAdditionalParams.put("_agg", new HashMap<>()); - scriptedMetricBuilder.params(scriptAdditionalParams); - } - - return scriptedMetricBuilder; + } } - - private AggregationBuilder geohashGrid(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - GeoGridAggregationBuilder geoHashGrid = new 
GeoHashGridAggregationBuilder(aggName); - String value; - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "precision": - geoHashGrid.precision(Integer.parseInt(value)); - break; - case "field": - geoHashGrid.field(value); - break; - case "size": - geoHashGrid.size(Integer.parseInt(value)); - break; - case "shard_size": - geoHashGrid.shardSize(Integer.parseInt(value)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("geohash grid err or not define field " + kv.toString()); - } - } - return geoHashGrid; + terms.includeExclude(IncludeExclude.merge(include, exclude)); + return terms; + } + + private AbstractAggregationBuilder scriptedMetric(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + ScriptedMetricAggregationBuilder scriptedMetricBuilder = + AggregationBuilders.scriptedMetric(aggName); + Map scriptedMetricParams = field.getParamsAsMap(); + if (!scriptedMetricParams.containsKey("map_script") + && !scriptedMetricParams.containsKey("map_script_id") + && !scriptedMetricParams.containsKey("map_script_file")) { + throw new SqlParseException( + "scripted metric parameters must contain map_script/map_script_id/map_script_file" + + " parameter"); } - - private static final String TIME_FARMAT = "yyyy-MM-dd HH:mm:ss"; - - private ValuesSourceAggregationBuilder dateRange(MethodField field) { - String alias = gettAggNameFromParamsOrAlias(field); - DateRangeAggregationBuilder dateRange = AggregationBuilders.dateRange(alias).format(TIME_FARMAT); - - String value; - List ranges = new ArrayList<>(); - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - if ("field".equals(kv.key)) { - dateRange.field(value); - } else if ("format".equals(kv.key)) { - dateRange.format(value); - } else if ("time_zone".equals(kv.key)) { - dateRange.timeZone(ZoneOffset.of(value)); 
- } else if ("from".equals(kv.key)) { - dateRange.addUnboundedFrom(kv.value.toString()); - } else if ("to".equals(kv.key)) { - dateRange.addUnboundedTo(kv.value.toString()); - } else if (!"alias".equals(kv.key) && !"nested".equals(kv.key) && !"children".equals(kv.key)) { - ranges.add(value); - } - } - - for (int i = 1; i < ranges.size(); i++) { - dateRange.addRange(ranges.get(i - 1), ranges.get(i)); + HashMap scriptAdditionalParams = new HashMap<>(); + HashMap reduceScriptAdditionalParams = new HashMap<>(); + for (Map.Entry param : scriptedMetricParams.entrySet()) { + String paramValue = param.getValue().toString(); + if (param.getKey().startsWith("@")) { + if (param.getKey().startsWith("@reduce_")) { + reduceScriptAdditionalParams.put( + param.getKey().replace("@reduce_", ""), param.getValue()); + } else { + scriptAdditionalParams.put(param.getKey().replace("@", ""), param.getValue()); } + continue; + } + + switch (param.getKey().toLowerCase()) { + case "map_script": + scriptedMetricBuilder.mapScript(new Script(paramValue)); + break; + case "map_script_id": + scriptedMetricBuilder.mapScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "init_script": + scriptedMetricBuilder.initScript(new Script(paramValue)); + break; + case "init_script_id": + scriptedMetricBuilder.initScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "combine_script": + scriptedMetricBuilder.combineScript(new Script(paramValue)); + break; + case "combine_script_id": + scriptedMetricBuilder.combineScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "reduce_script": + scriptedMetricBuilder.reduceScript( + new Script( + ScriptType.INLINE, + Script.DEFAULT_SCRIPT_LANG, + paramValue, + reduceScriptAdditionalParams)); + break; + case "reduce_script_id": + scriptedMetricBuilder.reduceScript( + new Script( + 
ScriptType.STORED, + Script.DEFAULT_SCRIPT_LANG, + paramValue, + reduceScriptAdditionalParams)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("scripted_metric err or not define field " + param.getKey()); + } + } + if (scriptAdditionalParams.size() > 0) { + scriptAdditionalParams.put("_agg", new HashMap<>()); + scriptedMetricBuilder.params(scriptAdditionalParams); + } - return dateRange; + return scriptedMetricBuilder; + } + + private AggregationBuilder geohashGrid(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + GeoGridAggregationBuilder geoHashGrid = new GeoHashGridAggregationBuilder(aggName); + String value; + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "precision": + geoHashGrid.precision(Integer.parseInt(value)); + break; + case "field": + geoHashGrid.field(value); + break; + case "size": + geoHashGrid.size(Integer.parseInt(value)); + break; + case "shard_size": + geoHashGrid.shardSize(Integer.parseInt(value)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("geohash grid err or not define field " + kv.toString()); + } + } + return geoHashGrid; + } + + private static final String TIME_FARMAT = "yyyy-MM-dd HH:mm:ss"; + + private ValuesSourceAggregationBuilder dateRange(MethodField field) { + String alias = gettAggNameFromParamsOrAlias(field); + DateRangeAggregationBuilder dateRange = + AggregationBuilders.dateRange(alias).format(TIME_FARMAT); + + String value; + List ranges = new ArrayList<>(); + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + if ("field".equals(kv.key)) { + dateRange.field(value); + } else if ("format".equals(kv.key)) { + dateRange.format(value); + } else if ("time_zone".equals(kv.key)) { + 
dateRange.timeZone(ZoneOffset.of(value)); + } else if ("from".equals(kv.key)) { + dateRange.addUnboundedFrom(kv.value.toString()); + } else if ("to".equals(kv.key)) { + dateRange.addUnboundedTo(kv.value.toString()); + } else if (!"alias".equals(kv.key) + && !"nested".equals(kv.key) + && !"children".equals(kv.key)) { + ranges.add(value); + } } - /** - * - * - * @param field - * @return - * @throws SqlParseException - */ - private DateHistogramAggregationBuilder dateHistogram(MethodField field) throws SqlParseException { - String alias = gettAggNameFromParamsOrAlias(field); - DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(alias).format(TIME_FARMAT); - String value; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - dateHistogram.script(new Script(script)); - } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "interval": - dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString())); - break; - case "fixed_interval": - dateHistogram.fixedInterval(new DateHistogramInterval(kv.value.toString())); - break; - case "field": - dateHistogram.field(value); - break; - case "format": - dateHistogram.format(value); - break; - case "time_zone": - dateHistogram.timeZone(ZoneOffset.of(value)); - break; - case "min_doc_count": - dateHistogram.minDocCount(Long.parseLong(value)); - break; - case "order": - dateHistogram.order("desc".equalsIgnoreCase(value) ? 
BucketOrder.key(false) : - BucketOrder.key(true)); - break; - case "extended_bounds": - String[] bounds = value.split(":"); - if (bounds.length == 2) { - dateHistogram.extendedBounds(new LongBounds(bounds[0], bounds[1])); - } - break; - - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("date range err or not define field " + kv.toString()); - } - } - } - return dateHistogram; + for (int i = 1; i < ranges.size(); i++) { + dateRange.addRange(ranges.get(i - 1), ranges.get(i)); } - private String gettAggNameFromParamsOrAlias(MethodField field) { - String alias = field.getAlias(); - for (KVValue kv : field.getParams()) { - if (kv.key != null && kv.key.equals("alias")) { - alias = kv.value.toString(); + return dateRange; + } + + /** + * @param field + * @return + * @throws SqlParseException + */ + private DateHistogramAggregationBuilder dateHistogram(MethodField field) + throws SqlParseException { + String alias = gettAggNameFromParamsOrAlias(field); + DateHistogramAggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram(alias).format(TIME_FARMAT); + String value; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + dateHistogram.script(new Script(script)); + } else { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "interval": + dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString())); + break; + case "fixed_interval": + dateHistogram.fixedInterval(new DateHistogramInterval(kv.value.toString())); + break; + case "field": + dateHistogram.field(value); + break; + case "format": + dateHistogram.format(value); + break; + case "time_zone": + dateHistogram.timeZone(ZoneOffset.of(value)); + break; + case "min_doc_count": + dateHistogram.minDocCount(Long.parseLong(value)); + break; + case "order": + dateHistogram.order( + "desc".equalsIgnoreCase(value) ? 
BucketOrder.key(false) : BucketOrder.key(true)); + break; + case "extended_bounds": + String[] bounds = value.split(":"); + if (bounds.length == 2) { + dateHistogram.extendedBounds(new LongBounds(bounds[0], bounds[1])); } + break; + + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("date range err or not define field " + kv.toString()); } - return alias; + } } - - private HistogramAggregationBuilder histogram(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - HistogramAggregationBuilder histogram = AggregationBuilders.histogram(aggName); - String value; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - histogram.script(new Script(script)); - } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "interval": - histogram.interval(Long.parseLong(value)); - break; - case "field": - histogram.field(value); - break; - case "min_doc_count": - histogram.minDocCount(Long.parseLong(value)); - break; - case "extended_bounds": - String[] bounds = value.split(":"); - if (bounds.length == 2) { - histogram.extendedBounds(Long.valueOf(bounds[0]), Long.valueOf(bounds[1])); - } - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - case "order": - final BucketOrder order; - switch (value) { - case "key_desc": - order = BucketOrder.key(false); - break; - case "count_asc": - order = BucketOrder.count(true); - break; - case "count_desc": - order = BucketOrder.count(false); - break; - case "key_asc": - default: - order = BucketOrder.key(true); - break; - } - histogram.order(order); - break; - default: - throw new SqlParseException("histogram err or not define field " + kv.toString()); - } + return dateHistogram; + } + + private String gettAggNameFromParamsOrAlias(MethodField field) { + String alias = 
field.getAlias(); + for (KVValue kv : field.getParams()) { + if (kv.key != null && kv.key.equals("alias")) { + alias = kv.value.toString(); + } + } + return alias; + } + + private HistogramAggregationBuilder histogram(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + HistogramAggregationBuilder histogram = AggregationBuilders.histogram(aggName); + String value; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + histogram.script(new Script(script)); + } else { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "interval": + histogram.interval(Long.parseLong(value)); + break; + case "field": + histogram.field(value); + break; + case "min_doc_count": + histogram.minDocCount(Long.parseLong(value)); + break; + case "extended_bounds": + String[] bounds = value.split(":"); + if (bounds.length == 2) { + histogram.extendedBounds(Long.valueOf(bounds[0]), Long.valueOf(bounds[1])); + } + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + case "order": + final BucketOrder order; + switch (value) { + case "key_desc": + order = BucketOrder.key(false); + break; + case "count_asc": + order = BucketOrder.count(true); + break; + case "count_desc": + order = BucketOrder.count(false); + break; + case "key_asc": + default: + order = BucketOrder.key(true); + break; } + histogram.order(order); + break; + default: + throw new SqlParseException("histogram err or not define field " + kv.toString()); } - return histogram; + } } + return histogram; + } - /** - * - * - * @param field - * @return - */ - private RangeAggregationBuilder rangeBuilder(MethodField field) { + /** + * @param field + * @return + */ + private RangeAggregationBuilder rangeBuilder(MethodField field) { - // ignore alias param - LinkedList params = field.getParams().stream().filter(kv -> !"alias".equals(kv.key)) - 
.collect(Collectors.toCollection(LinkedList::new)); + // ignore alias param + LinkedList params = + field.getParams().stream() + .filter(kv -> !"alias".equals(kv.key)) + .collect(Collectors.toCollection(LinkedList::new)); - String fieldName = params.poll().toString(); + String fieldName = params.poll().toString(); - double[] ds = Util.KV2DoubleArr(params); + double[] ds = Util.KV2DoubleArr(params); - RangeAggregationBuilder range = AggregationBuilders.range(field.getAlias()).field(fieldName); + RangeAggregationBuilder range = AggregationBuilders.range(field.getAlias()).field(fieldName); - for (int i = 1; i < ds.length; i++) { - range.addRange(ds[i - 1], ds[i]); - } - - return range; + for (int i = 1; i < ds.length; i++) { + range.addRange(ds[i - 1], ds[i]); } + return range; + } + + /** + * Create count aggregation. + * + * @param field The count function + * @return AggregationBuilder use to count result + */ + private ValuesSourceAggregationBuilder makeCountAgg(MethodField field) { + + // Cardinality is approximate DISTINCT. + if (SQLAggregateOption.DISTINCT.equals(field.getOption())) { + + if (field.getParams().size() == 1) { + return AggregationBuilders.cardinality(field.getAlias()) + .field(field.getParams().get(0).value.toString()); + } else { + Integer precision_threshold = (Integer) (field.getParams().get(1).value); + return AggregationBuilders.cardinality(field.getAlias()) + .precisionThreshold(precision_threshold) + .field(field.getParams().get(0).value.toString()); + } + } - /** - * Create count aggregation. - * - * @param field The count function - * @return AggregationBuilder use to count result - */ - private ValuesSourceAggregationBuilder makeCountAgg(MethodField field) { - - // Cardinality is approximate DISTINCT. 
- if (SQLAggregateOption.DISTINCT.equals(field.getOption())) { - - if (field.getParams().size() == 1) { - return AggregationBuilders.cardinality(field.getAlias()).field(field.getParams().get(0).value - .toString()); - } else { - Integer precision_threshold = (Integer) (field.getParams().get(1).value); - return AggregationBuilders.cardinality(field.getAlias()).precisionThreshold(precision_threshold) - .field(field.getParams().get(0).value.toString()); - } - - } - - String fieldName = field.getParams().get(0).value.toString(); + String fieldName = field.getParams().get(0).value.toString(); - // In case of count(*) we use '_index' as field parameter to count all documents - if ("*".equals(fieldName)) { - KVValue kvValue = new KVValue(null, "_index"); - field.getParams().set(0, kvValue); - return AggregationBuilders.count(field.getAlias()).field(kvValue.toString()); - } else { - return AggregationBuilders.count(field.getAlias()).field(fieldName); - } + // In case of count(*) we use '_index' as field parameter to count all documents + if ("*".equals(fieldName)) { + KVValue kvValue = new KVValue(null, "_index"); + field.getParams().set(0, kvValue); + return AggregationBuilders.count(field.getAlias()).field(kvValue.toString()); + } else { + return AggregationBuilders.count(field.getAlias()).field(fieldName); } - - /** - * TOPHITS - * - * @param field - * @return - */ - private AbstractAggregationBuilder makeTopHitsAgg(MethodField field) { - String alias = gettAggNameFromParamsOrAlias(field); - TopHitsAggregationBuilder topHits = AggregationBuilders.topHits(alias); - List params = field.getParams(); - String[] include = null; - String[] exclude = null; - for (KVValue kv : params) { - switch (kv.key) { - case "from": - topHits.from((int) kv.value); - break; - case "size": - topHits.size((int) kv.value); - break; - case "include": - include = kv.value.toString().split(","); - break; - case "exclude": - exclude = kv.value.toString().split(","); - break; - case "alias": - case 
"nested": - case "reverse_nested": - case "children": - break; - default: - topHits.sort(kv.key, SortOrder.valueOf(kv.value.toString().toUpperCase())); - break; - } - } - if (include != null || exclude != null) { - topHits.fetchSource(include, exclude); - } - return topHits; + } + + /** + * TOPHITS + * + * @param field + * @return + */ + private AbstractAggregationBuilder makeTopHitsAgg(MethodField field) { + String alias = gettAggNameFromParamsOrAlias(field); + TopHitsAggregationBuilder topHits = AggregationBuilders.topHits(alias); + List params = field.getParams(); + String[] include = null; + String[] exclude = null; + for (KVValue kv : params) { + switch (kv.key) { + case "from": + topHits.from((int) kv.value); + break; + case "size": + topHits.size((int) kv.value); + break; + case "include": + include = kv.value.toString().split(","); + break; + case "exclude": + exclude = kv.value.toString().split(","); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + topHits.sort(kv.key, SortOrder.valueOf(kv.value.toString().toUpperCase())); + break; + } } - - public Map getGroupMap() { - return this.groupMap; + if (include != null || exclude != null) { + topHits.fetchSource(include, exclude); } - - /** - * Wrap the Metric Aggregation with Filter Aggregation if necessary. - * The Filter Aggregation condition is constructed from the nested condition in where clause. - */ - private AggregationBuilder wrapWithFilterAgg(NestedType nestedType, ValuesSourceAggregationBuilder builder) - throws SqlParseException { - if (where != null && where.getWheres() != null) { - List nestedConditionList = where.getWheres().stream() - .filter(condition -> condition instanceof Condition) - .map(condition -> (Condition) condition) - .filter(condition -> condition.isNestedComplex() - || nestedType.path.equalsIgnoreCase(condition.getNestedPath())) - // ignore the OR condition on nested field. 
- .filter(condition -> CONN.AND.equals(condition.getConn())) - .collect(Collectors.toList()); - if (!nestedConditionList.isEmpty()) { - Where filterWhere = new Where(where.getConn()); - nestedConditionList.forEach(condition -> { - if (condition.isNestedComplex()) { - ((Where) condition.getValue()).getWheres().forEach(filterWhere::addWhere); - } else { - // Since the filter condition is used inside Nested Aggregation,remove the nested attribute. - condition.setNested(false); - condition.setNestedPath(""); - filterWhere.addWhere(condition); - } - }); - FilterAggregationBuilder filterAgg = AggregationBuilders.filter( - nestedType.getFilterAggName(), - QueryMaker.explain(filterWhere)); - nestedType.addBucketPath(Path.getAggPath(filterAgg.getName())); - return filterAgg.subAggregation(builder); - } - } - return builder; + return topHits; + } + + public Map getGroupMap() { + return this.groupMap; + } + + /** + * Wrap the Metric Aggregation with Filter Aggregation if necessary. The Filter Aggregation + * condition is constructed from the nested condition in where clause. + */ + private AggregationBuilder wrapWithFilterAgg( + NestedType nestedType, ValuesSourceAggregationBuilder builder) throws SqlParseException { + if (where != null && where.getWheres() != null) { + List nestedConditionList = + where.getWheres().stream() + .filter(condition -> condition instanceof Condition) + .map(condition -> (Condition) condition) + .filter( + condition -> + condition.isNestedComplex() + || nestedType.path.equalsIgnoreCase(condition.getNestedPath())) + // ignore the OR condition on nested field. 
+ .filter(condition -> CONN.AND.equals(condition.getConn())) + .collect(Collectors.toList()); + if (!nestedConditionList.isEmpty()) { + Where filterWhere = new Where(where.getConn()); + nestedConditionList.forEach( + condition -> { + if (condition.isNestedComplex()) { + ((Where) condition.getValue()).getWheres().forEach(filterWhere::addWhere); + } else { + // Since the filter condition is used inside Nested Aggregation,remove the nested + // attribute. + condition.setNested(false); + condition.setNestedPath(""); + filterWhere.addWhere(condition); + } + }); + FilterAggregationBuilder filterAgg = + AggregationBuilders.filter( + nestedType.getFilterAggName(), QueryMaker.explain(filterWhere)); + nestedType.addBucketPath(Path.getAggPath(filterAgg.getName())); + return filterAgg.subAggregation(builder); + } } - - /** - * The groupMap is used when parsing order by to find out the corresponding field in aggregation. - * There are two cases. - * 1) using alias in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY c - * 2) using full name in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY COUNT(*) - * Then, the groupMap should support these two cases by maintain the mapping of - * {alias, value} and {full_name, value} - */ - private void extendGroupMap(Field field, KVValue value) { - groupMap.put(field.toString(), value); - if (!StringUtils.isEmpty(field.getAlias())) { - groupMap.putIfAbsent(field.getAlias(), value); - } + return builder; + } + + /** + * The groupMap is used when parsing order by to find out the corresponding field in aggregation. + * There are two cases. + * + *

    + *
  1. using alias in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY c + *
  2. using full name in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY + * COUNT(*) + *
+ * + * Then, the groupMap should support these two cases by maintain the mapping of {alias, value} and + * {full_name, value} + */ + private void extendGroupMap(Field field, KVValue value) { + groupMap.put(field.toString(), value); + if (!StringUtils.isEmpty(field.getAlias())) { + groupMap.putIfAbsent(field.getAlias(), value); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java index 01a0e78484..a8fb7cc53c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; @@ -17,63 +16,63 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.PhysicalScroll; -/** - * The definition of QueryPlanner which return the {@link BindingTuple} as result. - */ +/** The definition of QueryPlanner which return the {@link BindingTuple} as result. */ public class BindingTupleQueryPlanner { - private PhysicalOperator physicalOperator; - @Getter - private List columnNodes; - - public BindingTupleQueryPlanner(Client client, SQLQueryExpr sqlExpr, ColumnTypeProvider columnTypeProvider) { - SQLToOperatorConverter converter = new SQLToOperatorConverter(client, columnTypeProvider); - sqlExpr.accept(converter); - this.physicalOperator = converter.getPhysicalOperator(); - this.columnNodes = converter.getColumnNodes(); - } + private PhysicalOperator physicalOperator; + @Getter private List columnNodes; - /** - * Execute the QueryPlanner. - * @return list of {@link BindingTuple}. 
- */ - public List execute() { - PhysicalOperator op = physicalOperator; - List tuples = new ArrayList<>(); - try { - op.open(null); - } catch (Exception e) { - throw new RuntimeException(e); - } + public BindingTupleQueryPlanner( + Client client, SQLQueryExpr sqlExpr, ColumnTypeProvider columnTypeProvider) { + SQLToOperatorConverter converter = new SQLToOperatorConverter(client, columnTypeProvider); + sqlExpr.accept(converter); + this.physicalOperator = converter.getPhysicalOperator(); + this.columnNodes = converter.getColumnNodes(); + } - while (op.hasNext()) { - tuples.add(op.next().data()); - } - return tuples; + /** + * Execute the QueryPlanner. + * + * @return list of {@link BindingTuple}. + */ + public List execute() { + PhysicalOperator op = physicalOperator; + List tuples = new ArrayList<>(); + try { + op.open(null); + } catch (Exception e) { + throw new RuntimeException(e); } - /** - * Explain the physical execution plan. - * @return execution plan. - */ - public String explain() { - Explanation explanation = new Explanation(); - physicalOperator.accept(explanation); - return explanation.explain(); + while (op.hasNext()) { + tuples.add(op.next().data()); } + return tuples; + } + + /** + * Explain the physical execution plan. + * + * @return execution plan. 
+ */ + public String explain() { + Explanation explanation = new Explanation(); + physicalOperator.accept(explanation); + return explanation.explain(); + } - private static class Explanation implements PlanNode.Visitor { - private String explain; + private static class Explanation implements PlanNode.Visitor { + private String explain; - public String explain() { - return explain; - } + public String explain() { + return explain; + } - @Override - public boolean visit(PlanNode planNode) { - if (planNode instanceof PhysicalScroll) { - explain = planNode.toString(); - } - return true; - } + @Override + public boolean visit(PlanNode planNode) { + if (planNode instanceof PhysicalScroll) { + explain = planNode.toString(); + } + return true; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java index 753d5ac001..9dd969fb83 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.google.common.base.Strings; @@ -14,20 +13,18 @@ import org.opensearch.sql.legacy.executor.format.Schema; import org.opensearch.sql.legacy.expression.core.Expression; -/** - * The definition of column node. - */ +/** The definition of column node. */ @Builder @Setter @Getter @ToString public class ColumnNode { - private String name; - private String alias; - private Schema.Type type; - private Expression expr; + private String name; + private String alias; + private Schema.Type type; + private Expression expr; - public String columnName() { - return Strings.isNullOrEmpty(alias) ? name : alias; - } + public String columnName() { + return Strings.isNullOrEmpty(alias) ? 
name : alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java index 6e04c674cb..304a16756b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java @@ -3,156 +3,134 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import org.opensearch.sql.legacy.query.planner.resource.blocksize.AdaptiveBlockSize; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize.FixedBlockSize; -/** - * Query planner configuration - */ +/** Query planner configuration */ public class Config { - public static final int DEFAULT_BLOCK_SIZE = 10000; - public static final int DEFAULT_SCROLL_PAGE_SIZE = 10000; - public static final int DEFAULT_CIRCUIT_BREAK_LIMIT = 85; - public static final double[] DEFAULT_BACK_OFF_RETRY_INTERVALS = {4, 8 + 4, 16 + 4}; - public static final int DEFAULT_TIME_OUT = 60; - - /** - * Block size for join algorithm - */ - private BlockSize blockSize = new FixedBlockSize(DEFAULT_BLOCK_SIZE); - - /** - * Page size for scroll on each index - */ - private Integer[] scrollPageSizes = {DEFAULT_SCROLL_PAGE_SIZE, DEFAULT_SCROLL_PAGE_SIZE}; - - /** - * Circuit breaker trigger limit (percentage) - */ - private Integer circuitBreakLimit = DEFAULT_CIRCUIT_BREAK_LIMIT; - - /** - * Intervals for back off retry - */ - private double[] backOffRetryIntervals = DEFAULT_BACK_OFF_RETRY_INTERVALS; - - /** - * Total number of rows in final result specified by LIMIT - */ - private int totalLimit; - - /** - * Number of rows fetched from each table specified by JOIN_TABLES_LIMIT hint - */ - private int tableLimit1; - private int tableLimit2; - - /** - * Push down column values in ON of first table to query 
against second table - */ - private boolean isUseTermsFilterOptimization = false; - - /** - * Total time out (seconds) for the execution - */ - private int timeout = DEFAULT_TIME_OUT; - - - public BlockSize blockSize() { - return blockSize; - } + public static final int DEFAULT_BLOCK_SIZE = 10000; + public static final int DEFAULT_SCROLL_PAGE_SIZE = 10000; + public static final int DEFAULT_CIRCUIT_BREAK_LIMIT = 85; + public static final double[] DEFAULT_BACK_OFF_RETRY_INTERVALS = {4, 8 + 4, 16 + 4}; + public static final int DEFAULT_TIME_OUT = 60; - public void configureBlockSize(Object[] params) { - if (params.length > 0) { - Integer size = (Integer) params[0]; - if (size > 0) { - blockSize = new FixedBlockSize(size); - } else { - blockSize = new AdaptiveBlockSize(0); - } - } - } + /** Block size for join algorithm */ + private BlockSize blockSize = new FixedBlockSize(DEFAULT_BLOCK_SIZE); - public Integer[] scrollPageSize() { - return scrollPageSizes; - } + /** Page size for scroll on each index */ + private Integer[] scrollPageSizes = {DEFAULT_SCROLL_PAGE_SIZE, DEFAULT_SCROLL_PAGE_SIZE}; - public void configureScrollPageSize(Object[] params) { - if (params.length == 1) { - scrollPageSizes = new Integer[]{ - (Integer) params[0], - (Integer) params[0] - }; - } else if (params.length >= 2) { - scrollPageSizes = (Integer[]) params; - } - } + /** Circuit breaker trigger limit (percentage) */ + private Integer circuitBreakLimit = DEFAULT_CIRCUIT_BREAK_LIMIT; - public int circuitBreakLimit() { - return circuitBreakLimit; - } + /** Intervals for back off retry */ + private double[] backOffRetryIntervals = DEFAULT_BACK_OFF_RETRY_INTERVALS; - public void configureCircuitBreakLimit(Object[] params) { - if (params.length > 0) { - circuitBreakLimit = (Integer) params[0]; - } - } + /** Total number of rows in final result specified by LIMIT */ + private int totalLimit; - public double[] backOffRetryIntervals() { - return backOffRetryIntervals; - } + /** Number of rows fetched 
from each table specified by JOIN_TABLES_LIMIT hint */ + private int tableLimit1; - public void configureBackOffRetryIntervals(Object[] params) { - backOffRetryIntervals = new double[params.length]; - for (int i = 0; i < params.length; i++) { - backOffRetryIntervals[i] = (Integer) params[i]; //Only support integer interval for now - } - } + private int tableLimit2; - public void configureLimit(Integer totalLimit, Integer tableLimit1, Integer tableLimit2) { - if (totalLimit != null) { - this.totalLimit = totalLimit; - } - if (tableLimit1 != null) { - this.tableLimit1 = tableLimit1; - } - if (tableLimit2 != null) { - this.tableLimit2 = tableLimit2; - } - } + /** Push down column values in ON of first table to query against second table */ + private boolean isUseTermsFilterOptimization = false; - public int totalLimit() { - return totalLimit; - } + /** Total time out (seconds) for the execution */ + private int timeout = DEFAULT_TIME_OUT; - public int tableLimit1() { - return tableLimit1; + public BlockSize blockSize() { + return blockSize; + } + + public void configureBlockSize(Object[] params) { + if (params.length > 0) { + Integer size = (Integer) params[0]; + if (size > 0) { + blockSize = new FixedBlockSize(size); + } else { + blockSize = new AdaptiveBlockSize(0); + } } + } + + public Integer[] scrollPageSize() { + return scrollPageSizes; + } - public int tableLimit2() { - return tableLimit2; + public void configureScrollPageSize(Object[] params) { + if (params.length == 1) { + scrollPageSizes = new Integer[] {(Integer) params[0], (Integer) params[0]}; + } else if (params.length >= 2) { + scrollPageSizes = (Integer[]) params; } + } + + public int circuitBreakLimit() { + return circuitBreakLimit; + } - public void configureTermsFilterOptimization(boolean isUseTermFiltersOptimization) { - this.isUseTermsFilterOptimization = isUseTermFiltersOptimization; + public void configureCircuitBreakLimit(Object[] params) { + if (params.length > 0) { + circuitBreakLimit = 
(Integer) params[0]; } + } - public boolean isUseTermsFilterOptimization() { - return isUseTermsFilterOptimization; + public double[] backOffRetryIntervals() { + return backOffRetryIntervals; + } + + public void configureBackOffRetryIntervals(Object[] params) { + backOffRetryIntervals = new double[params.length]; + for (int i = 0; i < params.length; i++) { + backOffRetryIntervals[i] = (Integer) params[i]; // Only support integer interval for now } + } - public void configureTimeOut(Object[] params) { - if (params.length > 0) { - timeout = (Integer) params[0]; - } + public void configureLimit(Integer totalLimit, Integer tableLimit1, Integer tableLimit2) { + if (totalLimit != null) { + this.totalLimit = totalLimit; + } + if (tableLimit1 != null) { + this.tableLimit1 = tableLimit1; + } + if (tableLimit2 != null) { + this.tableLimit2 = tableLimit2; } + } - public int timeout() { - return timeout; + public int totalLimit() { + return totalLimit; + } + + public int tableLimit1() { + return tableLimit1; + } + + public int tableLimit2() { + return tableLimit2; + } + + public void configureTermsFilterOptimization(boolean isUseTermFiltersOptimization) { + this.isUseTermsFilterOptimization = isUseTermFiltersOptimization; + } + + public boolean isUseTermsFilterOptimization() { + return isUseTermsFilterOptimization; + } + + public void configureTimeOut(Object[] params) { + if (params.length > 0) { + timeout = (Integer) params[0]; } + } + + public int timeout() { + return timeout; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java index efaf7057b6..86f155d626 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java @@ -3,22 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.query.planner.physical.estimation; public class Cost implements Comparable { - public static final Cost INFINITY = new Cost(); + public static final Cost INFINITY = new Cost(); - private long inputSize; + private long inputSize; - private long time; + private long time; - public Cost() { - } + public Cost() {} - @Override - public int compareTo(Cost o) { - return 0; - } + @Override + public int compareTo(Cost o) { + return 0; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java index 3b4eb2b48e..19ee573652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node; import static org.opensearch.sql.legacy.query.planner.core.ExecuteParams.ExecuteParamType.RESOURCE_MANAGER; @@ -19,78 +18,74 @@ import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; /** - * Abstraction for physical operators that load large volume of data and generally prefetch for efficiency. + * Abstraction for physical operators that load large volume of data and generally prefetch for + * efficiency. 
* * @param */ public abstract class BatchPhysicalOperator implements PhysicalOperator { - protected static final Logger LOG = LogManager.getLogger(); + protected static final Logger LOG = LogManager.getLogger(); - /** - * Resource monitor to avoid consuming too much resource - */ - private ResourceManager resourceMgr; + /** Resource monitor to avoid consuming too much resource */ + private ResourceManager resourceMgr; - /** - * Current batch of data - */ - private Iterator> curBatch; + /** Current batch of data */ + private Iterator> curBatch; - @Override - public void open(ExecuteParams params) throws Exception { - //PhysicalOperator.super.open(params); // Child needs to call this super.open() and open its next node too - resourceMgr = params.get(RESOURCE_MANAGER); - } + @Override + public void open(ExecuteParams params) throws Exception { + // Child needs to call this super.open() and open its next node too + // PhysicalOperator.super.open(params); + resourceMgr = params.get(RESOURCE_MANAGER); + } - @Override - public boolean hasNext() { - if (isNoMoreDataInCurrentBatch()) { - LOG.debug("{} No more data in current batch, pre-fetching next batch", this); - Collection> nextBatch = prefetchSafely(); + @Override + public boolean hasNext() { + if (isNoMoreDataInCurrentBatch()) { + LOG.debug("{} No more data in current batch, pre-fetching next batch", this); + Collection> nextBatch = prefetchSafely(); - LOG.debug("{} Pre-fetched {} rows", this, nextBatch.size()); - if (LOG.isTraceEnabled()) { - nextBatch.forEach(row -> LOG.trace("Row pre-fetched: {}", row)); - } + LOG.debug("{} Pre-fetched {} rows", this, nextBatch.size()); + if (LOG.isTraceEnabled()) { + nextBatch.forEach(row -> LOG.trace("Row pre-fetched: {}", row)); + } - curBatch = nextBatch.iterator(); - } - return curBatch.hasNext(); + curBatch = nextBatch.iterator(); } - - @Override - public Row next() { - return curBatch.next(); - } - - /** - * Prefetch next batch safely by checking resource monitor - */ - 
private Collection> prefetchSafely() { - Objects.requireNonNull(resourceMgr, "ResourceManager is not set so unable to do sanity check"); - - boolean isHealthy = resourceMgr.isHealthy(); - boolean isTimeout = resourceMgr.isTimeout(); - if (isHealthy && !isTimeout) { - try { - return prefetch(); - } catch (Exception e) { - throw new IllegalStateException("Failed to prefetch next batch", e); - } - } - throw new IllegalStateException("Exit due to " + (isHealthy ? "time out" : "insufficient resource")); + return curBatch.hasNext(); + } + + @Override + public Row next() { + return curBatch.next(); + } + + /** Prefetch next batch safely by checking resource monitor */ + private Collection> prefetchSafely() { + Objects.requireNonNull(resourceMgr, "ResourceManager is not set so unable to do sanity check"); + + boolean isHealthy = resourceMgr.isHealthy(); + boolean isTimeout = resourceMgr.isTimeout(); + if (isHealthy && !isTimeout) { + try { + return prefetch(); + } catch (Exception e) { + throw new IllegalStateException("Failed to prefetch next batch", e); + } } - - /** - * Prefetch next batch if current is exhausted. - * - * @return next batch - */ - protected abstract Collection> prefetch() throws Exception; - - private boolean isNoMoreDataInCurrentBatch() { - return curBatch == null || !curBatch.hasNext(); - } - + throw new IllegalStateException( + "Exit due to " + (isHealthy ? "time out" : "insufficient resource")); + } + + /** + * Prefetch next batch if current is exhausted. 
+ * + * @return next batch + */ + protected abstract Collection> prefetch() throws Exception; + + private boolean isNoMoreDataInCurrentBatch() { + return curBatch == null || !curBatch.hasNext(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java index 19c0ae41d2..90bf9923d3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -25,91 +24,87 @@ import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; -/** - * Block-based Hash Join implementation - */ +/** Block-based Hash Join implementation */ public class BlockHashJoin extends JoinAlgorithm { - /** - * Use terms filter optimization or not - */ - private final boolean isUseTermsFilterOptimization; + /** Use terms filter optimization or not */ + private final boolean isUseTermsFilterOptimization; - public BlockHashJoin(PhysicalOperator left, - PhysicalOperator right, - JoinType type, - JoinCondition condition, - BlockSize blockSize, - boolean isUseTermsFilterOptimization) { - super(left, right, type, condition, blockSize); + public BlockHashJoin( + PhysicalOperator left, + PhysicalOperator right, + JoinType type, + JoinCondition condition, + BlockSize blockSize, + boolean isUseTermsFilterOptimization) { + super(left, right, type, condition, blockSize); - this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; - } + this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; + } - 
@Override - public Cost estimate() { - return new Cost(); - } + @Override + public Cost estimate() { + return new Cost(); + } - @Override - protected void reopenRight() throws Exception { - Objects.requireNonNull(params, "Execute params is not set so unable to add extra filter"); + @Override + protected void reopenRight() throws Exception { + Objects.requireNonNull(params, "Execute params is not set so unable to add extra filter"); - if (isUseTermsFilterOptimization) { - params.add(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER, queryForPushedDownOnConds()); - } - right.open(params); + if (isUseTermsFilterOptimization) { + params.add(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER, queryForPushedDownOnConds()); } - - @Override - protected List> probe() { - List> combinedRows = new ArrayList<>(); - int totalSize = 0; - - /* Return if already found enough matched rows to give ResourceMgr a chance to check resource usage */ - while (right.hasNext() && totalSize < hashTable.size()) { - Row rightRow = right.next(); - Collection> matchedLeftRows = hashTable.match(rightRow); - - if (!matchedLeftRows.isEmpty()) { - combinedRows.add(new CombinedRow<>(rightRow, matchedLeftRows)); - totalSize += matchedLeftRows.size(); - } - } - return combinedRows; + right.open(params); + } + + @Override + protected List> probe() { + List> combinedRows = new ArrayList<>(); + int totalSize = 0; + + /* Return if already found enough matched rows to give ResourceMgr a chance to check resource usage */ + while (right.hasNext() && totalSize < hashTable.size()) { + Row rightRow = right.next(); + Collection> matchedLeftRows = hashTable.match(rightRow); + + if (!matchedLeftRows.isEmpty()) { + combinedRows.add(new CombinedRow<>(rightRow, matchedLeftRows)); + totalSize += matchedLeftRows.size(); + } } - - /** - * Build query for pushed down conditions in ON - */ - private BoolQueryBuilder queryForPushedDownOnConds() { - BoolQueryBuilder orQuery = boolQuery(); - Map>[] rightNameToLeftValuesGroup = 
hashTable.rightFieldWithLeftValues(); - - for (Map> rightNameToLeftValues : rightNameToLeftValuesGroup) { - if (LOG.isTraceEnabled()) { - rightNameToLeftValues.forEach((rightName, leftValues) -> - LOG.trace("Right name to left values mapping: {} => {}", rightName, leftValues)); - } - - BoolQueryBuilder andQuery = boolQuery(); - rightNameToLeftValues.forEach( - (rightName, leftValues) -> andQuery.must(termsQuery(rightName, leftValues)) - ); - - if (LOG.isTraceEnabled()) { - LOG.trace("Terms filter optimization: {}", Strings.toString(XContentType.JSON, andQuery)); - } - orQuery.should(andQuery); - } - return orQuery; + return combinedRows; + } + + /** Build query for pushed down conditions in ON */ + private BoolQueryBuilder queryForPushedDownOnConds() { + BoolQueryBuilder orQuery = boolQuery(); + Map>[] rightNameToLeftValuesGroup = + hashTable.rightFieldWithLeftValues(); + + for (Map> rightNameToLeftValues : rightNameToLeftValuesGroup) { + if (LOG.isTraceEnabled()) { + rightNameToLeftValues.forEach( + (rightName, leftValues) -> + LOG.trace("Right name to left values mapping: {} => {}", rightName, leftValues)); + } + + BoolQueryBuilder andQuery = boolQuery(); + rightNameToLeftValues.forEach( + (rightName, leftValues) -> andQuery.must(termsQuery(rightName, leftValues))); + + if (LOG.isTraceEnabled()) { + LOG.trace("Terms filter optimization: {}", Strings.toString(XContentType.JSON, andQuery)); + } + orQuery.should(andQuery); } + return orQuery; + } - /********************************************* - * Getters for Explain - *********************************************/ + /********************************************* + * Getters for Explain + *********************************************/ - public boolean isUseTermsFilterOptimization() { - return isUseTermsFilterOptimization; - } + public boolean isUseTermsFilterOptimization() { + return isUseTermsFilterOptimization; + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java index e83bbb7d0e..b1fb43441e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.ArrayList; @@ -19,28 +18,28 @@ */ public class CombinedRow { - private Row rightRow; - private Collection> leftRows; + private Row rightRow; + private Collection> leftRows; - public CombinedRow(Row rightRow, Collection> leftRows) { - this.rightRow = rightRow; - this.leftRows = leftRows; - } + public CombinedRow(Row rightRow, Collection> leftRows) { + this.rightRow = rightRow; + this.leftRows = leftRows; + } - public List> combine() { - List> combinedRows = new ArrayList<>(); - for (Row leftRow : leftRows) { - combinedRows.add(leftRow.combine(rightRow)); - } - return combinedRows; + public List> combine() { + List> combinedRows = new ArrayList<>(); + for (Row leftRow : leftRows) { + combinedRows.add(leftRow.combine(rightRow)); } + return combinedRows; + } - public Collection> leftMatchedRows() { - return Collections.unmodifiableCollection(leftRows); - } + public Collection> leftMatchedRows() { + return Collections.unmodifiableCollection(leftRows); + } - @Override - public String toString() { - return "CombinedRow{rightRow=" + rightRow + ", leftRows=" + leftRows + '}'; - } + @Override + public String toString() { + return "CombinedRow{rightRow=" + rightRow + ", leftRows=" + leftRows + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java index 733d7a78ab..23e79d2c31 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static java.util.Collections.emptyList; @@ -22,102 +21,98 @@ import org.opensearch.sql.legacy.query.planner.physical.Row.RowKey; /** - * Hash table implementation. - * In the case of no join condition, hash table degrades to linked list with all rows in block paired to RowKey.NULL + * Hash table implementation. In the case of no join condition, hash table degrades to linked list + * with all rows in block paired to RowKey.NULL * * @param Row data type */ public class DefaultHashTable implements HashTable { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Hash table implementation - */ - private final Multimap> table = ArrayListMultimap.create(); - - /** - * Left join conditions to generate key to build hash table by left rows from block - */ - private final String[] leftJoinFields; - - /** - * Right join conditions to generate key to probe hash table by right rows - */ - private final String[] rightJoinFields; - - - public DefaultHashTable(String[] leftJoinFields, String[] rightJoinFields) { - this.leftJoinFields = leftJoinFields; - this.rightJoinFields = rightJoinFields; + private static final Logger LOG = LogManager.getLogger(); + + /** Hash table implementation */ + private final Multimap> table = ArrayListMultimap.create(); + + /** Left join conditions to generate key to build hash table by left rows from block */ + private final String[] leftJoinFields; + + /** Right join conditions to generate key to probe hash table by right rows */ + private final 
String[] rightJoinFields; + + public DefaultHashTable(String[] leftJoinFields, String[] rightJoinFields) { + this.leftJoinFields = leftJoinFields; + this.rightJoinFields = rightJoinFields; + } + + /** + * Add row in block to hash table by left conditions in ON. For the duplicate key, append them to + * the list in value (MultiMap) + */ + @Override + public void add(Row row) { + RowKey key = row.key(leftJoinFields); + if (key == RowKey.NULL) { + LOG.debug( + "Skip rows with NULL column value during build: row={}, conditions={}", + row, + leftJoinFields); + } else { + table.put(key, row); } - - /** - * Add row in block to hash table by left conditions in ON. - * For the duplicate key, append them to the list in value (MultiMap) - */ - @Override - public void add(Row row) { - RowKey key = row.key(leftJoinFields); - if (key == RowKey.NULL) { - LOG.debug("Skip rows with NULL column value during build: row={}, conditions={}", row, leftJoinFields); - } else { - table.put(key, row); - } + } + + /** Probe hash table to match right rows by values of right conditions */ + @Override + public Collection> match(Row row) { + RowKey key = row.key(rightJoinFields); + if (key == RowKey.NULL) { + LOG.debug( + "Skip rows with NULL column value during probing: row={}, conditions={}", + row, + rightJoinFields); + return emptyList(); } - - /** - * Probe hash table to match right rows by values of right conditions - */ - @Override - public Collection> match(Row row) { - RowKey key = row.key(rightJoinFields); - if (key == RowKey.NULL) { - LOG.debug("Skip rows with NULL column value during probing: row={}, conditions={}", row, rightJoinFields); - return emptyList(); - } - return table.get(key); // Multimap returns empty list rather null. + return table.get(key); // Multimap returns empty list rather null. 
+ } + + /** Right joined field name with according column value list to push down */ + @SuppressWarnings("unchecked") + @Override + public Map>[] rightFieldWithLeftValues() { + Map> result = + new HashMap<>(); // Eliminate potential duplicate in values + for (RowKey key : table.keySet()) { + Object[] keys = key.keys(); + for (int i = 0; i < keys.length; i++) { + result + .computeIfAbsent(rightJoinFields[i], (k -> new HashSet<>())) + .add(lowercaseIfStr(keys[i])); // Terms stored in lower case in OpenSearch + } } - /** - * Right joined field name with according column value list to push down - */ - @SuppressWarnings("unchecked") - @Override - public Map>[] rightFieldWithLeftValues() { - Map> result = new HashMap<>(); // Eliminate potential duplicate in values - for (RowKey key : table.keySet()) { - Object[] keys = key.keys(); - for (int i = 0; i < keys.length; i++) { - result.computeIfAbsent(rightJoinFields[i], (k -> new HashSet<>())). - add(lowercaseIfStr(keys[i])); // Terms stored in lower case in OpenSearch - } - } - - // Convert value of Map from Guava's Set to JDK list which is expected by OpenSearch writer - for (Entry> entry : result.entrySet()) { - entry.setValue(new ArrayList<>(entry.getValue())); - } - return new Map[]{result}; + // Convert value of Map from Guava's Set to JDK list which is expected by OpenSearch writer + for (Entry> entry : result.entrySet()) { + entry.setValue(new ArrayList<>(entry.getValue())); } - - @Override - public int size() { - return table.size(); - } - - @Override - public boolean isEmpty() { - return table.isEmpty(); - } - - @Override - public void clear() { - table.clear(); - } - - private Object lowercaseIfStr(Object key) { - return key instanceof String ? 
((String) key).toLowerCase() : key; - } - + return new Map[] {result}; + } + + @Override + public int size() { + return table.size(); + } + + @Override + public boolean isEmpty() { + return table.isEmpty(); + } + + @Override + public void clear() { + table.clear(); + } + + private Object lowercaseIfStr(Object key) { + return key instanceof String ? ((String) key).toLowerCase() : key; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java index 9e3a190e30..41f500fed1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import java.util.Map; @@ -13,25 +12,25 @@ @RequiredArgsConstructor public class BindingTupleRow implements Row { - private final BindingTuple bindingTuple; - - @Override - public RowKey key(String[] colNames) { - return null; - } - - @Override - public Row combine(Row otherRow) { - throw new RuntimeException("unsupported operation"); - } - - @Override - public void retain(Map colNameAlias) { - // do nothing - } - - @Override - public BindingTuple data() { - return bindingTuple; - } + private final BindingTuple bindingTuple; + + @Override + public RowKey key(String[] colNames) { + return null; + } + + @Override + public Row combine(Row otherRow) { + throw new RuntimeException("unsupported operation"); + } + + @Override + public void retain(Map colNameAlias) { + // do nothing + } + + @Override + public BindingTuple data() { + return bindingTuple; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java index d68b16b8bb..6e5a2703f4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java @@ -3,42 +3,35 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.blocksize; -/** - * Block size calculating logic. - */ +/** Block size calculating logic. */ public interface BlockSize { - /** - * Get block size configured or dynamically. Integer should be sufficient for single block size. - * - * @return block size. - */ - int size(); - + /** + * Get block size configured or dynamically. Integer should be sufficient for single block size. + * + * @return block size. + */ + int size(); - /** - * Default implementation with fixed block size - */ - class FixedBlockSize implements BlockSize { + /** Default implementation with fixed block size */ + class FixedBlockSize implements BlockSize { - private int blockSize; + private int blockSize; - public FixedBlockSize(int blockSize) { - this.blockSize = blockSize; - } - - @Override - public int size() { - return blockSize; - } + public FixedBlockSize(int blockSize) { + this.blockSize = blockSize; + } - @Override - public String toString() { - return "FixedBlockSize with " + "size=" + blockSize; - } + @Override + public int size() { + return blockSize; } + @Override + public String toString() { + return "FixedBlockSize with size=" + blockSize; + } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java index 2768b269bf..c4f3ee5a10 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.identifier; import com.alibaba.druid.sql.ast.expr.SQLBooleanExpr; @@ -17,53 +16,53 @@ import org.opensearch.sql.legacy.rewriter.RewriteRule; /** - * Rewrite rule to anonymize sensitive data in logging queries. - * This rule replace the content of specific nodes (that might involve index data) in AST - * to anonymous content. + * Rewrite rule to anonymize sensitive data in logging queries. This rule replace the content of + * specific nodes (that might involve index data) in AST to anonymous content. */ -public class AnonymizeSensitiveDataRule extends MySqlASTVisitorAdapter implements RewriteRule { +public class AnonymizeSensitiveDataRule extends MySqlASTVisitorAdapter + implements RewriteRule { - @Override - public boolean visit(SQLIdentifierExpr identifierExpr) { - if (identifierExpr.getParent() instanceof SQLExprTableSource) { - identifierExpr.setName("table"); - } else { - identifierExpr.setName("identifier"); - } - return true; + @Override + public boolean visit(SQLIdentifierExpr identifierExpr) { + if (identifierExpr.getParent() instanceof SQLExprTableSource) { + identifierExpr.setName("table"); + } else { + identifierExpr.setName("identifier"); } + return true; + } - @Override - public boolean visit(SQLIntegerExpr integerExpr) { - integerExpr.setNumber(0); - return true; - } + @Override + public boolean visit(SQLIntegerExpr integerExpr) { + integerExpr.setNumber(0); + return true; + } - @Override - public boolean visit(SQLNumberExpr numberExpr) { - numberExpr.setNumber(0); - return true; - } + @Override + public boolean visit(SQLNumberExpr numberExpr) { + numberExpr.setNumber(0); + return true; + } - @Override - public boolean visit(SQLCharExpr charExpr) { - charExpr.setText("string_literal"); - return true; - } + @Override + public boolean visit(SQLCharExpr 
charExpr) { + charExpr.setText("string_literal"); + return true; + } - @Override - public boolean visit(SQLBooleanExpr booleanExpr) { - booleanExpr.setValue(false); - return true; - } + @Override + public boolean visit(SQLBooleanExpr booleanExpr) { + booleanExpr.setValue(false); + return true; + } - @Override - public boolean match(SQLQueryExpr expr) { - return true; - } + @Override + public boolean match(SQLQueryExpr expr) { + return true; + } - @Override - public void rewrite(SQLQueryExpr expr) { - expr.accept(this); - } + @Override + public void rewrite(SQLQueryExpr expr) { + expr.accept(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java index df9f4c88b2..fb62f60ae7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class BoundingBoxFilterParams { - private Point topLeft; - private Point bottomRight; + private Point topLeft; + private Point bottomRight; - public BoundingBoxFilterParams(Point topLeft, Point bottomRight) { - this.topLeft = topLeft; - this.bottomRight = bottomRight; - } + public BoundingBoxFilterParams(Point topLeft, Point bottomRight) { + this.topLeft = topLeft; + this.bottomRight = bottomRight; + } - public Point getTopLeft() { - return topLeft; - } + public Point getTopLeft() { + return topLeft; + } - public Point getBottomRight() { - return bottomRight; - } + public Point getBottomRight() { + return bottomRight; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java index fc3dc35f07..6c50c17467 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java @@ -3,36 +3,33 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 15/8/2015. - */ +/** Created by Eliran on 15/8/2015. 
*/ public class CellFilterParams { - private Point geohashPoint; - private int precision; - private boolean neighbors; - - public CellFilterParams(Point geohashPoint, int precision, boolean neighbors) { - this.geohashPoint = geohashPoint; - this.precision = precision; - this.neighbors = neighbors; - } - - public CellFilterParams(Point geohashPoint, int precision) { - this(geohashPoint, precision, false); - } - - public Point getGeohashPoint() { - return geohashPoint; - } - - public int getPrecision() { - return precision; - } - - public boolean isNeighbors() { - return neighbors; - } + private Point geohashPoint; + private int precision; + private boolean neighbors; + + public CellFilterParams(Point geohashPoint, int precision, boolean neighbors) { + this.geohashPoint = geohashPoint; + this.precision = precision; + this.neighbors = neighbors; + } + + public CellFilterParams(Point geohashPoint, int precision) { + this(geohashPoint, precision, false); + } + + public Point getGeohashPoint() { + return geohashPoint; + } + + public int getPrecision() { + return precision; + } + + public boolean isNeighbors() { + return neighbors; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java index 1141da08ca..8c419de58d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class DistanceFilterParams { - private String distance; - private Point from; + private String distance; + private Point from; - public DistanceFilterParams(String distance, Point from) { - this.distance = distance; - this.from = from; - } + public DistanceFilterParams(String distance, Point from) { + this.distance = distance; + this.from = from; + } - public String getDistance() { - return distance; - } + public String getDistance() { + return distance; + } - public Point getFrom() { - return from; - } + public Point getFrom() { + return from; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java index a8ddfd43e8..0269c6b01c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static org.junit.Assert.assertEquals; @@ -30,78 +29,75 @@ import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test base type compatibility - */ +/** Test base type compatibility */ public class BaseTypeTest { - @Test - public void unknownTypeNameShouldReturnUnknown() { - assertEquals(UNKNOWN, OpenSearchDataType.typeOf("this_is_a_new_es_type_we_arent_aware")); - } - - @Test - public void typeOfShouldIgnoreCase() { - assertEquals(INTEGER, OpenSearchDataType.typeOf("Integer")); - } - - @Test - public void sameBaseTypeShouldBeCompatible() { - assertTrue(INTEGER.isCompatible(INTEGER)); - assertTrue(BOOLEAN.isCompatible(BOOLEAN)); - } - - @Test - public void parentBaseTypeShouldBeCompatibleWithSubBaseType() { - assertTrue(NUMBER.isCompatible(DOUBLE)); - 
assertTrue(DOUBLE.isCompatible(FLOAT)); - assertTrue(FLOAT.isCompatible(INTEGER)); - assertTrue(INTEGER.isCompatible(SHORT)); - assertTrue(INTEGER.isCompatible(LONG)); - assertTrue(STRING.isCompatible(TEXT)); - assertTrue(STRING.isCompatible(KEYWORD)); - assertTrue(DATE.isCompatible(STRING)); - } - - @Test - public void ancestorBaseTypeShouldBeCompatibleWithSubBaseType() { - assertTrue(NUMBER.isCompatible(LONG)); - assertTrue(NUMBER.isCompatible(DOUBLE)); - assertTrue(DOUBLE.isCompatible(INTEGER)); - assertTrue(INTEGER.isCompatible(SHORT)); - assertTrue(INTEGER.isCompatible(LONG)); - } - - @Ignore("Two way compatibility is not necessary") - @Test - public void subBaseTypeShouldBeCompatibleWithParentBaseType() { - assertTrue(KEYWORD.isCompatible(STRING)); - } - - @Test - public void nonRelatedBaseTypeShouldNotBeCompatible() { - assertFalse(SHORT.isCompatible(TEXT)); - assertFalse(DATE.isCompatible(BOOLEAN)); - } - - @Test - public void unknownBaseTypeShouldBeCompatibleWithAnyBaseType() { - assertTrue(UNKNOWN.isCompatible(INTEGER)); - assertTrue(UNKNOWN.isCompatible(KEYWORD)); - assertTrue(UNKNOWN.isCompatible(BOOLEAN)); - } - - @Test - public void anyBaseTypeShouldBeCompatibleWithUnknownBaseType() { - assertTrue(LONG.isCompatible(UNKNOWN)); - assertTrue(TEXT.isCompatible(UNKNOWN)); - assertTrue(DATE.isCompatible(UNKNOWN)); - } - - @Test - public void nestedIndexTypeShouldBeCompatibleWithNestedDataType() { - assertTrue(NESTED.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); - assertTrue(OPENSEARCH_TYPE.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); - } - + @Test + public void unknownTypeNameShouldReturnUnknown() { + assertEquals(UNKNOWN, OpenSearchDataType.typeOf("this_is_a_new_es_type_we_arent_aware")); + } + + @Test + public void typeOfShouldIgnoreCase() { + assertEquals(INTEGER, OpenSearchDataType.typeOf("Integer")); + } + + @Test + public void sameBaseTypeShouldBeCompatible() { + assertTrue(INTEGER.isCompatible(INTEGER)); + 
assertTrue(BOOLEAN.isCompatible(BOOLEAN)); + } + + @Test + public void parentBaseTypeShouldBeCompatibleWithSubBaseType() { + assertTrue(NUMBER.isCompatible(DOUBLE)); + assertTrue(DOUBLE.isCompatible(FLOAT)); + assertTrue(FLOAT.isCompatible(INTEGER)); + assertTrue(INTEGER.isCompatible(SHORT)); + assertTrue(INTEGER.isCompatible(LONG)); + assertTrue(STRING.isCompatible(TEXT)); + assertTrue(STRING.isCompatible(KEYWORD)); + assertTrue(DATE.isCompatible(STRING)); + } + + @Test + public void ancestorBaseTypeShouldBeCompatibleWithSubBaseType() { + assertTrue(NUMBER.isCompatible(LONG)); + assertTrue(NUMBER.isCompatible(DOUBLE)); + assertTrue(DOUBLE.isCompatible(INTEGER)); + assertTrue(INTEGER.isCompatible(SHORT)); + assertTrue(INTEGER.isCompatible(LONG)); + } + + @Ignore("Two way compatibility is not necessary") + @Test + public void subBaseTypeShouldBeCompatibleWithParentBaseType() { + assertTrue(KEYWORD.isCompatible(STRING)); + } + + @Test + public void nonRelatedBaseTypeShouldNotBeCompatible() { + assertFalse(SHORT.isCompatible(TEXT)); + assertFalse(DATE.isCompatible(BOOLEAN)); + } + + @Test + public void unknownBaseTypeShouldBeCompatibleWithAnyBaseType() { + assertTrue(UNKNOWN.isCompatible(INTEGER)); + assertTrue(UNKNOWN.isCompatible(KEYWORD)); + assertTrue(UNKNOWN.isCompatible(BOOLEAN)); + } + + @Test + public void anyBaseTypeShouldBeCompatibleWithUnknownBaseType() { + assertTrue(LONG.isCompatible(UNKNOWN)); + assertTrue(TEXT.isCompatible(UNKNOWN)); + assertTrue(DATE.isCompatible(UNKNOWN)); + } + + @Test + public void nestedIndexTypeShouldBeCompatibleWithNestedDataType() { + assertTrue(NESTED.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); + assertTrue(OPENSEARCH_TYPE.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java index 
c4e7a7e725..be4b5a5197 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import static java.util.Collections.emptyList; @@ -25,95 +24,97 @@ import org.opensearch.sql.legacy.antlr.semantic.visitor.TypeChecker; import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; -/** - * Test cases for AntlrSqlParseTreeVisitor - */ +/** Test cases for AntlrSqlParseTreeVisitor */ public class AntlrSqlParseTreeVisitorTest { - private TypeChecker analyzer = new TypeChecker(new SemanticContext()) { + private TypeChecker analyzer = + new TypeChecker(new SemanticContext()) { @Override public Type visitIndexName(String indexName) { - return null; // avoid querying mapping on null LocalClusterState + return null; // avoid querying mapping on null LocalClusterState } @Override public Type visitFieldName(String fieldName) { - switch (fieldName) { - case "age": return INTEGER; - case "birthday": return DATE; - default: return UNKNOWN; - } + switch (fieldName) { + case "age": + return INTEGER; + case "birthday": + return DATE; + default: + return UNKNOWN; + } } - }; - - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void selectNumberShouldReturnNumberAsQueryVisitingResult() { - Type result = visit("SELECT age FROM test"); - Assert.assertSame(result, INTEGER); - } - - @Test - public void selectNumberAndDateShouldReturnProductOfThemAsQueryVisitingResult() { - Type result = visit("SELECT age, birthday FROM test"); - Assert.assertTrue(result instanceof Product ); - Assert.assertTrue(result.isCompatible(new Product(Arrays.asList(INTEGER, DATE)))); - } - - @Test - public void selectStarShouldReturnEmptyProductAsQueryVisitingResult() { - Type result = visit("SELECT * 
FROM test"); - Assert.assertTrue(result instanceof Product); - Assert.assertTrue(result.isCompatible(new Product(emptyList()))); - } - - @Test - public void visitSelectNestedFunctionShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); - visit("SELECT abs(log(age)) FROM test"); - } - - @Test - public void visitWhereNestedFunctionShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); - visit("SELECT age FROM test WHERE abs(log(age)) = 1"); - } - - @Test - public void visitMathConstantAsNestedFunctionShouldPass() { - visit("SELECT abs(pi()) FROM test"); - } - - @Test - public void visitSupportedNestedFunctionShouldPass() { - visit("SELECT sum(nested(name.balance)) FROM test"); - } - - @Test - public void visitFunctionAsAggregatorShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Aggregation calls with function aggregator like [max(abs(age))] are not supported yet"); - visit("SELECT max(abs(age)) FROM test"); - } - - @Test - public void visitUnsupportedOperatorShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Operator [DIV] is not supported yet"); - visit("SELECT balance DIV age FROM test"); - } - - private ParseTree createParseTree(String sql) { - return new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)).analyzeSyntax(sql); - } - - private Type visit(String sql) { - ParseTree parseTree = createParseTree(sql); - return parseTree.accept(new AntlrSqlParseTreeVisitor<>(analyzer)); - } - + }; + + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void selectNumberShouldReturnNumberAsQueryVisitingResult() { + 
Type result = visit("SELECT age FROM test"); + Assert.assertSame(result, INTEGER); + } + + @Test + public void selectNumberAndDateShouldReturnProductOfThemAsQueryVisitingResult() { + Type result = visit("SELECT age, birthday FROM test"); + Assert.assertTrue(result instanceof Product); + Assert.assertTrue(result.isCompatible(new Product(Arrays.asList(INTEGER, DATE)))); + } + + @Test + public void selectStarShouldReturnEmptyProductAsQueryVisitingResult() { + Type result = visit("SELECT * FROM test"); + Assert.assertTrue(result instanceof Product); + Assert.assertTrue(result.isCompatible(new Product(emptyList()))); + } + + @Test + public void visitSelectNestedFunctionShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); + visit("SELECT abs(log(age)) FROM test"); + } + + @Test + public void visitWhereNestedFunctionShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); + visit("SELECT age FROM test WHERE abs(log(age)) = 1"); + } + + @Test + public void visitMathConstantAsNestedFunctionShouldPass() { + visit("SELECT abs(pi()) FROM test"); + } + + @Test + public void visitSupportedNestedFunctionShouldPass() { + visit("SELECT sum(nested(name.balance)) FROM test"); + } + + @Test + public void visitFunctionAsAggregatorShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage( + "Aggregation calls with function aggregator like [max(abs(age))] are not supported yet"); + visit("SELECT max(abs(age)) FROM test"); + } + + @Test + public void visitUnsupportedOperatorShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Operator [DIV] is not supported yet"); + visit("SELECT balance DIV age 
FROM test"); + } + + private ParseTree createParseTree(String sql) { + return new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)) + .analyzeSyntax(sql); + } + + private Type visit(String sql) { + ParseTree parseTree = createParseTree(sql); + return parseTree.accept(new AntlrSqlParseTreeVisitor<>(analyzer)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java index b26e171ce7..9be2517c4a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import static java.util.Collections.emptyList; @@ -34,77 +33,69 @@ import org.opensearch.sql.opensearch.setting.OpenSearchSettings; import org.opensearch.threadpool.ThreadPool; -/** - * Test AsyncRestExecutor behavior. - */ +/** Test AsyncRestExecutor behavior. 
*/ @RunWith(MockitoJUnitRunner.Silent.class) public class AsyncRestExecutorTest { - private static final boolean NON_BLOCKING = false; - - @Mock - private RestExecutor executor; + private static final boolean NON_BLOCKING = false; - @Mock - private Client client; + @Mock private RestExecutor executor; - private Map params = emptyMap(); + @Mock private Client client; - @Mock - private QueryAction action; + private Map params = emptyMap(); - @Mock - private RestChannel channel; + @Mock private QueryAction action; - @Mock - private ClusterSettings clusterSettings; + @Mock private RestChannel channel; - @Before - public void setUp() { - when(client.threadPool()).thenReturn(mock(ThreadPool.class)); - when(action.getSqlRequest()).thenReturn(SqlRequest.NULL); - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + @Mock private ClusterSettings clusterSettings; - OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); - doReturn(emptyList()).when(settings).getSettings(); - LocalClusterState.state().setPluginSettings(settings); - } + @Before + public void setUp() { + when(client.threadPool()).thenReturn(mock(ThreadPool.class)); + when(action.getSqlRequest()).thenReturn(SqlRequest.NULL); + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - @Test - public void executeBlockingQuery() throws Exception { - Thread.currentThread().setName(TRANSPORT_WORKER_THREAD_NAME_PREFIX); - execute(); - verifyRunInWorkerThread(); - } + OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); + doReturn(emptyList()).when(settings).getSettings(); + LocalClusterState.state().setPluginSettings(settings); + } - @Test - public void executeBlockingQueryButNotInTransport() throws Exception { - execute(); - verifyRunInCurrentThread(); - } + @Test + public void executeBlockingQuery() throws Exception { + Thread.currentThread().setName(TRANSPORT_WORKER_THREAD_NAME_PREFIX); + 
execute(); + verifyRunInWorkerThread(); + } - @Test - public void executeNonBlockingQuery() throws Exception { - execute(anyAction -> NON_BLOCKING); - verifyRunInCurrentThread(); - } + @Test + public void executeBlockingQueryButNotInTransport() throws Exception { + execute(); + verifyRunInCurrentThread(); + } - private void execute() throws Exception { - AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor); - asyncExecutor.execute(client, params, action, channel); - } + @Test + public void executeNonBlockingQuery() throws Exception { + execute(anyAction -> NON_BLOCKING); + verifyRunInCurrentThread(); + } - private void execute(Predicate isBlocking) throws Exception { - AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor, isBlocking); - asyncExecutor.execute(client, params, action, channel); - } + private void execute() throws Exception { + AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor); + asyncExecutor.execute(client, params, action, channel); + } - private void verifyRunInCurrentThread() { - verify(client, never()).threadPool(); - } + private void execute(Predicate isBlocking) throws Exception { + AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor, isBlocking); + asyncExecutor.execute(client, params, action, channel); + } - private void verifyRunInWorkerThread() { - verify(client, times(1)).threadPool(); - } + private void verifyRunInCurrentThread() { + verify(client, never()).threadPool(); + } + private void verifyRunInWorkerThread() { + verify(client, times(1)).threadPool(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java index 1a24045881..c877095d8f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import static org.junit.Assert.assertEquals; @@ -13,25 +12,21 @@ import java.util.stream.Collectors; import org.junit.Test; -/** - * Unit tests for {@link CSVResult} - */ +/** Unit tests for {@link CSVResult} */ public class CSVResultTest { - private static final String SEPARATOR = ","; + private static final String SEPARATOR = ","; - @Test - public void getHeadersShouldReturnHeadersSanitized() { - CSVResult csv = csv(headers("name", "=age"), lines(line("John", "30"))); - assertEquals( - headers("name", "'=age"), - csv.getHeaders() - ); - } + @Test + public void getHeadersShouldReturnHeadersSanitized() { + CSVResult csv = csv(headers("name", "=age"), lines(line("John", "30"))); + assertEquals(headers("name", "'=age"), csv.getHeaders()); + } - @Test - public void getLinesShouldReturnLinesSanitized() { - CSVResult csv = csv( + @Test + public void getLinesShouldReturnLinesSanitized() { + CSVResult csv = + csv( headers("name", "city"), lines( line("John", "Seattle"), @@ -39,53 +34,42 @@ public void getLinesShouldReturnLinesSanitized() { line("John", "+Seattle"), line("-John", "Seattle"), line("@John", "Seattle"), - line("John", "Seattle=") - ) - ); - - assertEquals( - line( - "John,Seattle", - "John,'=Seattle", - "John,'+Seattle", - "'-John,Seattle", - "'@John,Seattle", - "John,Seattle=" - ), - csv.getLines() - ); - } - - @Test - public void getHeadersShouldReturnHeadersQuotedIfRequired() { - CSVResult csv = csv(headers("na,me", ",,age"), lines(line("John", "30"))); - assertEquals( - headers("\"na,me\"", "\",,age\""), - csv.getHeaders() - ); - } - - @Test - public void getLinesShouldReturnLinesQuotedIfRequired() { - CSVResult csv = csv(headers("name", "age"), lines(line("John,Smith", "30,,,"))); - assertEquals( - line("\"John,Smith\",\"30,,,\""), - csv.getLines() - ); - } - - @Test - public void getHeadersShouldReturnHeadersBothSanitizedAndQuotedIfRequired() { - CSVResult csv = csv(headers("na,+me", 
",,,=age", "=city,"), lines(line("John", "30", "Seattle"))); - assertEquals( - headers("\"na,+me\"", "\",,,=age\"", "\"'=city,\""), - csv.getHeaders() - ); - } - - @Test - public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { - CSVResult csv = csv( + line("John", "Seattle="))); + + assertEquals( + line( + "John,Seattle", + "John,'=Seattle", + "John,'+Seattle", + "'-John,Seattle", + "'@John,Seattle", + "John,Seattle="), + csv.getLines()); + } + + @Test + public void getHeadersShouldReturnHeadersQuotedIfRequired() { + CSVResult csv = csv(headers("na,me", ",,age"), lines(line("John", "30"))); + assertEquals(headers("\"na,me\"", "\",,age\""), csv.getHeaders()); + } + + @Test + public void getLinesShouldReturnLinesQuotedIfRequired() { + CSVResult csv = csv(headers("name", "age"), lines(line("John,Smith", "30,,,"))); + assertEquals(line("\"John,Smith\",\"30,,,\""), csv.getLines()); + } + + @Test + public void getHeadersShouldReturnHeadersBothSanitizedAndQuotedIfRequired() { + CSVResult csv = + csv(headers("na,+me", ",,,=age", "=city,"), lines(line("John", "30", "Seattle"))); + assertEquals(headers("\"na,+me\"", "\",,,=age\"", "\"'=city,\""), csv.getHeaders()); + } + + @Test + public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { + CSVResult csv = + csv( headers("name", "city"), lines( line("John", "Seattle"), @@ -93,38 +77,33 @@ public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { line("John", "+Sea,ttle"), line(",-John", "Seattle"), line(",,,@John", "Seattle"), - line("John", "Seattle=") - ) - ); - - assertEquals( - line( - "John,Seattle", - "John,'=Seattle", - "John,\"'+Sea,ttle\"", - "\",-John\",Seattle", - "\",,,@John\",Seattle", - "John,Seattle=" - ), - csv.getLines() - ); - } - - private CSVResult csv(List headers, List> lines) { - return new CSVResult(SEPARATOR, headers, lines); - } - - private List headers(String... 
headers) { - return Arrays.stream(headers).collect(Collectors.toList()); - } - - private List line(String... line) { - return Arrays.stream(line).collect(Collectors.toList()); - } - - @SafeVarargs - private final List> lines(List... lines) { - return Arrays.stream(lines).collect(Collectors.toList()); - } - + line("John", "Seattle="))); + + assertEquals( + line( + "John,Seattle", + "John,'=Seattle", + "John,\"'+Sea,ttle\"", + "\",-John\",Seattle", + "\",,,@John\",Seattle", + "John,Seattle="), + csv.getLines()); + } + + private CSVResult csv(List headers, List> lines) { + return new CSVResult(SEPARATOR, headers, lines); + } + + private List headers(String... headers) { + return Arrays.stream(headers).collect(Collectors.toList()); + } + + private List line(String... line) { + return Arrays.stream(line).collect(Collectors.toList()); + } + + @SafeVarargs + private final List> lines(List... lines) { + return Arrays.stream(lines).collect(Collectors.toList()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java index 5807ee2c44..1c2d1bae62 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static org.junit.Assert.assertEquals; @@ -18,685 +17,634 @@ public class DateFieldFormatterTest { - @Test - public void testOpenSearchDashboardsSampleDataEcommerceOrderDateField() - { - String columnName = "order_date"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-24T09:28:48+00:00"; - String expectedDateValue = "2020-02-24 09:28:48.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - 
public void testOpenSearchDashboardsSampleDataFlightsTimestampField() - { - String columnName = "timestamp"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-03T00:00:00"; - String expectedDateValue = "2020-02-03 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOpenSearchDashboardsSampleDataFlightsTimestampFieldNoTime() - { - String columnName = "timestamp"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-03T"; - String expectedDateValue = "2020-02-03 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOpenSearchDashboardsSampleDataLogsUtcDateField() - { - String columnName = "utc_date"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-02T00:39:02.912Z"; - String expectedDateValue = "2020-02-02 00:39:02.912"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testEpochMillis() - { - String columnName = "date_field"; - String dateFormat = "epoch_millis"; - String originalDateValue = "727430805000"; - String expectedDateValue = "1993-01-19 08:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testEpochSecond() - { - String columnName = "date_field"; - String dateFormat = "epoch_second"; - String originalDateValue = "727430805"; - String expectedDateValue = "1993-01-19 08:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateOptionalTimeDateOnly() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); - } - - @Test - public void testDateOptionalTimeDateAndTime() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1993-01-19T00:06:45.123-0800"; - String expectedDateValue = "1993-01-19 08:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDate() - { - String columnName = "date_field"; - String dateFormat = "basic_date"; - String originalDateValue = "19930119"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDateTime() - { - String columnName = "date_field"; - String dateFormat = "basic_date_time"; - String originalDateValue = "19930119T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_date_time_no_millis"; - String originalDateValue = "19930119T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicOrdinalDate() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date"; - String originalDateValue = "1993019"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicOrdinalDateTime() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date_time"; - String originalDateValue = "1993019T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - 
@Test - public void testBasicOrdinalDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date_time_no_millis"; - String originalDateValue = "1993019T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTime() - { - String columnName = "date_field"; - String dateFormat = "basic_time"; - String originalDateValue = "120645.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_time_no_millis"; - String originalDateValue = "120645-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTTime() - { - String columnName = "date_field"; - String dateFormat = "basic_t_time"; - String originalDateValue = "T120645.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_t_time_no_millis"; - String originalDateValue = "T120645-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDate() - { - String columnName = "date_field"; - String dateFormat = "basic_week_date"; - String originalDateValue = "1993W042"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDateTime() - { - String columnName 
= "date_field"; - String dateFormat = "basic_week_date_time"; - String originalDateValue = "1993W042T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_week_date_time_no_millis"; - String originalDateValue = "1993W042T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDate() - { - String columnName = "date_field"; - String dateFormat = "date"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHour() - { - String columnName = "date_field"; - String dateFormat = "date_hour"; - String originalDateValue = "1993-01-19T12"; - String expectedDateValue = "1993-01-19 12:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinute() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute"; - String originalDateValue = "1993-01-19T12:06"; - String expectedDateValue = "1993-01-19 12:06:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecond() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute_second"; - String originalDateValue = "1993-01-19T12:06:45"; - String expectedDateValue = "1993-01-19 12:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecondFraction() - { - String columnName = "date_field"; - String dateFormat = 
"date_hour_minute_second_fraction"; - String originalDateValue = "1993-01-19T12:06:45.123"; - String expectedDateValue = "1993-01-19 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecondMillis() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute_second_millis"; - String originalDateValue = "1993-01-19T12:06:45.123"; - String expectedDateValue = "1993-01-19 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateTime() - { - String columnName = "date_field"; - String dateFormat = "date_time"; - String originalDateValue = "1993-01-19T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "date_time_no_millis"; - String originalDateValue = "1993-01-19T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHour() - { - String columnName = "date_field"; - String dateFormat = "hour"; - String originalDateValue = "12"; - String expectedDateValue = "1970-01-01 12:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinute() - { - String columnName = "date_field"; - String dateFormat = "hour_minute"; - String originalDateValue = "12:06"; - String expectedDateValue = "1970-01-01 12:06:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecond() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second"; - String originalDateValue = "12:06:45"; - 
String expectedDateValue = "1970-01-01 12:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecondFraction() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second_fraction"; - String originalDateValue = "12:06:45.123"; - String expectedDateValue = "1970-01-01 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecondMillis() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second_millis"; - String originalDateValue = "12:06:45.123"; - String expectedDateValue = "1970-01-01 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDate() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date"; - String originalDateValue = "1993-019"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDateTime() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date_time"; - String originalDateValue = "1993-019T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date_time_no_millis"; - String originalDateValue = "1993-019T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTime() - { - String columnName = "date_field"; - String dateFormat = "time"; - String originalDateValue = "12:06:45.123-0800"; - String expectedDateValue = "1970-01-01 
20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "time_no_millis"; - String originalDateValue = "12:06:45-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTTime() - { - String columnName = "date_field"; - String dateFormat = "t_time"; - String originalDateValue = "T12:06:45.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "t_time_no_millis"; - String originalDateValue = "T12:06:45-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDate() - { - String columnName = "date_field"; - String dateFormat = "week_date"; - String originalDateValue = "1993-W04-2"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDateTime() - { - String columnName = "date_field"; - String dateFormat = "week_date_time"; - String originalDateValue = "1993-W04-2T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "week_date_time_no_millis"; - String originalDateValue = "1993-W04-2T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); - } - - @Test - public void testWeekyear() - { - String columnName = "date_field"; - String dateFormat = "week_year"; - String originalDateValue = "1993"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekyearWeek() - { - String columnName = "date_field"; - String dateFormat = "weekyear_week"; - String originalDateValue = "1993-W04"; - String expectedDateValue = "1993-01-17 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekyearWeekDay() - { - String columnName = "date_field"; - String dateFormat = "weekyear_week_day"; - String originalDateValue = "1993-W04-2"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYear() - { - String columnName = "date_field"; - String dateFormat = "year"; - String originalDateValue = "1993"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYearMonth() - { - String columnName = "date_field"; - String dateFormat = "year_month"; - String originalDateValue = "1993-01"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYearMonthDay() - { - String columnName = "date_field"; - String dateFormat = "year_month_day"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testCustomFormat() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy"; - - String originalDateValue = "Tue, 
Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testMultipleFormats() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time||epoch_millis"; - - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "727401600000"; - expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testMultipleCustomFormats() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy||yyMMddHHmmssZ"; - - String originalDateValue = "Tue, Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "930119000000-0000"; - expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testNamedAndCustomFormats() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy||hour_minute_second"; - - String originalDateValue = "Tue, Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "12:06:45"; - expectedDateValue = "1970-01-01 12:06:45.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testIncorrectFormat() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1581724085"; - // Invalid format for date value; should return original value - String expectedDateValue = "1581724085"; - - 
verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testNullDateData() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = null; - // Nulls should be preserved - String expectedDateValue = null; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testStrictDateOptionalTimeOrEpochMillsShouldPass() - { - String columnName = "date_field"; - String dateFormat = "strict_date_optional_time||epoch_millis"; - - String originalDateValue = "2015-01-01"; - String expectedDateValue = "2015-01-01 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "2015-01-01T12:10:30Z"; - expectedDateValue = "2015-01-01 12:10:30.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "1420070400001"; - expectedDateValue = "2015-01-01 00:00:00.001"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - private void verifyFormatting(String columnName, String dateFormatProperty, String originalDateValue, String expectedDateValue) - { - List columns = buildColumnList(columnName); - Map> dateFieldFormatMap = buildDateFieldFormatMap(columnName, dateFormatProperty); - - Map rowSource = new HashMap<>(); - rowSource.put(columnName, originalDateValue); - - DateFieldFormatter dateFieldFormatter = new DateFieldFormatter(dateFieldFormatMap, columns, new HashMap<>()); - executeFormattingAndCompare(dateFieldFormatter, rowSource, columnName, expectedDateValue); - } - - private void executeFormattingAndCompare( - DateFieldFormatter formatter, - Map rowSource, - String columnToCheck, - String expectedDateValue) { - formatter.applyJDBCDateFormat(rowSource); - assertEquals(expectedDateValue, rowSource.get(columnToCheck)); - } - - private List buildColumnList(String columnName) { - 
return ImmutableList.builder() - .add(new Schema.Column(columnName, null, Schema.Type.DATE)) - .build(); - } - - private Map> buildDateFieldFormatMap(String columnName, String dateFormatProperty) { - return ImmutableMap.>builder() - .put(columnName, Arrays.asList(dateFormatProperty.split("\\|\\|"))) - .build(); - } + @Test + public void testOpenSearchDashboardsSampleDataEcommerceOrderDateField() { + String columnName = "order_date"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-24T09:28:48+00:00"; + String expectedDateValue = "2020-02-24 09:28:48.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataFlightsTimestampField() { + String columnName = "timestamp"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-03T00:00:00"; + String expectedDateValue = "2020-02-03 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataFlightsTimestampFieldNoTime() { + String columnName = "timestamp"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-03T"; + String expectedDateValue = "2020-02-03 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataLogsUtcDateField() { + String columnName = "utc_date"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-02T00:39:02.912Z"; + String expectedDateValue = "2020-02-02 00:39:02.912"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testEpochMillis() { + String columnName = "date_field"; + String dateFormat = "epoch_millis"; + String originalDateValue = "727430805000"; + String expectedDateValue = "1993-01-19 08:06:45.000"; + + 
verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testEpochSecond() { + String columnName = "date_field"; + String dateFormat = "epoch_second"; + String originalDateValue = "727430805"; + String expectedDateValue = "1993-01-19 08:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateOptionalTimeDateOnly() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateOptionalTimeDateAndTime() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1993-01-19T00:06:45.123-0800"; + String expectedDateValue = "1993-01-19 08:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDate() { + String columnName = "date_field"; + String dateFormat = "basic_date"; + String originalDateValue = "19930119"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_date_time"; + String originalDateValue = "19930119T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_date_time_no_millis"; + String originalDateValue = "19930119T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); + } + + @Test + public void testBasicOrdinalDate() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date"; + String originalDateValue = "1993019"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicOrdinalDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date_time"; + String originalDateValue = "1993019T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicOrdinalDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date_time_no_millis"; + String originalDateValue = "1993019T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTime() { + String columnName = "date_field"; + String dateFormat = "basic_time"; + String originalDateValue = "120645.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_time_no_millis"; + String originalDateValue = "120645-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTTime() { + String columnName = "date_field"; + String dateFormat = "basic_t_time"; + String originalDateValue = "T120645.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void 
testBasicTTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_t_time_no_millis"; + String originalDateValue = "T120645-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDate() { + String columnName = "date_field"; + String dateFormat = "basic_week_date"; + String originalDateValue = "1993W042"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_week_date_time"; + String originalDateValue = "1993W042T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_week_date_time_no_millis"; + String originalDateValue = "1993W042T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDate() { + String columnName = "date_field"; + String dateFormat = "date"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHour() { + String columnName = "date_field"; + String dateFormat = "date_hour"; + String originalDateValue = "1993-01-19T12"; + String expectedDateValue = "1993-01-19 12:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinute() { + String columnName = "date_field"; + String dateFormat = 
"date_hour_minute"; + String originalDateValue = "1993-01-19T12:06"; + String expectedDateValue = "1993-01-19 12:06:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecond() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second"; + String originalDateValue = "1993-01-19T12:06:45"; + String expectedDateValue = "1993-01-19 12:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecondFraction() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second_fraction"; + String originalDateValue = "1993-01-19T12:06:45.123"; + String expectedDateValue = "1993-01-19 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecondMillis() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second_millis"; + String originalDateValue = "1993-01-19T12:06:45.123"; + String expectedDateValue = "1993-01-19 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateTime() { + String columnName = "date_field"; + String dateFormat = "date_time"; + String originalDateValue = "1993-01-19T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "date_time_no_millis"; + String originalDateValue = "1993-01-19T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHour() { + String columnName = "date_field"; + String dateFormat = 
"hour"; + String originalDateValue = "12"; + String expectedDateValue = "1970-01-01 12:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinute() { + String columnName = "date_field"; + String dateFormat = "hour_minute"; + String originalDateValue = "12:06"; + String expectedDateValue = "1970-01-01 12:06:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecond() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second"; + String originalDateValue = "12:06:45"; + String expectedDateValue = "1970-01-01 12:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecondFraction() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second_fraction"; + String originalDateValue = "12:06:45.123"; + String expectedDateValue = "1970-01-01 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecondMillis() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second_millis"; + String originalDateValue = "12:06:45.123"; + String expectedDateValue = "1970-01-01 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDate() { + String columnName = "date_field"; + String dateFormat = "ordinal_date"; + String originalDateValue = "1993-019"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDateTime() { + String columnName = "date_field"; + String dateFormat = "ordinal_date_time"; + String originalDateValue = "1993-019T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 
20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "ordinal_date_time_no_millis"; + String originalDateValue = "1993-019T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTime() { + String columnName = "date_field"; + String dateFormat = "time"; + String originalDateValue = "12:06:45.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "time_no_millis"; + String originalDateValue = "12:06:45-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTTime() { + String columnName = "date_field"; + String dateFormat = "t_time"; + String originalDateValue = "T12:06:45.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "t_time_no_millis"; + String originalDateValue = "T12:06:45-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekDate() { + String columnName = "date_field"; + String dateFormat = "week_date"; + String originalDateValue = "1993-W04-2"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void 
testWeekDateTime() { + String columnName = "date_field"; + String dateFormat = "week_date_time"; + String originalDateValue = "1993-W04-2T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "week_date_time_no_millis"; + String originalDateValue = "1993-W04-2T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyear() { + String columnName = "date_field"; + String dateFormat = "week_year"; + String originalDateValue = "1993"; + String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyearWeek() { + String columnName = "date_field"; + String dateFormat = "weekyear_week"; + String originalDateValue = "1993-W04"; + String expectedDateValue = "1993-01-17 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyearWeekDay() { + String columnName = "date_field"; + String dateFormat = "weekyear_week_day"; + String originalDateValue = "1993-W04-2"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYear() { + String columnName = "date_field"; + String dateFormat = "year"; + String originalDateValue = "1993"; + String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYearMonth() { + String columnName = "date_field"; + String dateFormat = "year_month"; + String originalDateValue = "1993-01"; + 
String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYearMonthDay() { + String columnName = "date_field"; + String dateFormat = "year_month_day"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testCustomFormat() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy"; + + String originalDateValue = "Tue, Jan 19, '93"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testMultipleFormats() { + String columnName = "date_field"; + String dateFormat = "date_optional_time||epoch_millis"; + + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "727401600000"; + expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testMultipleCustomFormats() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy||yyMMddHHmmssZ"; + + String originalDateValue = "Tue, Jan 19, '93"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "930119000000-0000"; + expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testNamedAndCustomFormats() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy||hour_minute_second"; + + String originalDateValue = "Tue, Jan 19, '93"; + String 
expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "12:06:45"; + expectedDateValue = "1970-01-01 12:06:45.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testIncorrectFormat() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1581724085"; + // Invalid format for date value; should return original value + String expectedDateValue = "1581724085"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testNullDateData() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = null; + // Nulls should be preserved + String expectedDateValue = null; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testStrictDateOptionalTimeOrEpochMillsShouldPass() { + String columnName = "date_field"; + String dateFormat = "strict_date_optional_time||epoch_millis"; + + String originalDateValue = "2015-01-01"; + String expectedDateValue = "2015-01-01 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "2015-01-01T12:10:30Z"; + expectedDateValue = "2015-01-01 12:10:30.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "1420070400001"; + expectedDateValue = "2015-01-01 00:00:00.001"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + private void verifyFormatting( + String columnName, + String dateFormatProperty, + String originalDateValue, + String expectedDateValue) { + List columns = buildColumnList(columnName); + Map> dateFieldFormatMap = + buildDateFieldFormatMap(columnName, dateFormatProperty); + + Map rowSource = new 
HashMap<>(); + rowSource.put(columnName, originalDateValue); + + DateFieldFormatter dateFieldFormatter = + new DateFieldFormatter(dateFieldFormatMap, columns, new HashMap<>()); + executeFormattingAndCompare(dateFieldFormatter, rowSource, columnName, expectedDateValue); + } + + private void executeFormattingAndCompare( + DateFieldFormatter formatter, + Map rowSource, + String columnToCheck, + String expectedDateValue) { + formatter.applyJDBCDateFormat(rowSource); + assertEquals(expectedDateValue, rowSource.get(columnToCheck)); + } + + private List buildColumnList(String columnName) { + return ImmutableList.builder() + .add(new Schema.Column(columnName, null, Schema.Type.DATE)) + .build(); + } + + private Map> buildDateFieldFormatMap( + String columnName, String dateFormatProperty) { + return ImmutableMap.>builder() + .put(columnName, Arrays.asList(dateFormatProperty.split("\\|\\|"))) + .build(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java index e5f44eacf0..526642e8ea 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -17,55 +16,53 @@ import org.opensearch.sql.legacy.parser.SqlParser; import org.opensearch.sql.legacy.util.SqlParserUtils; -/** - * Unit test class for feature of aggregation options: DISTINCT, ALL, UNIQUE, DEDUPLICATION - */ +/** Unit test class for feature of aggregation options: DISTINCT, ALL, UNIQUE, DEDUPLICATION */ public class AggregationOptionTest { - @Test - public void selectDistinctFieldsShouldHaveAggregationOption() { - List fields = getSelectFields("SELECT DISTINCT gender, city FROM accounts"); - for (Field 
field: fields) { - Assert.assertEquals(field.getOption(), SQLAggregateOption.DISTINCT); - } + @Test + public void selectDistinctFieldsShouldHaveAggregationOption() { + List fields = getSelectFields("SELECT DISTINCT gender, city FROM accounts"); + for (Field field : fields) { + Assert.assertEquals(field.getOption(), SQLAggregateOption.DISTINCT); } + } - @Test - public void selectWithoutDistinctFieldsShouldNotHaveAggregationOption() { - List fields = getSelectFields("SELECT gender, city FROM accounts"); - for (Field field: fields) { - Assert.assertNull(field.getOption()); - } + @Test + public void selectWithoutDistinctFieldsShouldNotHaveAggregationOption() { + List fields = getSelectFields("SELECT gender, city FROM accounts"); + for (Field field : fields) { + Assert.assertNull(field.getOption()); } + } - @Test - public void selectDistinctWithoutGroupByShouldHaveGroupByItems() { - List> groupBys = getGroupBys("SELECT DISTINCT gender, city FROM accounts"); - Assert.assertFalse(groupBys.isEmpty()); - } + @Test + public void selectDistinctWithoutGroupByShouldHaveGroupByItems() { + List> groupBys = getGroupBys("SELECT DISTINCT gender, city FROM accounts"); + Assert.assertFalse(groupBys.isEmpty()); + } - @Test - public void selectWithoutDistinctWithoutGroupByShouldNotHaveGroupByItems() { - List> groupBys = getGroupBys("SELECT gender, city FROM accounts"); - Assert.assertTrue(groupBys.isEmpty()); - } + @Test + public void selectWithoutDistinctWithoutGroupByShouldNotHaveGroupByItems() { + List> groupBys = getGroupBys("SELECT gender, city FROM accounts"); + Assert.assertTrue(groupBys.isEmpty()); + } - private List> getGroupBys(String query) { - return getSelect(query).getGroupBys(); - } + private List> getGroupBys(String query) { + return getSelect(query).getGroupBys(); + } - private List getSelectFields(String query) { - return getSelect(query).getFields(); - } + private List getSelectFields(String query) { + return getSelect(query).getFields(); + } - private Select 
getSelect(String query) { - SQLQueryExpr queryExpr = SqlParserUtils.parse(query); - Select select = null; - try { - select = new SqlParser().parseSelect(queryExpr); - } catch (SqlParseException e) { - e.printStackTrace(); - } - return select; + private Select getSelect(String query) { + SQLQueryExpr queryExpr = SqlParserUtils.parse(query); + Select select = null; + try { + select = new SqlParser().parseSelect(queryExpr); + } catch (SqlParseException e) { + e.printStackTrace(); } + return select; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java index 89ac8b4563..3bb7b4a2b6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -40,196 +39,238 @@ public class DateFormatTest { - private static final String SELECT_CNT_FROM_DATE = "SELECT COUNT(*) AS c FROM dates "; - - @Test - public void simpleFormatCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY') < '2018'"); - - assertThat(q, hasQueryWithValue("fieldName", equalTo("creationDate"))); - assertThat(q, hasQueryWithValueGetter(MatcherUtils.featureValueOf("has format", equalTo("YYYY"), f->((RangeQueryBuilder)f).format()))); - } - - @Test - public void equalCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') = '2018-04-02'"); - - assertThat(q, hasQueryWithValueGetter(MatcherUtils.featureValueOf("has format", equalTo("YYYY-MM-dd"), f->((RangeQueryBuilder)f).format()))); - - // Equality query for date_format is created with a rangeQuery where the 'from' and 'to' values are equal to the value we are equating to - assertThat(q, 
hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); // converting string to bytes ref as RangeQueryBuilder stores it this way - assertThat(q, hasQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - } - - @Test - public void orderByTest() { - String query = "SELECT agent, ip, date_format(utc_time, 'dd-MM-YYYY') date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY') desc, ip"; - - Select select = getSelect(query); - - List orderBys = select.getOrderBys(); - assertThat(orderBys.size(), equalTo(2)); - - Order formula = orderBys.get(0); - - assertThat(formula.isScript(), is(true)); - assertThat(formula.getType(), is("DESC")); - assertThat(formula.getName(), containsString("DateTimeFormatter.ofPattern")); - - Order ip = orderBys.get(1); - - assertThat(ip.isScript(), is(false)); - assertThat(ip.getName(), is("ip")); - assertThat(ip.getType(), is("ASC")); - } - - @Test - public void groupByWithDescOrder() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY') DESC"; - - JSONObject aggregation = getAggregation(query); - assertThat(aggregation.getInt("size"), is(getSelect(query).getRowCount())); - assertThat(aggregation.getJSONObject("order").getString("_key"), is("desc")); - } - - @Test - public void groupByWithAscOrder() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY')"; - - JSONObject aggregation = getAggregation(query); - - assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); - } - - @Test - @Ignore("https://github.com/opendistro-for-elasticsearch/sql/issues/158") 
- public void groupByWithAndAlias() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY') x, count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY x " + - "ORDER BY x"; - - JSONObject aggregation = getAggregation(query); - assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); - } - - public JSONObject getAggregation(String query) throws SqlParseException { - Select select = getSelect(query); - - Client client = mock(Client.class); - AggregationQueryAction queryAction = new AggregationQueryAction(client, select); - - String elasticDsl = queryAction.explain().explain(); - JSONObject elasticQuery = new JSONObject(elasticDsl); - - JSONObject aggregations = elasticQuery.getJSONObject("aggregations"); - String dateFormatAggregationKey = getScriptAggregationKey(aggregations, "date_format"); - - return aggregations.getJSONObject(dateFormatAggregationKey).getJSONObject("terms"); - } - - public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { - return aggregation.keySet() - .stream() - .filter(x -> x.startsWith(prefix)) - .findFirst() - .orElseThrow(()-> new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); - } - - @Test - public void notEqualCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') <> '2018-04-02'"); - - assertThat(q, hasNotQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasNotQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - } - - @Test - public void greaterThanCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') > '2018-04-02'"); - - assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasQueryWithValue("includeLower", equalTo(false))); - assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); - } - - @Test - 
public void greaterThanOrEqualToCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') >= '2018-04-02'"); - - assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasQueryWithValue("to", equalTo(null))); - assertThat(q, hasQueryWithValue("includeLower", equalTo(true))); - assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + private static final String SELECT_CNT_FROM_DATE = "SELECT COUNT(*) AS c FROM dates "; + + @Test + public void simpleFormatCondition() { + List q = + query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY') < '2018'"); + + assertThat(q, hasQueryWithValue("fieldName", equalTo("creationDate"))); + assertThat( + q, + hasQueryWithValueGetter( + MatcherUtils.featureValueOf( + "has format", equalTo("YYYY"), f -> ((RangeQueryBuilder) f).format()))); + } + + @Test + public void equalCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') = '2018-04-02'"); + + assertThat( + q, + hasQueryWithValueGetter( + MatcherUtils.featureValueOf( + "has format", equalTo("YYYY-MM-dd"), f -> ((RangeQueryBuilder) f).format()))); + + // Equality query for date_format is created with a rangeQuery where the 'from' and 'to' values + // are equal to the value we are equating to + assertThat( + q, + hasQueryWithValue( + "from", + equalTo( + BytesRefs.toBytesRef( + "2018-04-02")))); // converting string to bytes ref as RangeQueryBuilder stores + // it this way + assertThat(q, hasQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + } + + @Test + public void orderByTest() { + String query = + "SELECT agent, ip, date_format(utc_time, 'dd-MM-YYYY') date " + + "FROM opensearch_dashboards_sample_data_logs " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY') desc, ip"; + + Select select = getSelect(query); + + List orderBys = select.getOrderBys(); + assertThat(orderBys.size(), equalTo(2)); + + 
Order formula = orderBys.get(0); + + assertThat(formula.isScript(), is(true)); + assertThat(formula.getType(), is("DESC")); + assertThat(formula.getName(), containsString("DateTimeFormatter.ofPattern")); + + Order ip = orderBys.get(1); + + assertThat(ip.isScript(), is(false)); + assertThat(ip.getName(), is("ip")); + assertThat(ip.getType(), is("ASC")); + } + + @Test + public void groupByWithDescOrder() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY') DESC"; + + JSONObject aggregation = getAggregation(query); + assertThat(aggregation.getInt("size"), is(getSelect(query).getRowCount())); + assertThat(aggregation.getJSONObject("order").getString("_key"), is("desc")); + } + + @Test + public void groupByWithAscOrder() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY')"; + + JSONObject aggregation = getAggregation(query); + + assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); + } + + @Test + @Ignore("https://github.com/opendistro-for-elasticsearch/sql/issues/158") + public void groupByWithAndAlias() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY') x, count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY x " + + "ORDER BY x"; + + JSONObject aggregation = getAggregation(query); + assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); + } + + public JSONObject getAggregation(String query) throws SqlParseException { + Select select = getSelect(query); + + Client client = mock(Client.class); + AggregationQueryAction queryAction = new AggregationQueryAction(client, 
select); + + String elasticDsl = queryAction.explain().explain(); + JSONObject elasticQuery = new JSONObject(elasticDsl); + + JSONObject aggregations = elasticQuery.getJSONObject("aggregations"); + String dateFormatAggregationKey = getScriptAggregationKey(aggregations, "date_format"); + + return aggregations.getJSONObject(dateFormatAggregationKey).getJSONObject("terms"); + } + + public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { + return aggregation.keySet().stream() + .filter(x -> x.startsWith(prefix)) + .findFirst() + .orElseThrow( + () -> + new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); + } + + @Test + public void notEqualCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') <> '2018-04-02'"); + + assertThat(q, hasNotQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasNotQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + } + + @Test + public void greaterThanCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') > '2018-04-02'"); + + assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasQueryWithValue("includeLower", equalTo(false))); + assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + } + + @Test + public void greaterThanOrEqualToCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') >= '2018-04-02'"); + + assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasQueryWithValue("to", equalTo(null))); + assertThat(q, hasQueryWithValue("includeLower", equalTo(true))); + assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + } + + @Test + public void timeZoneCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + + "WHERE date_format(creationDate, 
'YYYY-MM-dd', 'America/Phoenix') >" + + " '2018-04-02'"); + + // Used hasProperty here as getter followed convention for obtaining ID and Feature Matcher was + // having issues with generic type to obtain value + assertThat(q, hasQueryWithValue("timeZone", hasProperty("id", equalTo("America/Phoenix")))); + } + + private List query(String sql) { + return translate(parseSql(sql)); + } + + private List translate(SQLQueryExpr expr) { + try { + Select select = new SqlParser().parseSelect(expr); + QueryBuilder whereQuery = QueryMaker.explain(select.getWhere(), select.isQuery); + return ((BoolQueryBuilder) whereQuery).filter(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); } + } - @Test - public void timeZoneCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd', 'America/Phoenix') > '2018-04-02'"); - - // Used hasProperty here as getter followed convention for obtaining ID and Feature Matcher was having issues with generic type to obtain value - assertThat(q, hasQueryWithValue("timeZone", hasProperty("id", equalTo("America/Phoenix")))); - } - - private List query(String sql) { - return translate(parseSql(sql)); - } - - private List translate(SQLQueryExpr expr) { - try { - Select select = new SqlParser().parseSelect(expr); - QueryBuilder whereQuery = QueryMaker.explain(select.getWhere(), select.isQuery); - return ((BoolQueryBuilder) whereQuery).filter(); - } catch (SqlParseException e) { - throw new ParserException("Illegal sql expr: " + expr.toString()); - } - } - - private SQLQueryExpr parseSql(String sql) { - ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal sql: " + sql); - } - return (SQLQueryExpr) expr; + private SQLQueryExpr parseSql(String sql) { + ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = 
parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal sql: " + sql); } - - private Select getSelect(String query) { - try { - Select select = new SqlParser().parseSelect(parseSql(query)); - if (select.getRowCount() == null){ - select.setRowCount(Select.DEFAULT_LIMIT); - } - return select; - } catch (SqlParseException e) { - throw new RuntimeException(e); - } - } - - private Matcher> hasQueryWithValue(String name, Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(hasFieldWithValue(name, "has " + name, matcher)))); - } - - private Matcher> hasNotQueryWithValue(String name, Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(hasFieldWithValue("mustNotClauses", "has mustNotClauses", - hasItem(hasFieldWithValue(name, "has " + name, matcher)))))); - } - - private Matcher> hasQueryWithValueGetter(Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(matcher))); + return (SQLQueryExpr) expr; + } + + private Select getSelect(String query) { + try { + Select select = new SqlParser().parseSelect(parseSql(query)); + if (select.getRowCount() == null) { + select.setRowCount(Select.DEFAULT_LIMIT); + } + return select; + } catch (SqlParseException e) { + throw new RuntimeException(e); } + } + + private Matcher> hasQueryWithValue( + String name, Matcher matcher) { + return hasItem( + hasFieldWithValue( + "mustClauses", + "has mustClauses", + hasItem(hasFieldWithValue(name, "has " + name, matcher)))); + } + + private Matcher> hasNotQueryWithValue( + String name, Matcher matcher) { + return hasItem( + hasFieldWithValue( + "mustClauses", + "has mustClauses", + hasItem( + hasFieldWithValue( + "mustNotClauses", + "has mustNotClauses", + hasItem(hasFieldWithValue(name, "has " + name, matcher)))))); + } + + private Matcher> hasQueryWithValueGetter(Matcher matcher) { + return 
hasItem(hasFieldWithValue("mustClauses", "has mustClauses", hasItem(matcher))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java index 771b0ce1bf..cf1be90665 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -20,181 +19,132 @@ public class DateFunctionsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - /** - * The following unit tests will only cover a subset of the available date functions as the painless script is - * generated from the same template. More thorough testing will be done in integration tests since output will - * differ for each function. 
- */ - - @Test - public void yearInSelect() { - String query = "SELECT YEAR(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.year")); - } - - @Test - public void yearInWhere() { - String query = "SELECT * " + - "FROM dates " + - "WHERE YEAR(creationDate) > 2012"; - ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); - assertTrue( - scriptContainsString( - scriptFilter, - "doc['creationDate'].value.year")); - assertTrue( - scriptHasPattern( - scriptFilter, - "year_\\d+ > 2012")); - } - - @Test - public void weekOfYearInSelect() { - String query = "SELECT WEEK_OF_YEAR(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); - } - - @Test - public void weekOfYearInWhere() { - String query = "SELECT * " + - "FROM dates " + - "WHERE WEEK_OF_YEAR(creationDate) > 15"; - ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); - assertTrue( - scriptContainsString( - scriptFilter, - "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); - assertTrue( - scriptHasPattern( - scriptFilter, - "weekOfWeekyear_\\d+ > 15")); - } - - @Test - public void dayOfMonth() { - String query = "SELECT DAY_OF_MONTH(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.dayOfMonth")); - } - - @Test - public void hourOfDay() { - String query = "SELECT HOUR_OF_DAY(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.hour")); - } - - @Test - public void secondOfMinute() { - String query = "SELECT 
SECOND_OF_MINUTE(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.second")); - } - - @Test - public void month() { - String query = "SELECT MONTH(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.monthValue")); - } - - @Test - public void dayofmonth() { - String query = "SELECT DAY_OF_MONTH(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.dayOfMonth")); - } - - @Test - public void date() { - String query = "SELECT DATE(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "LocalDate.parse(doc['creationDate'].value.toString(),DateTimeFormatter.ISO_DATE_TIME)")); - } - - @Test - public void monthname() { - String query = "SELECT MONTHNAME(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.month")); - } - - @Test - public void timestamp() { - String query = "SELECT TIMESTAMP(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss')")); - } - - @Test - public void maketime() { - String query = "SELECT MAKETIME(1, 1, 1) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "LocalTime.of(1, 1, 1).format(DateTimeFormatter.ofPattern('HH:mm:ss'))")); - } - - @Test - public void now() { - String query = "SELECT NOW() FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - 
scriptContainsString( - scriptField, - "System.currentTimeMillis()")); - } - - @Test - public void curdate() { - String query = "SELECT CURDATE() FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "System.currentTimeMillis()")); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + /** + * The following unit tests will only cover a subset of the available date functions as the + * painless script is generated from the same template. More thorough testing will be done in + * integration tests since output will differ for each function. + */ + @Test + public void yearInSelect() { + String query = "SELECT YEAR(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.year")); + } + + @Test + public void yearInWhere() { + String query = "SELECT * " + "FROM dates " + "WHERE YEAR(creationDate) > 2012"; + ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); + assertTrue(scriptContainsString(scriptFilter, "doc['creationDate'].value.year")); + assertTrue(scriptHasPattern(scriptFilter, "year_\\d+ > 2012")); + } + + @Test + public void weekOfYearInSelect() { + String query = "SELECT WEEK_OF_YEAR(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); + } + + @Test + public void weekOfYearInWhere() { + String query = "SELECT * " + "FROM dates " + "WHERE WEEK_OF_YEAR(creationDate) > 15"; + ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); + assertTrue( + scriptContainsString( + scriptFilter, "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); + assertTrue(scriptHasPattern(scriptFilter, "weekOfWeekyear_\\d+ > 15")); 
+ } + + @Test + public void dayOfMonth() { + String query = "SELECT DAY_OF_MONTH(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.dayOfMonth")); + } + + @Test + public void hourOfDay() { + String query = "SELECT HOUR_OF_DAY(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.hour")); + } + + @Test + public void secondOfMinute() { + String query = "SELECT SECOND_OF_MINUTE(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.second")); + } + + @Test + public void month() { + String query = "SELECT MONTH(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.monthValue")); + } + + @Test + public void dayofmonth() { + String query = "SELECT DAY_OF_MONTH(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.dayOfMonth")); + } + + @Test + public void date() { + String query = "SELECT DATE(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, + "LocalDate.parse(doc['creationDate'].value.toString(),DateTimeFormatter.ISO_DATE_TIME)")); + } + + @Test + public void monthname() { + String query = "SELECT MONTHNAME(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.month")); + } + + @Test + public void timestamp() { + String query = "SELECT TIMESTAMP(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + 
assertTrue( + scriptContainsString(scriptField, "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss')")); + } + + @Test + public void maketime() { + String query = "SELECT MAKETIME(1, 1, 1) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, "LocalTime.of(1, 1, 1).format(DateTimeFormatter.ofPattern('HH:mm:ss'))")); + } + + @Test + public void now() { + String query = "SELECT NOW() FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "System.currentTimeMillis()")); + } + + @Test + public void curdate() { + String query = "SELECT CURDATE() FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "System.currentTimeMillis()")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java index d1e032ba1c..1b9662035d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.cursor; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,40 +18,39 @@ public class DefaultCursorTest { - @Test - public void checkCursorType() { - DefaultCursor cursor = new DefaultCursor(); - assertEquals(cursor.getType(), CursorType.DEFAULT); - } - - - @Test - public void cursorShouldStartWithCursorTypeID() { - DefaultCursor cursor = new DefaultCursor(); - cursor.setRowsLeft(50); - cursor.setScrollId("dbdskbcdjksbcjkdsbcjk+//"); - cursor.setIndexPattern("myIndex"); - cursor.setFetchSize(500); - cursor.setFieldAliasMap(Collections.emptyMap()); - cursor.setColumns(new ArrayList<>()); - assertThat(cursor.generateCursorId(), 
startsWith(cursor.getType().getId()+ ":") ); - } - - @Test - public void nullCursorWhenRowLeftIsLessThanEqualZero() { - DefaultCursor cursor = new DefaultCursor(); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - - cursor.setRowsLeft(-10); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - } - - @Test - public void nullCursorWhenScrollIDIsNullOrEmpty() { - DefaultCursor cursor = new DefaultCursor(); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - - cursor.setScrollId(""); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - } + @Test + public void checkCursorType() { + DefaultCursor cursor = new DefaultCursor(); + assertEquals(cursor.getType(), CursorType.DEFAULT); + } + + @Test + public void cursorShouldStartWithCursorTypeID() { + DefaultCursor cursor = new DefaultCursor(); + cursor.setRowsLeft(50); + cursor.setScrollId("dbdskbcdjksbcjkdsbcjk+//"); + cursor.setIndexPattern("myIndex"); + cursor.setFetchSize(500); + cursor.setFieldAliasMap(Collections.emptyMap()); + cursor.setColumns(new ArrayList<>()); + assertThat(cursor.generateCursorId(), startsWith(cursor.getType().getId() + ":")); + } + + @Test + public void nullCursorWhenRowLeftIsLessThanEqualZero() { + DefaultCursor cursor = new DefaultCursor(); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + + cursor.setRowsLeft(-10); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + } + + @Test + public void nullCursorWhenScrollIDIsNullOrEmpty() { + DefaultCursor cursor = new DefaultCursor(); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + + cursor.setScrollId(""); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java index 205c63ad1d..6599d576b3 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.domain; import static org.junit.Assert.assertEquals; @@ -18,28 +17,29 @@ import org.opensearch.sql.legacy.executor.format.Schema; public class ColumnTypeProviderTest { - @Test - public void singleESDataTypeShouldReturnCorrectSchemaType() { - assertEquals(Schema.Type.LONG, new ColumnTypeProvider(OpenSearchDataType.LONG).get(0)); - } - - @Test - public void productTypeShouldReturnCorrectSchemaType() { - ColumnTypeProvider columnTypeProvider = - new ColumnTypeProvider(new Product(ImmutableList.of(OpenSearchDataType.LONG, OpenSearchDataType.SHORT))); - assertEquals(Schema.Type.LONG, columnTypeProvider.get(0)); - assertEquals(Schema.Type.SHORT, columnTypeProvider.get(1)); - } - - @Test - public void unSupportedTypeShouldReturnDefaultSchemaType() { - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(SetOperator.UNION); - assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); - } - - @Test - public void providerWithoutColumnTypeShouldReturnDefaultSchemaType() { - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(); - assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); - } + @Test + public void singleESDataTypeShouldReturnCorrectSchemaType() { + assertEquals(Schema.Type.LONG, new ColumnTypeProvider(OpenSearchDataType.LONG).get(0)); + } + + @Test + public void productTypeShouldReturnCorrectSchemaType() { + ColumnTypeProvider columnTypeProvider = + new ColumnTypeProvider( + new Product(ImmutableList.of(OpenSearchDataType.LONG, OpenSearchDataType.SHORT))); + assertEquals(Schema.Type.LONG, columnTypeProvider.get(0)); + assertEquals(Schema.Type.SHORT, columnTypeProvider.get(1)); + } + + @Test + public void 
unSupportedTypeShouldReturnDefaultSchemaType() { + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(SetOperator.UNION); + assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); + } + + @Test + public void providerWithoutColumnTypeShouldReturnDefaultSchemaType() { + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(); + assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java index a668e9248a..533c2b2989 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,53 +22,52 @@ import org.opensearch.sql.legacy.executor.format.DeleteResultSet; import org.opensearch.sql.legacy.executor.format.Schema; - public class DeleteResultSetTest { - @Mock - NodeClient client; - - @Mock - Delete deleteQuery; + @Mock NodeClient client; - @Test - public void testDeleteResponseForJdbcFormat() throws IOException { + @Mock Delete deleteQuery; - String jsonDeleteResponse = "{\n" + - " \"took\" : 73,\n" + - " \"timed_out\" : false,\n" + - " \"total\" : 1,\n" + - " \"updated\" : 0,\n" + - " \"created\" : 0,\n" + - " \"deleted\" : 10,\n" + - " \"batches\" : 1,\n" + - " \"version_conflicts\" : 0,\n" + - " \"noops\" : 0,\n" + - " \"retries\" : {\n" + - " \"bulk\" : 0,\n" + - " \"search\" : 0\n" + - " },\n" + - " \"throttled_millis\" : 0,\n" + - " \"requests_per_second\" : -1.0,\n" + - " \"throttled_until_millis\" : 0,\n" + - " \"failures\" : [ ]\n" + - "}\n"; + @Test + public void testDeleteResponseForJdbcFormat() throws IOException { - XContentType 
xContentType = XContentType.JSON; - XContentParser parser = xContentType.xContent().createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - jsonDeleteResponse - ); + String jsonDeleteResponse = + "{\n" + + " \"took\" : 73,\n" + + " \"timed_out\" : false,\n" + + " \"total\" : 1,\n" + + " \"updated\" : 0,\n" + + " \"created\" : 0,\n" + + " \"deleted\" : 10,\n" + + " \"batches\" : 1,\n" + + " \"version_conflicts\" : 0,\n" + + " \"noops\" : 0,\n" + + " \"retries\" : {\n" + + " \"bulk\" : 0,\n" + + " \"search\" : 0\n" + + " },\n" + + " \"throttled_millis\" : 0,\n" + + " \"requests_per_second\" : -1.0,\n" + + " \"throttled_until_millis\" : 0,\n" + + " \"failures\" : [ ]\n" + + "}\n"; - BulkByScrollResponse deleteResponse = BulkByScrollResponse.fromXContent(parser); - DeleteResultSet deleteResultSet = new DeleteResultSet(client, deleteQuery, deleteResponse); - Schema schema = deleteResultSet.getSchema(); - DataRows dataRows = deleteResultSet.getDataRows(); + XContentType xContentType = XContentType.JSON; + XContentParser parser = + xContentType + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + jsonDeleteResponse); - assertThat(schema.getHeaders().size(), equalTo(1)); - assertThat(dataRows.getSize(), equalTo(1L)); - assertThat(dataRows.iterator().next().getData(DeleteResultSet.DELETED), equalTo(10L)); - } + BulkByScrollResponse deleteResponse = BulkByScrollResponse.fromXContent(parser); + DeleteResultSet deleteResultSet = new DeleteResultSet(client, deleteQuery, deleteResponse); + Schema schema = deleteResultSet.getSchema(); + DataRows dataRows = deleteResultSet.getDataRows(); + assertThat(schema.getHeaders().size(), equalTo(1)); + assertThat(dataRows.getSize(), equalTo(1L)); + assertThat(dataRows.iterator().next().getData(DeleteResultSet.DELETED), equalTo(10L)); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java index d76aa84a5d..fa385fa14b 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.format; import static org.hamcrest.MatcherAssert.assertThat; @@ -27,53 +26,65 @@ public class BindingTupleResultSetTest { - @Test - public void buildDataRowsFromBindingTupleShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().name("age").type(Schema.Type.INTEGER).build(), ColumnNode.builder().name("gender").type(Schema.Type.TEXT).build()), - Arrays.asList(BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), + Arrays.asList( + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f")))), - containsInAnyOrder(rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "f"))), - rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "f"))))); - } + containsInAnyOrder( + rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "m"))), + rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "f"))), + rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "m"))), + rowContents(allOf(hasEntry("age", 39), 
hasEntry("gender", (Object) "f"))))); + } - @Test - public void buildDataRowsFromBindingTupleIncludeLongValueShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleIncludeLongValueShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().name("longValue").type(Schema.Type.LONG).build(), ColumnNode.builder().name("gender").type(Schema.Type.TEXT).build()), Arrays.asList( BindingTuple.from(ImmutableMap.of("longValue", Long.MAX_VALUE, "gender", "m")), BindingTuple.from(ImmutableMap.of("longValue", Long.MIN_VALUE, "gender", "f")))), - containsInAnyOrder( - rowContents(allOf(hasEntry("longValue", Long.MAX_VALUE), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("longValue", Long.MIN_VALUE), hasEntry("gender", (Object) "f"))))); - } + containsInAnyOrder( + rowContents( + allOf(hasEntry("longValue", Long.MAX_VALUE), hasEntry("gender", (Object) "m"))), + rowContents( + allOf(hasEntry("longValue", Long.MIN_VALUE), hasEntry("gender", (Object) "f"))))); + } - @Test - public void buildDataRowsFromBindingTupleIncludeDateShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleIncludeDateShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().alias("dateValue").type(Schema.Type.DATE).build(), ColumnNode.builder().alias("gender").type(Schema.Type.TEXT).build()), Arrays.asList( BindingTuple.from(ImmutableMap.of("dateValue", 1529712000000L, "gender", "m")))), - containsInAnyOrder( - rowContents(allOf(hasEntry("dateValue", "2018-06-23 00:00:00.000"), hasEntry("gender", (Object) "m"))))); - } + containsInAnyOrder( + rowContents( + allOf( + hasEntry("dateValue", "2018-06-23 00:00:00.000"), + hasEntry("gender", (Object) "m"))))); + } - private static Matcher rowContents(Matcher> matcher) { - return featureValueOf("DataRows.Row", matcher, DataRows.Row::getContents); - } + private static Matcher rowContents(Matcher> matcher) { + return featureValueOf("DataRows.Row", matcher, 
DataRows.Row::getContents); + } - private List row(List columnNodes, List bindingTupleList) { - return ImmutableList.copyOf(BindingTupleResultSet.buildDataRows(columnNodes, bindingTupleList).iterator()); - } + private List row( + List columnNodes, List bindingTupleList) { + return ImmutableList.copyOf( + BindingTupleResultSet.buildDataRows(columnNodes, bindingTupleList).iterator()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java index b3afff2ce1..be6029f9af 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.format; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,21 +18,25 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; public class CSVResultsExtractorTest { - private final CSVResultsExtractor csvResultsExtractor = new CSVResultsExtractor(false, false); - - @Test - public void extractResultsFromBindingTupleListShouldPass() throws CsvExtractorException { - CSVResult csvResult = csv(Arrays.asList(BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), - BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), - BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), - BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f"))), - Arrays.asList("age", "gender")); - - assertThat(csvResult.getHeaders(), contains("age", "gender")); - assertThat(csvResult.getLines(), contains("31,m", "31,f", "39,m", "39,f")); - } - - private CSVResult csv(List bindingTupleList, List fieldNames) throws CsvExtractorException { - return csvResultsExtractor.extractResults(bindingTupleList, 
false, ",", fieldNames); - } + private final CSVResultsExtractor csvResultsExtractor = new CSVResultsExtractor(false, false); + + @Test + public void extractResultsFromBindingTupleListShouldPass() throws CsvExtractorException { + CSVResult csvResult = + csv( + Arrays.asList( + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), + BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), + BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f"))), + Arrays.asList("age", "gender")); + + assertThat(csvResult.getHeaders(), contains("age", "gender")); + assertThat(csvResult.getLines(), contains("31,m", "31,f", "39,m", "39,f")); + } + + private CSVResult csv(List bindingTupleList, List fieldNames) + throws CsvExtractorException { + return csvResultsExtractor.extractResults(bindingTupleList, false, ",", fieldNames); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java index 2f802f4f91..37a0666ad3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -21,69 +20,65 @@ @RunWith(MockitoJUnitRunner.class) public class BinaryExpressionTest extends ExpressionTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void addIntegerValueShouldPass() { - assertEquals(2, - apply(ScalarOperation.ADD, ref("intValue"), ref("intValue"))); - } - - @Test - public void multipleAddIntegerValueShouldPass() { - assertEquals(3, - apply(ScalarOperation.ADD, ref("intValue"), - 
of(ScalarOperation.ADD, ref("intValue"), ref("intValue")))); - } - - @Test - public void addDoubleValueShouldPass() { - assertEquals(4d, - apply(ScalarOperation.ADD, ref("doubleValue"), ref("doubleValue"))); - } - - @Test - public void addDoubleAndIntegerShouldPass() { - assertEquals(3d, - apply(ScalarOperation.ADD, ref("doubleValue"), ref("intValue"))); - } - - @Test - public void divideIntegerValueShouldPass() { - assertEquals(0, - apply(ScalarOperation.DIVIDE, ref("intValue"), ref("intValue2"))); - } - - @Test - public void divideIntegerAndDoubleShouldPass() { - assertEquals(0.5d, - apply(ScalarOperation.DIVIDE, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void subtractIntAndDoubleShouldPass() { - assertEquals(-1d, - apply(ScalarOperation.SUBTRACT, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void multiplyIntAndDoubleShouldPass() { - assertEquals(2d, - apply(ScalarOperation.MULTIPLY, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void modulesIntAndDoubleShouldPass() { - assertEquals(1d, - apply(ScalarOperation.MODULES, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void addIntAndStringShouldPass() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unexpected operation type: ADD(INTEGER_VALUE, STRING_VALUE)"); - - assertEquals(2, apply(ScalarOperation.ADD, literal(integerValue(1)), literal(stringValue("stringValue")))); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void addIntegerValueShouldPass() { + assertEquals(2, apply(ScalarOperation.ADD, ref("intValue"), ref("intValue"))); + } + + @Test + public void multipleAddIntegerValueShouldPass() { + assertEquals( + 3, + apply( + ScalarOperation.ADD, + ref("intValue"), + of(ScalarOperation.ADD, ref("intValue"), ref("intValue")))); + } + + @Test + public void addDoubleValueShouldPass() { + assertEquals(4d, apply(ScalarOperation.ADD, ref("doubleValue"), ref("doubleValue"))); + 
} + + @Test + public void addDoubleAndIntegerShouldPass() { + assertEquals(3d, apply(ScalarOperation.ADD, ref("doubleValue"), ref("intValue"))); + } + + @Test + public void divideIntegerValueShouldPass() { + assertEquals(0, apply(ScalarOperation.DIVIDE, ref("intValue"), ref("intValue2"))); + } + + @Test + public void divideIntegerAndDoubleShouldPass() { + assertEquals(0.5d, apply(ScalarOperation.DIVIDE, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void subtractIntAndDoubleShouldPass() { + assertEquals(-1d, apply(ScalarOperation.SUBTRACT, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void multiplyIntAndDoubleShouldPass() { + assertEquals(2d, apply(ScalarOperation.MULTIPLY, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void modulesIntAndDoubleShouldPass() { + assertEquals(1d, apply(ScalarOperation.MODULES, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void addIntAndStringShouldPass() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unexpected operation type: ADD(INTEGER_VALUE, STRING_VALUE)"); + + assertEquals( + 2, + apply(ScalarOperation.ADD, literal(integerValue(1)), literal(stringValue("stringValue")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java index 2e75ee0c8b..3315024a13 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -16,10 +15,12 @@ public class CompoundExpressionTest extends ExpressionTest { - @Test - public void absAndAddShouldPass() { - 
assertEquals(2.0d, apply(ScalarOperation.ABS, of(ScalarOperation.ADD, - literal(doubleValue(-1.0d)), - literal(integerValue(-1))))); - } + @Test + public void absAndAddShouldPass() { + assertEquals( + 2.0d, + apply( + ScalarOperation.ABS, + of(ScalarOperation.ADD, literal(doubleValue(-1.0d)), literal(integerValue(-1))))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java index ebe61109a7..34dc170a37 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -14,22 +13,21 @@ public class BasicCounterTest { - @Test - public void increment() { - BasicCounter counter = new BasicCounter(); - for (int i=0; i<5; ++i) { - counter.increment(); - } - - assertThat(counter.getValue(), equalTo(5L)); + @Test + public void increment() { + BasicCounter counter = new BasicCounter(); + for (int i = 0; i < 5; ++i) { + counter.increment(); } - @Test - public void incrementN() { - BasicCounter counter = new BasicCounter(); - counter.add(5); + assertThat(counter.getValue(), equalTo(5L)); + } - assertThat(counter.getValue(), equalTo(5L)); - } + @Test + public void incrementN() { + BasicCounter counter = new BasicCounter(); + counter.add(5); + assertThat(counter.getValue(), equalTo(5L)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java index 067143716d..c26740a04c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -16,46 +15,45 @@ public class BucketPathTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - private final Path agg1 = Path.getAggPath("projects@NESTED"); - private final Path agg2 = Path.getAggPath("projects@FILTERED"); - private final Path metric = Path.getMetricPath("c"); + private final Path agg1 = Path.getAggPath("projects@NESTED"); + private final Path agg2 = Path.getAggPath("projects@FILTERED"); + private final Path metric = Path.getMetricPath("c"); - @Test - public void bucketPath() { - BucketPath bucketPath = new BucketPath(); - bucketPath.add(metric); - bucketPath.add(agg2); - bucketPath.add(agg1); + @Test + public void bucketPath() { + BucketPath bucketPath = new BucketPath(); + bucketPath.add(metric); + bucketPath.add(agg2); + bucketPath.add(agg1); - assertEquals("projects@NESTED>projects@FILTERED.c", bucketPath.getBucketPath()); - } + assertEquals("projects@NESTED>projects@FILTERED.c", bucketPath.getBucketPath()); + } - @Test - public void bucketPathEmpty() { - BucketPath bucketPath = new BucketPath(); + @Test + public void bucketPathEmpty() { + BucketPath bucketPath = new BucketPath(); - assertEquals("", bucketPath.getBucketPath()); - } + assertEquals("", bucketPath.getBucketPath()); + } - @Test - public void theLastMustBeMetric() { - BucketPath bucketPath = new BucketPath(); + @Test + public void theLastMustBeMetric() { + BucketPath bucketPath = new BucketPath(); - exceptionRule.expect(AssertionError.class); - exceptionRule.expectMessage("The last path in the bucket path must be Metric"); - bucketPath.add(agg1); - } + exceptionRule.expect(AssertionError.class); + exceptionRule.expectMessage("The last path 
in the bucket path must be Metric"); + bucketPath.add(agg1); + } - @Test - public void allTheOtherMustBeAgg() { - BucketPath bucketPath = new BucketPath(); + @Test + public void allTheOtherMustBeAgg() { + BucketPath bucketPath = new BucketPath(); - exceptionRule.expect(AssertionError.class); - exceptionRule.expectMessage("All the other path in the bucket path must be Agg"); - bucketPath.add(metric); - bucketPath.add(metric); - } + exceptionRule.expect(AssertionError.class); + exceptionRule.expectMessage("All the other path in the bucket path must be Agg"); + bucketPath.add(metric); + bucketPath.add(metric); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java index 9f6fcbcc6d..1260b551fb 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.hamcrest.MatcherAssert.assertThat; @@ -35,79 +34,78 @@ @RunWith(MockitoJUnitRunner.class) public class BindingTupleQueryPlannerExecuteTest { - @Mock - private Client client; - @Mock - private SearchResponse aggResponse; - @Mock - private ColumnTypeProvider columnTypeProvider; + @Mock private Client client; + @Mock private SearchResponse aggResponse; + @Mock private ColumnTypeProvider columnTypeProvider; - @Before - public void init() { - MockitoAnnotations.initMocks(this); + @Before + public void init() { + MockitoAnnotations.initMocks(this); - ActionFuture mockFuture = mock(ActionFuture.class); - when(client.execute(any(), any())).thenReturn(mockFuture); - when(mockFuture.actionGet()).thenAnswer(invocationOnMock -> aggResponse); - } + ActionFuture 
mockFuture = mock(ActionFuture.class); + when(client.execute(any(), any())).thenReturn(mockFuture); + when(mockFuture.actionGet()).thenAnswer(invocationOnMock -> aggResponse); + } - @Test - public void testAggregationShouldPass() { - assertThat(query("SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender", - mockSearchAggregation()), - containsInAnyOrder( - BindingTuple.from(ImmutableMap.of("gender", "m", "max", 20d, "min", 10d)), - BindingTuple.from(ImmutableMap.of("gender", "f", "max", 40d, "min", 20d)))); - } + @Test + public void testAggregationShouldPass() { + assertThat( + query( + "SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender", + mockSearchAggregation()), + containsInAnyOrder( + BindingTuple.from(ImmutableMap.of("gender", "m", "max", 20d, "min", 10d)), + BindingTuple.from(ImmutableMap.of("gender", "f", "max", 40d, "min", 20d)))); + } + protected List query(String sql, MockSearchAggregation mockAgg) { + doAnswer(mockAgg).when(aggResponse).getAggregations(); - protected List query(String sql, MockSearchAggregation mockAgg) { - doAnswer(mockAgg).when(aggResponse).getAggregations(); + BindingTupleQueryPlanner queryPlanner = + new BindingTupleQueryPlanner(client, SqlParserUtils.parse(sql), columnTypeProvider); + return queryPlanner.execute(); + } - BindingTupleQueryPlanner queryPlanner = - new BindingTupleQueryPlanner(client, SqlParserUtils.parse(sql), columnTypeProvider); - return queryPlanner.execute(); - } + private MockSearchAggregation mockSearchAggregation() { + return new MockSearchAggregation( + "{\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 507,\n" + + " \"min#min\": {\n" + + " \"value\": 10\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 493,\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " },\n" + + " \"max#max\": {\n" + + " 
\"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"); + } - private MockSearchAggregation mockSearchAggregation() { - return new MockSearchAggregation("{\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 507,\n" - + " \"min#min\": {\n" - + " \"value\": 10\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 493,\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"); - } - - protected static class MockSearchAggregation implements Answer { - private final Aggregations aggregation; + protected static class MockSearchAggregation implements Answer { + private final Aggregations aggregation; - public MockSearchAggregation(String agg) { - aggregation = AggregationUtils.fromJson(agg); - } + public MockSearchAggregation(String agg) { + aggregation = AggregationUtils.fromJson(agg); + } - @Override - public Aggregations answer(InvocationOnMock invocationOnMock) throws Throwable { - return aggregation; - } + @Override + public Aggregations answer(InvocationOnMock invocationOnMock) throws Throwable { + return aggregation; } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java index 57530692d4..11e14e9b48 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.query; import static org.hamcrest.Matchers.equalTo; @@ -42,228 +41,225 @@ public class DefaultQueryActionTest { - private DefaultQueryAction queryAction; + private 
DefaultQueryAction queryAction; - private Client mockClient; + private Client mockClient; - private Select mockSelect; + private Select mockSelect; - private SearchRequestBuilder mockRequestBuilder; + private SearchRequestBuilder mockRequestBuilder; - @Before - public void initDefaultQueryAction() { + @Before + public void initDefaultQueryAction() { - mockClient = mock(Client.class); - mockSelect = mock(Select.class); - mockRequestBuilder = mock(SearchRequestBuilder.class); + mockClient = mock(Client.class); + mockSelect = mock(Select.class); + mockRequestBuilder = mock(SearchRequestBuilder.class); - List fields = new LinkedList<>(); - fields.add(new Field("balance", "bbb")); + List fields = new LinkedList<>(); + fields.add(new Field("balance", "bbb")); - doReturn(fields).when(mockSelect).getFields(); - doReturn(null).when(mockRequestBuilder).setFetchSource(any(String[].class), any(String[].class)); - doReturn(null).when(mockRequestBuilder).addScriptField(anyString(), any(Script.class)); + doReturn(fields).when(mockSelect).getFields(); + doReturn(null) + .when(mockRequestBuilder) + .setFetchSource(any(String[].class), any(String[].class)); + doReturn(null).when(mockRequestBuilder).addScriptField(anyString(), any(Script.class)); - queryAction = new DefaultQueryAction(mockClient, mockSelect); - queryAction.initialize(mockRequestBuilder); - } + queryAction = new DefaultQueryAction(mockClient, mockSelect); + queryAction.initialize(mockRequestBuilder); + } - @After - public void cleanup() { - LocalClusterState.state(null); - } + @After + public void cleanup() { + LocalClusterState.state(null); + } - @Test - public void scriptFieldWithTwoParams() throws SqlParseException { + @Test + public void scriptFieldWithTwoParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, true, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", 
false, true, false)); - queryAction.setFields(fields); + queryAction.setFields(fields); - final Optional> fieldNames = queryAction.getFieldNames(); - Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); - Assert.assertThat(fieldNames.get().size(), equalTo(1)); - Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); + final Optional> fieldNames = queryAction.getFieldNames(); + Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); + Assert.assertThat(fieldNames.get().size(), equalTo(1)); + Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); - Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); - } + Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); + } - @Test - public void scriptFieldWithThreeParams() throws SqlParseException { + @Test + public void scriptFieldWithThreeParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - true, true, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", true, true, false)); - queryAction.setFields(fields); + queryAction.setFields(fields); - final Optional> fieldNames = queryAction.getFieldNames(); - Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); - Assert.assertThat(fieldNames.get().size(), equalTo(1)); - Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); + final Optional> fieldNames = queryAction.getFieldNames(); + Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); + Assert.assertThat(fieldNames.get().size(), equalTo(1)); + Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); - Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); - } + Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); + } - @Test(expected = 
SqlParseException.class) - public void scriptFieldWithLessThanTwoParams() throws SqlParseException { + @Test(expected = SqlParseException.class) + public void scriptFieldWithLessThanTwoParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, false, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", false, false, false)); - queryAction.setFields(fields); - } + queryAction.setFields(fields); + } - @Test - public void scriptFieldWithMoreThanThreeParams() throws SqlParseException { + @Test + public void scriptFieldWithMoreThanThreeParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, true, true)); - - queryAction.setFields(fields); - } - - @Test - public void testIfScrollShouldBeOpenWithDifferentFormats() { - int settingFetchSize = 500; - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; - mockLocalClusterStateAndInitializeMetrics(timeValue); - - doReturn(limit).when(mockSelect).getRowCount(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); - queryAction.setSqlRequest(mockSqlRequest); - - Format[] formats = new Format[] {Format.CSV, Format.RAW, Format.JSON, Format.TABLE}; - for (Format format : formats) { - queryAction.setFormat(format); - queryAction.checkAndSetScroll(); - } - - Mockito.verify(mockRequestBuilder, times(4)).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(any(TimeValue.class)); - - queryAction.setFormat(Format.JDBC); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); - - } + List fields = new LinkedList<>(); + 
fields.add(createScriptField("script1", "doc['balance'] * 2", false, true, true)); - @Test - public void testIfScrollShouldBeOpen() { - int settingFetchSize = 500; - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; + queryAction.setFields(fields); + } - doReturn(limit).when(mockSelect).getRowCount(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); + @Test + public void testIfScrollShouldBeOpenWithDifferentFormats() { + int settingFetchSize = 500; + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + mockLocalClusterStateAndInitializeMetrics(timeValue); - mockLocalClusterStateAndInitializeMetrics(timeValue); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); + doReturn(limit).when(mockSelect).getRowCount(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); + queryAction.setSqlRequest(mockSqlRequest); + Format[] formats = new Format[] {Format.CSV, Format.RAW, Format.JSON, Format.TABLE}; + for (Format format : formats) { + queryAction.setFormat(format); + queryAction.checkAndSetScroll(); } - @Test - public void testIfScrollShouldBeOpenWithDifferentFetchSize() { - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; - mockLocalClusterStateAndInitializeMetrics(timeValue); - - doReturn(limit).when(mockSelect).getRowCount(); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); - - int[] fetchSizes = new int[] {0, -10}; - for (int fetch : fetchSizes) { - 
doReturn(fetch).when(mockSqlRequest).fetchSize(); - queryAction.checkAndSetScroll(); - } - Mockito.verify(mockRequestBuilder, times(2)).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); - - int userFetchSize = 20; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(20); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); + Mockito.verify(mockRequestBuilder, times(4)).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(any(TimeValue.class)); + + queryAction.setFormat(Format.JDBC); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpen() { + int settingFetchSize = 500; + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + + doReturn(limit).when(mockSelect).getRowCount(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + mockLocalClusterStateAndInitializeMetrics(timeValue); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpenWithDifferentFetchSize() { + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + mockLocalClusterStateAndInitializeMetrics(timeValue); + + doReturn(limit).when(mockSelect).getRowCount(); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + int[] fetchSizes = new 
int[] {0, -10}; + for (int fetch : fetchSizes) { + doReturn(fetch).when(mockSqlRequest).fetchSize(); + queryAction.checkAndSetScroll(); } - - - @Test - public void testIfScrollShouldBeOpenWithDifferentValidFetchSizeAndLimit() { - TimeValue timeValue = new TimeValue(120000); - mockLocalClusterStateAndInitializeMetrics(timeValue); - - int limit = 2300; - doReturn(limit).when(mockSelect).getRowCount(); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - - /** fetchSize <= LIMIT - open scroll*/ - int userFetchSize = 1500; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); - - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(userFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); - - /** fetchSize > LIMIT - no scroll */ - userFetchSize = 5000; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - mockRequestBuilder = mock(SearchRequestBuilder.class); - queryAction.initialize(mockRequestBuilder); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + Mockito.verify(mockRequestBuilder, times(2)).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + + int userFetchSize = 20; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(20); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpenWithDifferentValidFetchSizeAndLimit() { + TimeValue timeValue = new TimeValue(120000); + mockLocalClusterStateAndInitializeMetrics(timeValue); + + int limit = 2300; + 
doReturn(limit).when(mockSelect).getRowCount(); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + + /** fetchSize <= LIMIT - open scroll */ + int userFetchSize = 1500; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(userFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + + /** fetchSize > LIMIT - no scroll */ + userFetchSize = 5000; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + mockRequestBuilder = mock(SearchRequestBuilder.class); + queryAction.initialize(mockRequestBuilder); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + } + + private void mockLocalClusterStateAndInitializeMetrics(TimeValue time) { + LocalClusterState mockLocalClusterState = mock(LocalClusterState.class); + LocalClusterState.state(mockLocalClusterState); + doReturn(time).when(mockLocalClusterState).getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + doReturn(3600L) + .when(mockLocalClusterState) + .getSettingValue(Settings.Key.METRICS_ROLLING_WINDOW); + doReturn(2L).when(mockLocalClusterState).getSettingValue(Settings.Key.METRICS_ROLLING_INTERVAL); + + Metrics.getInstance().registerDefaultMetrics(); + } + + private Field createScriptField( + final String name, + final String script, + final boolean addScriptLanguage, + final boolean addScriptParam, + final boolean addRedundantParam) { + + final List params = new ArrayList<>(); + + params.add(new KVValue("alias", name)); + if (addScriptLanguage) { + params.add(new KVValue("painless")); } - - private void mockLocalClusterStateAndInitializeMetrics(TimeValue time) { - LocalClusterState mockLocalClusterState = mock(LocalClusterState.class); - 
LocalClusterState.state(mockLocalClusterState); - doReturn(time).when(mockLocalClusterState).getSettingValue( - Settings.Key.SQL_CURSOR_KEEP_ALIVE); - doReturn(3600L).when(mockLocalClusterState).getSettingValue( - Settings.Key.METRICS_ROLLING_WINDOW); - doReturn(2L).when(mockLocalClusterState).getSettingValue( - Settings.Key.METRICS_ROLLING_INTERVAL); - - Metrics.getInstance().registerDefaultMetrics(); - + if (addScriptParam) { + params.add(new KVValue(script)); } - - private Field createScriptField(final String name, final String script, final boolean addScriptLanguage, - final boolean addScriptParam, final boolean addRedundantParam) { - - final List params = new ArrayList<>(); - - params.add(new KVValue("alias", name)); - if (addScriptLanguage) { - params.add(new KVValue("painless")); - } - if (addScriptParam) { - params.add(new KVValue(script)); - } - if (addRedundantParam) { - params.add(new KVValue("Fail the test")); - } - - return new MethodField("script", params, null, null); + if (addRedundantParam) { + params.add(new KVValue("Fail the test")); } + + return new MethodField("script", params, null, null); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java index 0c16a3264a..168725ed11 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.inline; import static org.hamcrest.MatcherAssert.assertThat; @@ -29,101 +28,111 @@ public class AliasInliningTests { - private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; - @Before - public void setUp() throws IOException { - URL url = Resources.getResource(TEST_MAPPING_FILE); - 
String mappings = Resources.toString(url, Charsets.UTF_8); - mockLocalClusterState(mappings); - } - - @Test - public void orderByAliasedFieldTest() throws SqlParseException { - String originalQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY date DESC"; - String originalDsl = parseAsSimpleQuery(originalQuery); - - String rewrittenQuery = - "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY utc_time DESC"; - - String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); - - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void orderByAliasedScriptedField() throws SqlParseException { - String originalDsl = parseAsSimpleQuery("SELECT date_format(birthday, 'dd-MM-YYYY') date " + - "FROM bank " + - "ORDER BY date"); - String rewrittenQuery = "SELECT date_format(birthday, 'dd-MM-YYYY') date " + - "FROM bank " + - "ORDER BY date_format(birthday, 'dd-MM-YYYY')"; - - String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void groupByAliasedFieldTest() throws SqlParseException { - String originalQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date"; - - String originalDsl = parseAsAggregationQuery(originalQuery); - - String rewrittenQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY utc_time DESC"; - - String rewrittenDsl = parseAsAggregationQuery(rewrittenQuery); - - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void groupAndSortBySameExprAlias() throws SqlParseException { - String query = "SELECT date_format(timestamp, 'yyyy-MM') opensearch-table.timestamp_tg, COUNT(*) count, COUNT(DistanceKilometers) opensearch-table.DistanceKilometers_count\n" + - "FROM opensearch_dashboards_sample_data_flights\n" + - "GROUP BY date_format(timestamp, 'yyyy-MM')\n" + - "ORDER BY 
date_format(timestamp, 'yyyy-MM') DESC\n" + - "LIMIT 2500"; - String dsl = parseAsAggregationQuery(query); - - JSONObject parseQuery = new JSONObject(dsl); - - assertThat(parseQuery.query("/aggregations/opensearch-table.timestamp_tg/terms/script"), notNullValue()); - - } - - @Test - public void groupByAndSortAliased() throws SqlParseException { - String dsl = parseAsAggregationQuery( - "SELECT date_format(utc_time, 'dd-MM-YYYY') date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date " + - "ORDER BY date DESC"); - - JSONObject parsedQuery = new JSONObject(dsl); - - JSONObject query = (JSONObject)parsedQuery.query("/aggregations/date/terms/script"); - - assertThat(query, notNullValue()); - } - - private String parseAsSimpleQuery(String originalQuery) throws SqlParseException { - SqlRequest sqlRequest = new SqlRequest(originalQuery, new JSONObject()); - DefaultQueryAction defaultQueryAction = new DefaultQueryAction(mock(Client.class), - new SqlParser().parseSelect(parse(originalQuery))); - defaultQueryAction.setSqlRequest(sqlRequest); - return defaultQueryAction.explain().explain(); - } - - private String parseAsAggregationQuery(String originalQuery) throws SqlParseException { - return new AggregationQueryAction(mock(Client.class), - new SqlParser().parseSelect(parse(originalQuery))).explain().explain(); - } + private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; + + @Before + public void setUp() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + mockLocalClusterState(mappings); + } + + @Test + public void orderByAliasedFieldTest() throws SqlParseException { + String originalQuery = + "SELECT utc_time date " + + "FROM opensearch_dashboards_sample_data_logs " + + "ORDER BY date DESC"; + String originalDsl = parseAsSimpleQuery(originalQuery); + + String rewrittenQuery = + "SELECT utc_time date " + + "FROM 
opensearch_dashboards_sample_data_logs " + + "ORDER BY utc_time DESC"; + + String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); + + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void orderByAliasedScriptedField() throws SqlParseException { + String originalDsl = + parseAsSimpleQuery( + "SELECT date_format(birthday, 'dd-MM-YYYY') date " + "FROM bank " + "ORDER BY date"); + String rewrittenQuery = + "SELECT date_format(birthday, 'dd-MM-YYYY') date " + + "FROM bank " + + "ORDER BY date_format(birthday, 'dd-MM-YYYY')"; + + String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void groupByAliasedFieldTest() throws SqlParseException { + String originalQuery = + "SELECT utc_time date " + "FROM opensearch_dashboards_sample_data_logs " + "GROUP BY date"; + + String originalDsl = parseAsAggregationQuery(originalQuery); + + String rewrittenQuery = + "SELECT utc_time date " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY utc_time DESC"; + + String rewrittenDsl = parseAsAggregationQuery(rewrittenQuery); + + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void groupAndSortBySameExprAlias() throws SqlParseException { + String query = + "SELECT date_format(timestamp, 'yyyy-MM') opensearch-table.timestamp_tg, COUNT(*) count," + + " COUNT(DistanceKilometers) opensearch-table.DistanceKilometers_count\n" + + "FROM opensearch_dashboards_sample_data_flights\n" + + "GROUP BY date_format(timestamp, 'yyyy-MM')\n" + + "ORDER BY date_format(timestamp, 'yyyy-MM') DESC\n" + + "LIMIT 2500"; + String dsl = parseAsAggregationQuery(query); + + JSONObject parseQuery = new JSONObject(dsl); + + assertThat( + parseQuery.query("/aggregations/opensearch-table.timestamp_tg/terms/script"), + notNullValue()); + } + + @Test + public void groupByAndSortAliased() throws SqlParseException { + String dsl = + parseAsAggregationQuery( + "SELECT date_format(utc_time, 
'dd-MM-YYYY') date " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date " + + "ORDER BY date DESC"); + + JSONObject parsedQuery = new JSONObject(dsl); + + JSONObject query = (JSONObject) parsedQuery.query("/aggregations/date/terms/script"); + + assertThat(query, notNullValue()); + } + + private String parseAsSimpleQuery(String originalQuery) throws SqlParseException { + SqlRequest sqlRequest = new SqlRequest(originalQuery, new JSONObject()); + DefaultQueryAction defaultQueryAction = + new DefaultQueryAction( + mock(Client.class), new SqlParser().parseSelect(parse(originalQuery))); + defaultQueryAction.setSqlRequest(sqlRequest); + return defaultQueryAction.explain().explain(); + } + + private String parseAsAggregationQuery(String originalQuery) throws SqlParseException { + return new AggregationQueryAction( + mock(Client.class), new SqlParser().parseSelect(parse(originalQuery))) + .explain() + .explain(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java index b0c6b8a2d8..c7e7f22d5c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.MatcherAssert.assertThat; @@ -15,28 +14,29 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * To test the functionality of {@link StringUtils#unquoteSingleField} - * and {@link StringUtils#unquoteFullColumn(String, String)} + * To test the functionality of {@link StringUtils#unquoteSingleField} and {@link + * StringUtils#unquoteFullColumn(String, String)} */ public class BackticksUnquoterTest { - @Test - public void assertNotQuotedStringShouldKeepTheSame() { - 
assertThat(unquoteSingleField("identifier"), equalTo("identifier")); - assertThat(unquoteFullColumn("identifier"), equalTo("identifier")); - } - - @Test - public void assertStringWithOneBackTickShouldKeepTheSame() { - assertThat(unquoteSingleField("`identifier"), equalTo("`identifier")); - assertThat(unquoteFullColumn("`identifier"), equalTo("`identifier")); - } - - @Test - public void assertBackticksQuotedStringShouldBeUnquoted() { - assertThat("identifier", equalTo(unquoteSingleField("`identifier`"))); - - assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.`identifier2`"))); - assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.identifier2"))); - } + @Test + public void assertNotQuotedStringShouldKeepTheSame() { + assertThat(unquoteSingleField("identifier"), equalTo("identifier")); + assertThat(unquoteFullColumn("identifier"), equalTo("identifier")); + } + + @Test + public void assertStringWithOneBackTickShouldKeepTheSame() { + assertThat(unquoteSingleField("`identifier"), equalTo("`identifier")); + assertThat(unquoteFullColumn("`identifier"), equalTo("`identifier")); + } + + @Test + public void assertBackticksQuotedStringShouldBeUnquoted() { + assertThat("identifier", equalTo(unquoteSingleField("`identifier`"))); + + assertThat( + "identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.`identifier2`"))); + assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.identifier2"))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java index 58fa8793ff..85da1d990f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import 
com.fasterxml.jackson.core.JsonFactory; @@ -41,42 +40,52 @@ import org.opensearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; public class AggregationUtils { - private final static List entryList = - new ImmutableMap.Builder>().put( - MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) - .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) - .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) - .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) - .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) - .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) - .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) - .put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) - .put(PercentilesBucketPipelineAggregationBuilder.NAME, - (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) - .put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) - .build() - .entrySet() - .stream() - .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), - entry.getValue())) - .collect(Collectors.toList()); - private final static NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(entryList); + private static final List entryList = + new ImmutableMap.Builder>() + .put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) + .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) + .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) + .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) + .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) + .put(LongTerms.NAME, (p, c) -> 
ParsedLongTerms.fromXContent(p, (String) c)) + .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) + .put( + ValueCountAggregationBuilder.NAME, + (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) + .put( + PercentilesBucketPipelineAggregationBuilder.NAME, + (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) + .put( + DateHistogramAggregationBuilder.NAME, + (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) + .build() + .entrySet() + .stream() + .map( + entry -> + new NamedXContentRegistry.Entry( + Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + private static final NamedXContentRegistry namedXContentRegistry = + new NamedXContentRegistry(entryList); - /** - * Populate {@link Aggregations} from JSON string. - * @param json json string - * @return {@link Aggregations} - */ - public static Aggregations fromJson(String json) { - try { - XContentParser xContentParser = new JsonXContentParser( - namedXContentRegistry, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(json)); - xContentParser.nextToken(); - return Aggregations.fromXContent(xContentParser); - } catch (IOException e) { - throw new RuntimeException(e); - } + /** + * Populate {@link Aggregations} from JSON string. 
+ * + * @param json json string + * @return {@link Aggregations} + */ + public static Aggregations fromJson(String json) { + try { + XContentParser xContentParser = + new JsonXContentParser( + namedXContentRegistry, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(json)); + xContentParser.nextToken(); + return Aggregations.fromXContent(xContentParser); + } catch (IOException e) { + throw new RuntimeException(e); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java index d627cebb27..7578720624 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static java.util.Collections.emptyList; @@ -58,206 +57,211 @@ public class CheckScriptContents { - private static SQLExpr queryToExpr(String query) { - return new ElasticSqlExprParser(query).expr(); - } + private static SQLExpr queryToExpr(String query) { + return new ElasticSqlExprParser(query).expr(); + } - public static ScriptField getScriptFieldFromQuery(String query) { - try { - Client mockClient = mock(Client.class); - stubMockClient(mockClient); - QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); - SqlElasticRequestBuilder requestBuilder = queryAction.explain(); + public static ScriptField getScriptFieldFromQuery(String query) { + try { + Client mockClient = mock(Client.class); + stubMockClient(mockClient); + QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); + SqlElasticRequestBuilder requestBuilder = queryAction.explain(); - SearchRequestBuilder request = (SearchRequestBuilder) requestBuilder.getBuilder(); - List scriptFields = request.request().source().scriptFields(); + SearchRequestBuilder 
request = (SearchRequestBuilder) requestBuilder.getBuilder(); + List scriptFields = request.request().source().scriptFields(); - assertTrue(scriptFields.size() == 1); + assertTrue(scriptFields.size() == 1); - return scriptFields.get(0); + return scriptFields.get(0); - } catch (SQLFeatureNotSupportedException | SqlParseException | SQLFeatureDisabledException e) { - throw new ParserException("Unable to parse query: " + query, e); - } + } catch (SQLFeatureNotSupportedException | SqlParseException | SQLFeatureDisabledException e) { + throw new ParserException("Unable to parse query: " + query, e); } + } - public static ScriptFilter getScriptFilterFromQuery(String query, SqlParser parser) { - try { - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - - assertTrue(where.getWheres().size() == 1); - assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + public static ScriptFilter getScriptFilterFromQuery(String query, SqlParser parser) { + try { + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); - return (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + assertTrue(where.getWheres().size() == 1); + assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - } catch (SqlParseException e) { - throw new ParserException("Unable to parse query: " + query); - } - } + return (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - public static boolean scriptContainsString(ScriptField scriptField, String string) { - return scriptField.script().getIdOrCode().contains(string); + } catch (SqlParseException e) { + throw new ParserException("Unable to parse query: " + query); } + } - public static boolean scriptContainsString(ScriptFilter scriptFilter, String string) { - return scriptFilter.getScript().contains(string); - } + public static boolean 
scriptContainsString(ScriptField scriptField, String string) { + return scriptField.script().getIdOrCode().contains(string); + } - public static boolean scriptHasPattern(ScriptField scriptField, String regex) { - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(scriptField.script().getIdOrCode()); - return matcher.find(); - } + public static boolean scriptContainsString(ScriptFilter scriptFilter, String string) { + return scriptFilter.getScript().contains(string); + } - public static boolean scriptHasPattern(ScriptFilter scriptFilter, String regex) { - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(scriptFilter.getScript()); - return matcher.find(); - } + public static boolean scriptHasPattern(ScriptField scriptField, String regex) { + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(scriptField.script().getIdOrCode()); + return matcher.find(); + } - public static void stubMockClient(Client mockClient) { - String mappings = "{\n" + - " \"opensearch-sql_test_index_bank\": {\n" + - " \"mappings\": {\n" + - " \"account\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " 
},\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"raw\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - // ==== All required by IndexMetaData.fromXContent() ==== - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 5,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\",\n" + - " \"aliases_version\": \"1\"\n" + - //======================================================= - " }\n" + - "}"; + public static boolean scriptHasPattern(ScriptFilter scriptFilter, String regex) { + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(scriptFilter.getScript()); + return matcher.find(); + } - AdminClient mockAdminClient = mock(AdminClient.class); - when(mockClient.admin()).thenReturn(mockAdminClient); + public static void stubMockClient(Client mockClient) { + String mappings = + "{\n" + + " \"opensearch-sql_test_index_bank\": {\n" + + " \"mappings\": {\n" + + " \"account\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": 
\"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"raw\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + + // ==== All required by IndexMetaData.fromXContent() ==== + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 5,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + + // ======================================================= + " }\n" + + "}"; - IndicesAdminClient mockIndexClient = mock(IndicesAdminClient.class); - when(mockAdminClient.indices()).thenReturn(mockIndexClient); + AdminClient mockAdminClient = mock(AdminClient.class); + when(mockClient.admin()).thenReturn(mockAdminClient); - ActionFuture mockActionResp = mock(ActionFuture.class); - when(mockIndexClient.getFieldMappings(any(GetFieldMappingsRequest.class))).thenReturn(mockActionResp); - mockLocalClusterState(mappings); - } + IndicesAdminClient mockIndexClient = mock(IndicesAdminClient.class); + when(mockAdminClient.indices()).thenReturn(mockIndexClient); - public static XContentParser createParser(String mappings) throws IOException { - return XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - mappings - ); - } + ActionFuture mockActionResp = mock(ActionFuture.class); + when(mockIndexClient.getFieldMappings(any(GetFieldMappingsRequest.class))) + .thenReturn(mockActionResp); + mockLocalClusterState(mappings); + } - public static void mockLocalClusterState(String mappings) { - 
LocalClusterState.state().setClusterService(mockClusterService(mappings)); - LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); - LocalClusterState.state().setPluginSettings(mockPluginSettings()); - } + public static XContentParser createParser(String mappings) throws IOException { + return XContentType.JSON + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, mappings); + } - public static ClusterService mockClusterService(String mappings) { - ClusterService mockService = mock(ClusterService.class); - ClusterState mockState = mock(ClusterState.class); - Metadata mockMetaData = mock(Metadata.class); + public static void mockLocalClusterState(String mappings) { + LocalClusterState.state().setClusterService(mockClusterService(mappings)); + LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); + LocalClusterState.state().setPluginSettings(mockPluginSettings()); + } - when(mockService.state()).thenReturn(mockState); - when(mockState.metadata()).thenReturn(mockMetaData); - try { - when(mockMetaData.findMappings(any(), any())).thenReturn( - Map.of(TestsConstants.TEST_INDEX_BANK, IndexMetadata.fromXContent( - createParser(mappings)).mapping())); - } - catch (IOException e) { - throw new IllegalStateException(e); - } - return mockService; - } + public static ClusterService mockClusterService(String mappings) { + ClusterService mockService = mock(ClusterService.class); + ClusterState mockState = mock(ClusterState.class); + Metadata mockMetaData = mock(Metadata.class); - public static IndexNameExpressionResolver mockIndexNameExpressionResolver() { - IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class); - when(mockResolver.concreteIndexNames(any(), any(), anyBoolean(), anyString())).thenAnswer( - (Answer) invocation -> { - // Return index expression directly without resolving - Object indexExprs = invocation.getArguments()[3]; - if (indexExprs 
instanceof String) { - return new String[]{ (String) indexExprs }; - } - return (String[]) indexExprs; - } - ); - return mockResolver; + when(mockService.state()).thenReturn(mockState); + when(mockState.metadata()).thenReturn(mockMetaData); + try { + when(mockMetaData.findMappings(any(), any())) + .thenReturn( + Map.of( + TestsConstants.TEST_INDEX_BANK, + IndexMetadata.fromXContent(createParser(mappings)).mapping())); + } catch (IOException e) { + throw new IllegalStateException(e); } + return mockService; + } - public static OpenSearchSettings mockPluginSettings() { - OpenSearchSettings settings = mock(OpenSearchSettings.class); + public static IndexNameExpressionResolver mockIndexNameExpressionResolver() { + IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class); + when(mockResolver.concreteIndexNames(any(), any(), anyBoolean(), anyString())) + .thenAnswer( + (Answer) + invocation -> { + // Return index expression directly without resolving + Object indexExprs = invocation.getArguments()[3]; + if (indexExprs instanceof String) { + return new String[] {(String) indexExprs}; + } + return (String[]) indexExprs; + }); + return mockResolver; + } - // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard to mock. - // In this case, default value in Setting will be returned all the time. - doReturn(emptyList()).when(settings).getSettings(); - return settings; - } + public static OpenSearchSettings mockPluginSettings() { + OpenSearchSettings settings = mock(OpenSearchSettings.class); + // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard + // to mock. + // In this case, default value in Setting will be returned all the time. 
+ doReturn(emptyList()).when(settings).getSettings(); + return settings; + } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java index e7e453ca3f..706d49afda 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java @@ -15,7 +15,7 @@ import org.opensearch.sql.data.type.ExprType; /** - * The type of text value. See doc */ public class OpenSearchTextType extends OpenSearchDataType { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index 3d3a6a5996..bfc06b94c0 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -491,7 +491,7 @@ public void constructNestedArraysOfStringsReturnsFirstIndex() { public void constructMultiNestedArraysOfStringsReturnsFirstIndex() { assertEquals( stringValue("z"), - tupleValue("{\"stringV\":" + "[\"z\",[\"s\"],[\"zz\", \"au\"]]}").get("stringV")); + tupleValue("{\"stringV\":[\"z\",[\"s\"],[\"zz\", \"au\"]]}").get("stringV")); } @Test From 752da2154d337c74ce60752fe8e47b97ea182bb3 Mon Sep 17 00:00:00 2001 From: Yury-Fridlyand Date: Mon, 21 Aug 2023 09:40:29 -0700 Subject: [PATCH 33/42] Add support for `date_nanos` and tests. (#337) (#1976) * Add support for `date_nanos` and tests. (#337) * Add support for `date_nanos` and tests. Signed-off-by: Yury-Fridlyand * Add more IT. Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand * Typo fix in IT. Signed-off-by: Yury-Fridlyand * Address PR feedback. 
Signed-off-by: Yury-Fridlyand * Spotless Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand --- .../org/opensearch/sql/ppl/DataTypeIT.java | 1 + .../opensearch/sql/ppl/SystemFunctionIT.java | 25 ++++-- .../opensearch/sql/sql/DateTimeFormatsIT.java | 90 +++++++++++++++++++ .../opensearch/sql/sql/SystemFunctionIT.java | 18 +++- integ-test/src/test/resources/datatypes.json | 2 +- .../src/test/resources/date_formats.json | 2 +- .../datatypes_index_mapping.json | 5 +- .../date_formats_index_mapping.json | 2 +- .../data/type/OpenSearchDataType.java | 3 + .../data/type/OpenSearchDataTypeTest.java | 1 + 10 files changed, 133 insertions(+), 16 deletions(-) diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java index 8b5a6d498e..fe5c2ff270 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java @@ -49,6 +49,7 @@ public void test_nonnumeric_data_types() throws IOException { schema("text_value", "string"), schema("binary_value", "binary"), schema("date_value", "timestamp"), + schema("date_nanos_value", "timestamp"), schema("ip_value", "ip"), schema("object_value", "struct"), schema("nested_value", "array"), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java index d2cd140e99..1c23935f81 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java @@ -70,19 +70,28 @@ public void typeof_opensearch_types() throws IOException { response = executeQuery( String.format( - "source=%s | eval " - + "`text` = typeof(text_value), `date` = typeof(date_value)," - + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," - + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," 
- + "`binary` = typeof(binary_value), `geo_point` = typeof(geo_point_value)" + "source=%s | eval `text` = typeof(text_value), `date` = typeof(date_value)," + + " `date_nanos` = typeof(date_nanos_value),`boolean` = typeof(boolean_value)," + + " `object` = typeof(object_value),`keyword` = typeof(keyword_value), `ip` =" + + " typeof(ip_value),`binary` = typeof(binary_value), `geo_point` =" + + " typeof(geo_point_value)" // TODO activate this test once `ARRAY` type supported, see // ExpressionAnalyzer::isTypeNotSupported // + ", `nested` = typeof(nested_value)" - + " | fields `text`, `date`, `boolean`, `object`, `keyword`, `ip`, `binary`," - + " `geo_point`", + + " | fields `text`, `date`, `date_nanos`, `boolean`, `object`, `keyword`," + + " `ip`, `binary`, `geo_point`", TEST_INDEX_DATATYPE_NONNUMERIC)); verifyDataRows( response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); + rows( + "TEXT", + "TIMESTAMP", + "TIMESTAMP", + "BOOLEAN", + "OBJECT", + "KEYWORD", + "IP", + "BINARY", + "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java index d6f2d2c7f4..13c2eecd56 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java @@ -5,6 +5,7 @@ package org.opensearch.sql.sql; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -29,6 +30,7 @@ public class DateTimeFormatsIT extends SQLIntegTestCase { public void init() throws Exception { super.init(); loadIndex(Index.DATE_FORMATS); + loadIndex(Index.DATA_TYPE_NONNUMERIC); } @Test @@ -139,6 +141,94 @@ public void 
testNumericFormats() { rows("1970-01-02 03:55:00", "1970-01-01 00:01:40.5")); } + @Test + @SneakyThrows + public void testDateNanosWithFormats() { + String query = + String.format("SELECT hour_minute_second_OR_t_time" + " FROM %s", TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("09:07:42"), rows("07:07:42.123456789")); + } + + @Test + @SneakyThrows + public void testDateNanosWithFunctions() { + // in memory funcs + String query = + String.format( + "SELECT" + + " hour_minute_second_OR_t_time > TIME '08:07:00'," + + " hour_minute_second_OR_t_time < TIME '08:07:00'," + + " hour_minute_second_OR_t_time = t_time_no_millis," + + " hour_minute_second_OR_t_time <> strict_t_time," + + " hour_minute_second_OR_t_time >= t_time" + + " FROM %s", + TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema( + result, + schema("hour_minute_second_OR_t_time > TIME '08:07:00'", null, "boolean"), + schema("hour_minute_second_OR_t_time < TIME '08:07:00'", null, "boolean"), + schema("hour_minute_second_OR_t_time = t_time_no_millis", null, "boolean"), + schema("hour_minute_second_OR_t_time <> strict_t_time", null, "boolean"), + schema("hour_minute_second_OR_t_time >= t_time", null, "boolean")); + verifyDataRows( + result, rows(true, false, true, false, true), rows(false, true, false, true, false)); + // push down + query = + String.format( + "SELECT hour_minute_second_OR_t_time" + + " FROM %s WHERE hour_minute_second_OR_t_time > TIME '08:07:00'", + TEST_INDEX_DATE_FORMATS); + result = executeQuery(query); + verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("09:07:42")); + query = + String.format( + "SELECT hour_minute_second_OR_t_time" + + " FROM %s WHERE hour_minute_second_OR_t_time < TIME '08:07:00'", + TEST_INDEX_DATE_FORMATS); + result = executeQuery(query); + 
verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("07:07:42.123456789")); + } + + @Test + @SneakyThrows + public void testDateNanosOrderBy() { + String query = + String.format( + "SELECT hour_minute_second_OR_t_time" + + " FROM %s ORDER BY hour_minute_second_OR_t_time ASC", + TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema(result, schema("hour_minute_second_OR_t_time", null, "time")); + verifyDataRows(result, rows("07:07:42.123456789"), rows("09:07:42")); + } + + @Test + @SneakyThrows + public void testDateNanosGroupBy() { + String query = + String.format( + "SELECT count(*)" + " FROM %s GROUP BY hour_minute_second_OR_t_time", + TEST_INDEX_DATE_FORMATS); + JSONObject result = executeQuery(query); + verifySchema(result, schema("count(*)", null, "integer")); + verifyDataRows(result, rows(1), rows(1)); + } + + @Test + @SneakyThrows + public void testDateNanosWithNanos() { + String query = + String.format("SELECT date_nanos_value" + " FROM %s", TEST_INDEX_DATATYPE_NONNUMERIC); + JSONObject result = executeQuery(query); + verifySchema(result, schema("date_nanos_value", null, "timestamp")); + verifyDataRows(result, rows("2019-03-24 01:34:46.123456789")); + } + protected JSONObject executeQuery(String query) throws IOException { Request request = new Request("POST", QUERY_API_ENDPOINT); request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java index 4b39e2925c..d2798728a1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java @@ -55,9 +55,10 @@ public void typeof_opensearch_types() { response = executeJdbcRequest( String.format( - "SELECT typeof(text_value),typeof(date_value), 
typeof(boolean_value)," - + " typeof(object_value), typeof(keyword_value),typeof(ip_value)," - + " typeof(binary_value), typeof(geo_point_value)" + "SELECT typeof(text_value),typeof(date_value), typeof(date_nanos_value)," + + " typeof(boolean_value), typeof(object_value)," + + " typeof(keyword_value),typeof(ip_value), typeof(binary_value)," + + " typeof(geo_point_value)" // TODO activate this test once `ARRAY` type supported, see // ExpressionAnalyzer::isTypeNotSupported // + ", typeof(nested_value)" @@ -65,6 +66,15 @@ public void typeof_opensearch_types() { TEST_INDEX_DATATYPE_NONNUMERIC)); verifyDataRows( response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); + rows( + "TEXT", + "TIMESTAMP", + "TIMESTAMP", + "BOOLEAN", + "OBJECT", + "KEYWORD", + "IP", + "BINARY", + "GEO_POINT")); } } diff --git a/integ-test/src/test/resources/datatypes.json b/integ-test/src/test/resources/datatypes.json index ea3290ee64..70ddd28763 100644 --- a/integ-test/src/test/resources/datatypes.json +++ b/integ-test/src/test/resources/datatypes.json @@ -1,2 +1,2 @@ {"index":{"_id":"1"}} -{"boolean_value": true, "keyword_value": "keyword", "text_value": "text", "binary_value": "U29tZSBiaW5hcnkgYmxvYg==", "date_value": "2020-10-13 13:00:00", "ip_value": "127.0.0.1", "object_value": {"first": "Dale", "last": "Dale"}, "nested_value": [{"first" : "John", "last" : "Smith"}, {"first" : "Alice", "last" : "White"}], "geo_point_value": { "lat": 40.71, "lon": 74.00 }} +{"boolean_value": true, "keyword_value": "keyword", "text_value": "text", "binary_value": "U29tZSBiaW5hcnkgYmxvYg==", "date_value": "2020-10-13 13:00:00", "date_nanos_value": "2019-03-23T21:34:46.123456789-04:00", "ip_value": "127.0.0.1", "object_value": {"first": "Dale", "last": "Dale"}, "nested_value": [{"first" : "John", "last" : "Smith"}, {"first" : "Alice", "last" : "White"}], "geo_point_value": { "lat": 40.71, "lon": 74.00 }} diff --git a/integ-test/src/test/resources/date_formats.json 
b/integ-test/src/test/resources/date_formats.json index 13d46a0e8c..2ff0c867a3 100644 --- a/integ-test/src/test/resources/date_formats.json +++ b/integ-test/src/test/resources/date_formats.json @@ -1,4 +1,4 @@ {"index": {}} {"epoch_millis": "450608862000.123456", "epoch_second": "450608862.000123456", "date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time_nanos": "1984-04-12T09:07:42.000123456Z", "basic_date": "19840412", "basic_date_time": "19840412T090742.000Z", "basic_date_time_no_millis": "19840412T090742Z", "basic_ordinal_date": "1984103", "basic_ordinal_date_time": "1984103T090742.000Z", "basic_ordinal_date_time_no_millis": "1984103T090742Z", "basic_time": "090742.000Z", "basic_time_no_millis": "090742Z", "basic_t_time": "T090742.000Z", "basic_t_time_no_millis": "T090742Z", "basic_week_date": "1984W154", "strict_basic_week_date": "1984W154", "basic_week_date_time": "1984W154T090742.000Z", "strict_basic_week_date_time": "1984W154T090742.000Z", "basic_week_date_time_no_millis": "1984W154T090742Z", "strict_basic_week_date_time_no_millis": "1984W154T090742Z", "date": "1984-04-12", "strict_date": "1984-04-12", "date_hour": "1984-04-12T09", "strict_date_hour": "1984-04-12T09", "date_hour_minute": "1984-04-12T09:07", "strict_date_hour_minute": "1984-04-12T09:07", "date_hour_minute_second": "1984-04-12T09:07:42", "strict_date_hour_minute_second": "1984-04-12T09:07:42", "date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "date_time": "1984-04-12T09:07:42.000Z", "strict_date_time": "1984-04-12T09:07:42.000123456Z", "date_time_no_millis": "1984-04-12T09:07:42Z", "strict_date_time_no_millis": "1984-04-12T09:07:42Z", "hour": "09", "strict_hour": "09", "hour_minute": "09:07", 
"strict_hour_minute": "09:07", "hour_minute_second": "09:07:42", "strict_hour_minute_second": "09:07:42", "hour_minute_second_fraction": "09:07:42.000", "strict_hour_minute_second_fraction": "09:07:42.000", "hour_minute_second_millis": "09:07:42.000", "strict_hour_minute_second_millis": "09:07:42.000", "ordinal_date": "1984-103", "strict_ordinal_date": "1984-103", "ordinal_date_time": "1984-103T09:07:42.000123456Z", "strict_ordinal_date_time": "1984-103T09:07:42.000123456Z", "ordinal_date_time_no_millis": "1984-103T09:07:42Z", "strict_ordinal_date_time_no_millis": "1984-103T09:07:42Z", "time": "09:07:42.000Z", "strict_time": "09:07:42.000Z", "time_no_millis": "09:07:42Z", "strict_time_no_millis": "09:07:42Z", "t_time": "T09:07:42.000Z", "strict_t_time": "T09:07:42.000Z", "t_time_no_millis": "T09:07:42Z", "strict_t_time_no_millis": "T09:07:42Z", "week_date": "1984-W15-4", "strict_week_date": "1984-W15-4", "week_date_time": "1984-W15-4T09:07:42.000Z", "strict_week_date_time": "1984-W15-4T09:07:42.000Z", "week_date_time_no_millis": "1984-W15-4T09:07:42Z", "strict_week_date_time_no_millis": "1984-W15-4T09:07:42Z", "weekyear_week_day": "1984-W15-4", "strict_weekyear_week_day": "1984-W15-4", "year_month_day": "1984-04-12", "strict_year_month_day": "1984-04-12", "yyyy-MM-dd": "1984-04-12", "custom_time": "09:07:42 AM", "yyyy-MM-dd_OR_epoch_millis": "1984-04-12", "hour_minute_second_OR_t_time": "09:07:42", "custom_timestamp": "1984-04-12 09:07:42 ---- AM", "custom_date_or_date": "1984-04-12", "custom_date_or_custom_time": "1961-04-12", "custom_time_parser_check": "85476321", "incomplete_1" : 1984, "incomplete_2": null, "incomplete_custom_date": 1999, "incomplete_custom_time" : 10, "incorrect" : null, "epoch_sec" : 42, "epoch_milli" : 42, "custom_no_delimiter_date" : "19841020", "custom_no_delimiter_time" : "102030", "custom_no_delimiter_ts" : "19841020153548"} {"index": {}} -{"epoch_millis": "450608862000.123456", "epoch_second": "450608862.000123456", 
"date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time_nanos": "1984-04-12T09:07:42.000123456Z", "basic_date": "19840412", "basic_date_time": "19840412T090742.000Z", "basic_date_time_no_millis": "19840412T090742Z", "basic_ordinal_date": "1984103", "basic_ordinal_date_time": "1984103T090742.000Z", "basic_ordinal_date_time_no_millis": "1984103T090742Z", "basic_time": "090742.000Z", "basic_time_no_millis": "090742Z", "basic_t_time": "T090742.000Z", "basic_t_time_no_millis": "T090742Z", "basic_week_date": "1984W154", "strict_basic_week_date": "1984W154", "basic_week_date_time": "1984W154T090742.000Z", "strict_basic_week_date_time": "1984W154T090742.000Z", "basic_week_date_time_no_millis": "1984W154T090742Z", "strict_basic_week_date_time_no_millis": "1984W154T090742Z", "date": "1984-04-12", "strict_date": "1984-04-12", "date_hour": "1984-04-12T09", "strict_date_hour": "1984-04-12T09", "date_hour_minute": "1984-04-12T09:07", "strict_date_hour_minute": "1984-04-12T09:07", "date_hour_minute_second": "1984-04-12T09:07:42", "strict_date_hour_minute_second": "1984-04-12T09:07:42", "date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "date_time": "1984-04-12T09:07:42.000Z", "strict_date_time": "1984-04-12T09:07:42.000123456Z", "date_time_no_millis": "1984-04-12T09:07:42Z", "strict_date_time_no_millis": "1984-04-12T09:07:42Z", "hour": "09", "strict_hour": "09", "hour_minute": "09:07", "strict_hour_minute": "09:07", "hour_minute_second": "09:07:42", "strict_hour_minute_second": "09:07:42", "hour_minute_second_fraction": "09:07:42.000", "strict_hour_minute_second_fraction": "09:07:42.000", "hour_minute_second_millis": "09:07:42.000", "strict_hour_minute_second_millis": "09:07:42.000", 
"ordinal_date": "1984-103", "strict_ordinal_date": "1984-103", "ordinal_date_time": "1984-103T09:07:42.000123456Z", "strict_ordinal_date_time": "1984-103T09:07:42.000123456Z", "ordinal_date_time_no_millis": "1984-103T09:07:42Z", "strict_ordinal_date_time_no_millis": "1984-103T09:07:42Z", "time": "09:07:42.000Z", "strict_time": "09:07:42.000Z", "time_no_millis": "09:07:42Z", "strict_time_no_millis": "09:07:42Z", "t_time": "T09:07:42.000Z", "strict_t_time": "T09:07:42.000Z", "t_time_no_millis": "T09:07:42Z", "strict_t_time_no_millis": "T09:07:42Z", "week_date": "1984-W15-4", "strict_week_date": "1984-W15-4", "week_date_time": "1984-W15-4T09:07:42.000Z", "strict_week_date_time": "1984-W15-4T09:07:42.000Z", "week_date_time_no_millis": "1984-W15-4T09:07:42Z", "strict_week_date_time_no_millis": "1984-W15-4T09:07:42Z", "weekyear_week_day": "1984-W15-4", "strict_weekyear_week_day": "1984-W15-4", "year_month_day": "1984-04-12", "strict_year_month_day": "1984-04-12", "yyyy-MM-dd": "1984-04-12", "custom_time": "09:07:42 PM", "yyyy-MM-dd_OR_epoch_millis": "450608862000.123456", "hour_minute_second_OR_t_time": "T09:07:42.000Z", "custom_timestamp": "1984-04-12 10:07:42 ---- PM", "custom_date_or_date": "1984-04-12", "custom_date_or_custom_time": "09:07:00", "custom_time_parser_check": "::: 9-32476542", "incomplete_1" : 2012, "incomplete_2": null, "incomplete_custom_date": 3021, "incomplete_custom_time" : 20, "incorrect" : null, "epoch_sec" : 100500, "epoch_milli" : 100500, "custom_no_delimiter_date" : "19610412", "custom_no_delimiter_time" : "090700", "custom_no_delimiter_ts" : "19610412090700"} +{"epoch_millis": "450608862000.123456", "epoch_second": "450608862.000123456", "date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time": "1984-04-12T09:07:42.000Z", "strict_date_optional_time_nanos": "1984-04-12T09:07:42.000123456Z", "basic_date": "19840412", "basic_date_time": "19840412T090742.000Z", "basic_date_time_no_millis": "19840412T090742Z", 
"basic_ordinal_date": "1984103", "basic_ordinal_date_time": "1984103T090742.000Z", "basic_ordinal_date_time_no_millis": "1984103T090742Z", "basic_time": "090742.000Z", "basic_time_no_millis": "090742Z", "basic_t_time": "T090742.000Z", "basic_t_time_no_millis": "T090742Z", "basic_week_date": "1984W154", "strict_basic_week_date": "1984W154", "basic_week_date_time": "1984W154T090742.000Z", "strict_basic_week_date_time": "1984W154T090742.000Z", "basic_week_date_time_no_millis": "1984W154T090742Z", "strict_basic_week_date_time_no_millis": "1984W154T090742Z", "date": "1984-04-12", "strict_date": "1984-04-12", "date_hour": "1984-04-12T09", "strict_date_hour": "1984-04-12T09", "date_hour_minute": "1984-04-12T09:07", "strict_date_hour_minute": "1984-04-12T09:07", "date_hour_minute_second": "1984-04-12T09:07:42", "strict_date_hour_minute_second": "1984-04-12T09:07:42", "date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_fraction": "1984-04-12T09:07:42.000", "date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "strict_date_hour_minute_second_millis": "1984-04-12T09:07:42.000", "date_time": "1984-04-12T09:07:42.000Z", "strict_date_time": "1984-04-12T09:07:42.000123456Z", "date_time_no_millis": "1984-04-12T09:07:42Z", "strict_date_time_no_millis": "1984-04-12T09:07:42Z", "hour": "09", "strict_hour": "09", "hour_minute": "09:07", "strict_hour_minute": "09:07", "hour_minute_second": "09:07:42", "strict_hour_minute_second": "09:07:42", "hour_minute_second_fraction": "09:07:42.000", "strict_hour_minute_second_fraction": "09:07:42.000", "hour_minute_second_millis": "09:07:42.000", "strict_hour_minute_second_millis": "09:07:42.000", "ordinal_date": "1984-103", "strict_ordinal_date": "1984-103", "ordinal_date_time": "1984-103T09:07:42.000123456Z", "strict_ordinal_date_time": "1984-103T09:07:42.000123456Z", "ordinal_date_time_no_millis": "1984-103T09:07:42Z", "strict_ordinal_date_time_no_millis": "1984-103T09:07:42Z", "time": 
"09:07:42.000Z", "strict_time": "09:07:42.000Z", "time_no_millis": "09:07:42Z", "strict_time_no_millis": "09:07:42Z", "t_time": "T09:07:42.000Z", "strict_t_time": "T09:07:42.000Z", "t_time_no_millis": "T09:07:42Z", "strict_t_time_no_millis": "T09:07:42Z", "week_date": "1984-W15-4", "strict_week_date": "1984-W15-4", "week_date_time": "1984-W15-4T09:07:42.000Z", "strict_week_date_time": "1984-W15-4T09:07:42.000Z", "week_date_time_no_millis": "1984-W15-4T09:07:42Z", "strict_week_date_time_no_millis": "1984-W15-4T09:07:42Z", "weekyear_week_day": "1984-W15-4", "strict_weekyear_week_day": "1984-W15-4", "year_month_day": "1984-04-12", "strict_year_month_day": "1984-04-12", "yyyy-MM-dd": "1984-04-12", "custom_time": "09:07:42 PM", "yyyy-MM-dd_OR_epoch_millis": "450608862000.123456", "hour_minute_second_OR_t_time": "T07:07:42.123456789Z", "custom_timestamp": "1984-04-12 10:07:42 ---- PM", "custom_date_or_date": "1984-04-12", "custom_date_or_custom_time": "09:07:00", "custom_time_parser_check": "::: 9-32476542", "incomplete_1" : 2012, "incomplete_2": null, "incomplete_custom_date": 3021, "incomplete_custom_time" : 20, "incorrect" : null, "epoch_sec" : 100500, "epoch_milli" : 100500, "custom_no_delimiter_date" : "19610412", "custom_no_delimiter_time" : "090700", "custom_no_delimiter_ts" : "19610412090700"} diff --git a/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json b/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json index 8c1759b369..5908114201 100644 --- a/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json +++ b/integ-test/src/test/resources/indexDefinitions/datatypes_index_mapping.json @@ -13,10 +13,13 @@ "binary_value": { "type": "binary" }, - "date_value": { + "date_value": { "type" : "date", "format": "yyyy-MM-dd HH:mm:ss" }, + "date_nanos_value": { + "type" : "date_nanos" + }, "ip_value": { "type": "ip" }, diff --git 
a/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json b/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json index 65811f8d9e..0b6daaacb4 100644 --- a/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json +++ b/integ-test/src/test/resources/indexDefinitions/date_formats_index_mapping.json @@ -298,7 +298,7 @@ "format": "yyyy-MM-dd||epoch_millis" }, "hour_minute_second_OR_t_time" : { - "type" : "date", + "type" : "date_nanos", "format": "hour_minute_second||t_time" }, "custom_timestamp" : { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java index d276374539..ddbba61260 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java @@ -30,6 +30,7 @@ public enum MappingType { GeoPoint("geo_point", ExprCoreType.UNKNOWN), Binary("binary", ExprCoreType.UNKNOWN), Date("date", ExprCoreType.TIMESTAMP), + DateNanos("date_nanos", ExprCoreType.TIMESTAMP), Object("object", ExprCoreType.STRUCT), Nested("nested", ExprCoreType.ARRAY), Byte("byte", ExprCoreType.BYTE), @@ -130,6 +131,7 @@ public static Map parseMapping(Map i * @param mappingType A mapping type. * @return An instance or inheritor of `OpenSearchDataType`. 
*/ + @SuppressWarnings("unchecked") public static OpenSearchDataType of(MappingType mappingType, Map innerMap) { OpenSearchDataType res = instances.getOrDefault(mappingType.toString(), new OpenSearchDataType(mappingType)); @@ -157,6 +159,7 @@ public static OpenSearchDataType of(MappingType mappingType, Map case Ip: return OpenSearchIpType.of(); case Date: + case DateNanos: // Default date formatter is used when "" is passed as the second parameter String format = (String) innerMap.getOrDefault("format", ""); return OpenSearchDateType.of(format); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java index b0288dc9a7..82e6222dc4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java @@ -105,6 +105,7 @@ private static Stream getTestDataWithType() { Arguments.of(MappingType.Double, "double", DOUBLE), Arguments.of(MappingType.Boolean, "boolean", BOOLEAN), Arguments.of(MappingType.Date, "date", TIMESTAMP), + Arguments.of(MappingType.DateNanos, "date", TIMESTAMP), Arguments.of(MappingType.Object, "object", STRUCT), Arguments.of(MappingType.Nested, "nested", ARRAY), Arguments.of(MappingType.GeoPoint, "geo_point", OpenSearchGeoPointType.of()), From 25beda059d652eea62054445e775fae498d33d29 Mon Sep 17 00:00:00 2001 From: Matthew Wells Date: Mon, 21 Aug 2023 09:57:45 -0700 Subject: [PATCH 34/42] Remove Datetime data type (#1980) * Remove Datetime data type (#336) * removed datetime type, updated tests and documentation Signed-off-by: Matthew Wells * removed duplicate test code, replaced calls of ZoneOffset.UTC with a constant Signed-off-by: Matthew Wells * readded test and edited it to return timestamp, fixed minor checkstyle difference Signed-off-by: Matthew Wells * converted all utc 
timezone/zone id to be ZoneOffset.UTC Signed-off-by: Matthew Wells * Spotless Apply Signed-off-by: Matthew Wells * Added tests back in and updated to work with timestamp Signed-off-by: Matthew Wells * Spotless Apply Signed-off-by: Matthew Wells * removed duplicate tests, renamed test Signed-off-by: Matthew Wells --------- Signed-off-by: Matthew Wells --- .../sql/data/model/ExprDateValue.java | 10 +- .../sql/data/model/ExprDatetimeValue.java | 99 ---- .../sql/data/model/ExprStringValue.java | 22 +- .../sql/data/model/ExprTimeValue.java | 9 +- .../sql/data/model/ExprTimestampValue.java | 26 +- .../opensearch/sql/data/model/ExprValue.java | 7 - .../sql/data/model/ExprValueUtils.java | 11 +- .../sql/data/type/ExprCoreType.java | 3 +- .../org/opensearch/sql/expression/DSL.java | 4 - .../aggregation/AggregatorFunction.java | 10 - .../expression/aggregation/AvgAggregator.java | 25 - .../datetime/DateTimeFormatterUtil.java | 9 +- .../expression/datetime/DateTimeFunction.java | 501 ++++++------------ .../operator/convert/TypeCastOperator.java | 31 +- .../planner/physical/collector/Rounding.java | 34 +- .../opensearch/sql/utils/DateTimeUtils.java | 22 +- .../opensearch/sql/analysis/AnalyzerTest.java | 2 +- .../sql/data/model/DateTimeValueTest.java | 101 +--- .../sql/data/model/ExprValueCompareTest.java | 59 +-- .../sql/data/model/ExprValueUtilsTest.java | 17 +- .../sql/data/type/ExprTypeTest.java | 2 - .../aggregation/AvgAggregatorTest.java | 18 +- .../aggregation/CountAggregatorTest.java | 7 - .../aggregation/MaxAggregatorTest.java | 7 - .../aggregation/MinAggregatorTest.java | 7 - .../datetime/AddTimeAndSubTimeTest.java | 42 +- .../expression/datetime/ConvertTZTest.java | 78 +-- .../datetime/DateAddAndAddDateTest.java | 102 ++-- .../sql/expression/datetime/DateDiffTest.java | 2 +- .../datetime/DateSubAndSubDateTest.java | 80 +-- .../datetime/DateTimeFunctionTest.java | 65 +-- .../sql/expression/datetime/DateTimeTest.java | 32 +- 
.../expression/datetime/DateTimeTestBase.java | 10 +- .../sql/expression/datetime/ExtractTest.java | 6 +- .../expression/datetime/FromUnixTimeTest.java | 8 +- .../datetime/NowLikeFunctionTest.java | 22 +- .../expression/datetime/StrToDateTest.java | 37 +- .../expression/datetime/TimeStampAddTest.java | 17 +- .../datetime/TimeStampDiffTest.java | 33 +- .../expression/datetime/TimestampTest.java | 20 +- .../expression/datetime/ToSecondsTest.java | 2 - .../datetime/UnixTimeStampTest.java | 3 +- .../datetime/UnixTwoWayConversionTest.java | 17 +- .../sql/expression/datetime/YearweekTest.java | 6 +- .../BuiltinFunctionRepositoryTest.java | 4 +- .../function/WideningTypeRuleTest.java | 9 +- .../convert/TypeCastOperatorTest.java | 29 +- .../BinaryPredicateOperatorTest.java | 4 +- .../system/SystemFunctionsTest.java | 3 - .../physical/AggregationOperatorTest.java | 32 +- .../physical/PhysicalPlanTestBase.java | 21 +- .../opensearch/sql/utils/ComparisonUtil.java | 8 +- docs/dev/img/type-hierarchy-tree-final.png | Bin 0 -> 30902 bytes docs/user/dql/expressions.rst | 6 +- docs/user/dql/functions.rst | 194 ++++--- docs/user/general/datatypes.rst | 127 ++--- docs/user/ppl/functions/datetime.rst | 197 ++++--- docs/user/ppl/functions/system.rst | 2 +- docs/user/ppl/general/datatypes.rst | 37 +- .../sql/ppl/ConvertTZFunctionIT.java | 28 +- .../sql/ppl/DateTimeComparisonIT.java | 356 ------------- .../sql/ppl/DateTimeFunctionIT.java | 173 +++--- .../sql/ppl/DateTimeImplementationIT.java | 28 +- .../opensearch/sql/ppl/SystemFunctionIT.java | 7 +- .../org/opensearch/sql/sql/AggregationIT.java | 39 +- .../sql/sql/ConvertTZFunctionIT.java | 38 +- .../sql/sql/DateTimeComparisonIT.java | 356 ------------- .../sql/sql/DateTimeFunctionIT.java | 169 +++--- .../sql/sql/DateTimeImplementationIT.java | 30 +- .../opensearch/sql/sql/SystemFunctionIT.java | 5 +- .../data/type/OpenSearchDateType.java | 1 - .../value/OpenSearchExprValueFactory.java | 16 +- .../ExpressionAggregationScript.java | 1 - 
.../dsl/BucketAggregationBuilder.java | 3 +- .../script/filter/lucene/LuceneQuery.java | 10 - .../data/type/OpenSearchDateTypeTest.java | 25 +- .../value/OpenSearchExprValueFactoryTest.java | 25 +- .../AggregationQueryBuilderTest.java | 15 - .../ExpressionAggregationScriptTest.java | 9 - .../dsl/BucketAggregationBuilderTest.java | 2 +- .../filter/ExpressionFilterScriptTest.java | 10 - .../script/filter/FilterQueryBuilderTest.java | 30 +- 82 files changed, 1063 insertions(+), 2611 deletions(-) delete mode 100644 core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java create mode 100644 docs/dev/img/type-hierarchy-tree-final.png diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java index 3f3f67a4fa..c36cd3ea6d 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java @@ -6,13 +6,12 @@ package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.google.common.base.Objects; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; @@ -57,14 +56,9 @@ public LocalTime timeValue() { return LocalTime.of(0, 0, 0); } - @Override - public LocalDateTime datetimeValue() { - return LocalDateTime.of(date, timeValue()); - } - @Override public Instant timestampValue() { - return ZonedDateTime.of(date, timeValue(), UTC_ZONE_ID).toInstant(); + return ZonedDateTime.of(date, timeValue(), ZoneOffset.UTC).toInstant(); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java 
b/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java deleted file mode 100644 index 305958043f..0000000000 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.sql.data.model; - -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_WITH_TZ; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; - -import com.google.common.base.Objects; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeParseException; -import java.time.temporal.ChronoUnit; -import lombok.RequiredArgsConstructor; -import org.opensearch.sql.data.type.ExprCoreType; -import org.opensearch.sql.data.type.ExprType; -import org.opensearch.sql.exception.SemanticCheckException; - -@RequiredArgsConstructor -public class ExprDatetimeValue extends AbstractExprValue { - private final LocalDateTime datetime; - - /** Constructor with datetime string as input. 
*/ - public ExprDatetimeValue(String datetime) { - try { - this.datetime = LocalDateTime.parse(datetime, DATE_TIME_FORMATTER_WITH_TZ); - } catch (DateTimeParseException e) { - throw new SemanticCheckException( - String.format( - "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - datetime)); - } - } - - @Override - public LocalDateTime datetimeValue() { - return datetime; - } - - @Override - public LocalDate dateValue() { - return datetime.toLocalDate(); - } - - @Override - public LocalTime timeValue() { - return datetime.toLocalTime(); - } - - @Override - public Instant timestampValue() { - return ZonedDateTime.of(datetime, UTC_ZONE_ID).toInstant(); - } - - @Override - public boolean isDateTime() { - return true; - } - - @Override - public int compare(ExprValue other) { - return datetime.compareTo(other.datetimeValue()); - } - - @Override - public boolean equal(ExprValue other) { - return datetime.equals(other.datetimeValue()); - } - - @Override - public String value() { - return String.format( - "%s %s", - DateTimeFormatter.ISO_DATE.format(datetime), - DateTimeFormatter.ISO_TIME.format( - (datetime.getNano() == 0) ? 
datetime.truncatedTo(ChronoUnit.SECONDS) : datetime)); - } - - @Override - public ExprType type() { - return ExprCoreType.DATETIME; - } - - @Override - public String toString() { - return String.format("DATETIME '%s'", value()); - } - - @Override - public int hashCode() { - return Objects.hashCode(datetime); - } -} diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java index 7745af62b6..f2e63e986d 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java @@ -5,6 +5,7 @@ package org.opensearch.sql.data.model; +import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; @@ -35,27 +36,20 @@ public String stringValue() { } @Override - public LocalDateTime datetimeValue() { + public Instant timestampValue() { try { - return new ExprDatetimeValue(value).datetimeValue(); + return new ExprTimestampValue(value).timestampValue(); } catch (SemanticCheckException e) { - try { - return new ExprDatetimeValue( - LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) - .datetimeValue(); - } catch (SemanticCheckException exception) { - throw new SemanticCheckException( - String.format( - "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - value)); - } + return new ExprTimestampValue( + LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) + .timestampValue(); } } @Override public LocalDate dateValue() { try { - return new ExprDatetimeValue(value).dateValue(); + return new ExprTimestampValue(value).dateValue(); } catch (SemanticCheckException e) { return new ExprDateValue(value).dateValue(); } @@ -64,7 +58,7 @@ public LocalDate dateValue() { @Override public LocalTime timeValue() { try { - return new ExprDatetimeValue(value).timeValue(); + return new 
ExprTimestampValue(value).timeValue(); } catch (SemanticCheckException e) { return new ExprTimeValue(value).timeValue(); } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java index d808af49b1..6b5a4a7c48 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimeValue.java @@ -7,12 +7,11 @@ import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.util.Objects; @@ -57,12 +56,8 @@ public LocalDate dateValue(FunctionProperties functionProperties) { return LocalDate.now(functionProperties.getQueryStartClock()); } - public LocalDateTime datetimeValue(FunctionProperties functionProperties) { - return LocalDateTime.of(dateValue(functionProperties), timeValue()); - } - public Instant timestampValue(FunctionProperties functionProperties) { - return ZonedDateTime.of(dateValue(functionProperties), timeValue(), UTC_ZONE_ID).toInstant(); + return ZonedDateTime.of(dateValue(functionProperties), timeValue(), ZoneOffset.UTC).toInstant(); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java index 455a379b03..e103dc7253 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java @@ -7,12 +7,12 @@ import static 
org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_WITHOUT_NANO; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.format.DateTimeParseException; import java.time.temporal.ChronoUnit; import java.util.Objects; @@ -32,7 +32,7 @@ public ExprTimestampValue(String timestamp) { try { this.timestamp = LocalDateTime.parse(timestamp, DATE_TIME_FORMATTER_VARIABLE_NANOS) - .atZone(UTC_ZONE_ID) + .atZone(ZoneOffset.UTC) .toInstant(); } catch (DateTimeParseException e) { throw new SemanticCheckException( @@ -42,13 +42,18 @@ public ExprTimestampValue(String timestamp) { } } + /** localDateTime Constructor. */ + public ExprTimestampValue(LocalDateTime localDateTime) { + this.timestamp = localDateTime.atZone(ZoneOffset.UTC).toInstant(); + } + @Override public String value() { return timestamp.getNano() == 0 ? 
DATE_TIME_FORMATTER_WITHOUT_NANO - .withZone(UTC_ZONE_ID) + .withZone(ZoneOffset.UTC) .format(timestamp.truncatedTo(ChronoUnit.SECONDS)) - : DATE_TIME_FORMATTER_VARIABLE_NANOS.withZone(UTC_ZONE_ID).format(timestamp); + : DATE_TIME_FORMATTER_VARIABLE_NANOS.withZone(ZoneOffset.UTC).format(timestamp); } @Override @@ -63,17 +68,12 @@ public Instant timestampValue() { @Override public LocalDate dateValue() { - return timestamp.atZone(UTC_ZONE_ID).toLocalDate(); + return timestamp.atZone(ZoneOffset.UTC).toLocalDate(); } @Override public LocalTime timeValue() { - return timestamp.atZone(UTC_ZONE_ID).toLocalTime(); - } - - @Override - public LocalDateTime datetimeValue() { - return timestamp.atZone(UTC_ZONE_ID).toLocalDateTime(); + return timestamp.atZone(ZoneOffset.UTC).toLocalTime(); } @Override @@ -88,12 +88,12 @@ public String toString() { @Override public int compare(ExprValue other) { - return timestamp.compareTo(other.timestampValue().atZone(UTC_ZONE_ID).toInstant()); + return timestamp.compareTo(other.timestampValue().atZone(ZoneOffset.UTC).toInstant()); } @Override public boolean equal(ExprValue other) { - return timestamp.equals(other.timestampValue().atZone(UTC_ZONE_ID).toInstant()); + return timestamp.equals(other.timestampValue().atZone(ZoneOffset.UTC).toInstant()); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java index 86bead77b7..034ed22a75 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java @@ -8,7 +8,6 @@ import java.io.Serializable; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; import java.time.temporal.TemporalAmount; import java.util.List; @@ -133,12 +132,6 @@ default LocalDate dateValue() { "invalid to get dateValue from value of type " + type()); } - /** Get datetime value. 
*/ - default LocalDateTime datetimeValue() { - throw new ExpressionEvaluationException( - "invalid to get datetimeValue from value of type " + type()); - } - /** Get interval value. */ default TemporalAmount intervalValue() { throw new ExpressionEvaluationException( diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java index a259eb9fba..20813045f2 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java @@ -9,6 +9,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.TemporalAmount; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -66,10 +67,6 @@ public static ExprValue dateValue(LocalDate value) { return new ExprDateValue(value); } - public static ExprValue datetimeValue(LocalDateTime value) { - return new ExprDatetimeValue(value); - } - public static ExprValue timeValue(LocalTime value) { return new ExprTimeValue(value); } @@ -128,14 +125,14 @@ public static ExprValue fromObjectValue(Object o) { return floatValue((Float) o); } else if (o instanceof LocalDate) { return dateValue((LocalDate) o); - } else if (o instanceof LocalDateTime) { - return datetimeValue((LocalDateTime) o); } else if (o instanceof LocalTime) { return timeValue((LocalTime) o); } else if (o instanceof Instant) { return timestampValue((Instant) o); } else if (o instanceof TemporalAmount) { return intervalValue((TemporalAmount) o); + } else if (o instanceof LocalDateTime) { + return timestampValue(((LocalDateTime) o).toInstant(ZoneOffset.UTC)); } else { throw new ExpressionEvaluationException("unsupported object " + o.getClass()); } @@ -150,8 +147,6 @@ public static ExprValue fromObjectValue(Object o, ExprCoreType type) { return new ExprDateValue((String) o); case TIME: return new 
ExprTimeValue((String) o); - case DATETIME: - return new ExprDatetimeValue((String) o); default: return fromObjectValue(o); } diff --git a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java index f1979d8666..cbc0c98255 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java +++ b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java @@ -42,8 +42,7 @@ public enum ExprCoreType implements ExprType { /** Date. */ DATE(STRING), TIME(STRING), - DATETIME(STRING, DATE, TIME), - TIMESTAMP(STRING, DATETIME), + TIMESTAMP(STRING, DATE, TIME), INTERVAL(UNDEFINED), /** Struct. */ diff --git a/core/src/main/java/org/opensearch/sql/expression/DSL.java b/core/src/main/java/org/opensearch/sql/expression/DSL.java index 4341668b69..12a7faafb2 100644 --- a/core/src/main/java/org/opensearch/sql/expression/DSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/DSL.java @@ -819,10 +819,6 @@ public static FunctionExpression castTimestamp(Expression value) { return compile(FunctionProperties.None, BuiltinFunctionName.CAST_TO_TIMESTAMP, value); } - public static FunctionExpression castDatetime(Expression value) { - return compile(FunctionProperties.None, BuiltinFunctionName.CAST_TO_DATETIME, value); - } - public static FunctionExpression typeof(Expression value) { return compile(FunctionProperties.None, BuiltinFunctionName.TYPEOF, value); } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java index 4a1d4d309b..bfc92d73c6 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java @@ -7,7 +7,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static 
org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -71,9 +70,6 @@ private static DefaultFunctionResolver avg() { .put( new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new AvgAggregator(arguments, DATE)) - .put( - new FunctionSignature(functionName, Collections.singletonList(DATETIME)), - (functionProperties, arguments) -> new AvgAggregator(arguments, DATETIME)) .put( new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new AvgAggregator(arguments, TIME)) @@ -142,9 +138,6 @@ private static DefaultFunctionResolver min() { .put( new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new MinAggregator(arguments, DATE)) - .put( - new FunctionSignature(functionName, Collections.singletonList(DATETIME)), - (functionProperties, arguments) -> new MinAggregator(arguments, DATETIME)) .put( new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MinAggregator(arguments, TIME)) @@ -177,9 +170,6 @@ private static DefaultFunctionResolver max() { .put( new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new MaxAggregator(arguments, DATE)) - .put( - new FunctionSignature(functionName, Collections.singletonList(DATETIME)), - (functionProperties, arguments) -> new MaxAggregator(arguments, DATETIME)) .put( new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MaxAggregator(arguments, TIME)) diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java 
b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java index c528968018..c32ebb6071 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java @@ -13,7 +13,6 @@ import java.util.List; import java.util.Locale; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprNullValue; @@ -47,8 +46,6 @@ public AvgState create() { switch (dataType) { case DATE: return new DateAvgState(); - case DATETIME: - return new DateTimeAvgState(); case TIMESTAMP: return new TimestampAvgState(); case TIME: @@ -128,28 +125,6 @@ protected AvgState iterate(ExprValue value) { } } - protected static class DateTimeAvgState extends AvgState { - @Override - public ExprValue result() { - if (0 == count.integerValue()) { - return ExprNullValue.of(); - } - - return new ExprDatetimeValue( - new ExprTimestampValue( - Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) - .datetimeValue()); - } - - @Override - protected AvgState iterate(ExprValue value) { - total = - DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); - return super.iterate(value); - } - } - protected static class TimestampAvgState extends AvgState { @Override public ExprValue result() { diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java index 13f9a077e4..d23cbc2df3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java @@ -12,6 +12,7 @@ import 
java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.ResolverStyle; @@ -21,9 +22,9 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.function.FunctionProperties; @@ -245,12 +246,12 @@ static ExprValue getFormattedString( /** * Format the date using the date format String. * - * @param dateExpr the date ExprValue of Date/Datetime/Timestamp/String type. + * @param dateExpr the date ExprValue of Date/Timestamp/String type. * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. 
*/ static ExprValue getFormattedDate(ExprValue dateExpr, ExprValue formatExpr) { - final LocalDateTime date = dateExpr.datetimeValue(); + final LocalDateTime date = dateExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); return getFormattedString(formatExpr, DATE_HANDLERS, date); } @@ -364,7 +365,7 @@ static ExprValue parseStringWithDateOrTime( output = LocalDateTime.of(year, month, day, hour, minute, second); } - return new ExprDatetimeValue(output); + return new ExprTimestampValue(output); } /** diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java index d17d59d358..a42a599ad8 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java @@ -14,7 +14,6 @@ import static java.time.temporal.ChronoUnit.WEEKS; import static java.time.temporal.ChronoUnit.YEARS; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -41,9 +40,8 @@ import static org.opensearch.sql.utils.DateTimeFormatters.SHORT_DATE_LENGTH; import static org.opensearch.sql.utils.DateTimeFormatters.SINGLE_DIGIT_MONTH_DATE_LENGTH; import static org.opensearch.sql.utils.DateTimeFormatters.SINGLE_DIGIT_YEAR_DATE_LENGTH; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import static org.opensearch.sql.utils.DateTimeUtils.extractDate; -import static org.opensearch.sql.utils.DateTimeUtils.extractDateTime; +import static org.opensearch.sql.utils.DateTimeUtils.extractTimestamp; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableTable; @@ -74,13 
+72,13 @@ import lombok.experimental.UtilityClass; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprLongValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.exception.ExpressionEvaluationException; @@ -110,7 +108,7 @@ public class DateTimeFunction { // The number of days from year zero to year 1970. private static final Long DAYS_0000_TO_1970 = (146097 * 5L) - (30L * 365L + 7L); - // MySQL doesn't process any datetime/timestamp values which are greater than + // MySQL doesn't process any timestamp values which are greater than // 32536771199.999999, or equivalent '3001-01-18 23:59:59.999999' UTC private static final Double MYSQL_MAX_TIMESTAMP = 32536771200d; @@ -150,11 +148,6 @@ public class DateTimeFunction { .put("date", "iso", "%Y-%m-%d") .put("date", "eur", "%d.%m.%Y") .put("date", "internal", "%Y%m%d") - .put("datetime", "usa", "%Y-%m-%d %H.%i.%s") - .put("datetime", "jis", "%Y-%m-%d %H:%i:%s") - .put("datetime", "iso", "%Y-%m-%d %H:%i:%s") - .put("datetime", "eur", "%Y-%m-%d %H.%i.%s") - .put("datetime", "internal", "%Y%m%d%H%i%s") .put("time", "usa", "%h:%i:%s %p") .put("time", "jis", "%H:%i:%s") .put("time", "iso", "%H:%i:%s") @@ -255,8 +248,8 @@ private FunctionResolver now(FunctionName functionName) { functionName, implWithProperties( functionProperties -> - new ExprDatetimeValue(formatNow(functionProperties.getQueryStartClock())), - DATETIME)); + new 
ExprTimestampValue(formatNow(functionProperties.getQueryStartClock())), + TIMESTAMP)); } private FunctionResolver now() { @@ -280,12 +273,12 @@ private FunctionResolver sysdate() { return define( BuiltinFunctionName.SYSDATE.getName(), implWithProperties( - functionProperties -> new ExprDatetimeValue(formatNow(Clock.systemDefaultZone())), - DATETIME), + functionProperties -> new ExprTimestampValue(formatNow(Clock.systemDefaultZone())), + TIMESTAMP), FunctionDSL.implWithProperties( (functionProperties, v) -> - new ExprDatetimeValue(formatNow(Clock.systemDefaultZone(), v.integerValue())), - DATETIME, + new ExprTimestampValue(formatNow(Clock.systemDefaultZone(), v.integerValue())), + TIMESTAMP, INTEGER)); } @@ -329,37 +322,34 @@ private FunctionResolver current_date() { * Specify a start date and add/subtract a temporal amount to/from the date.
* The return type depends on the date type and the interval unit. Detailed supported signatures: *
- * (DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME
+ * (DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP
* MySQL has these signatures too
* (DATE, INTERVAL) -> DATE // when interval has no time part
* (TIME, INTERVAL) -> TIME // when interval has no date part
- * (STRING, INTERVAL) -> STRING // when argument has date or datetime string,
- * // result has date or datetime depending on interval type
+ * (STRING, INTERVAL) -> STRING // when argument has date or timestamp string,
+ * // result has date or timestamp depending on interval type
*/ private Stream> get_date_add_date_sub_signatures( SerializableTriFunction function) { return Stream.of( - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATE, INTERVAL), - implWithProperties( - nullMissingHandlingWithProperties(function), DATETIME, DATETIME, INTERVAL), + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, DATE, INTERVAL), implWithProperties( - nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, INTERVAL)); + nullMissingHandlingWithProperties(function), TIMESTAMP, TIMESTAMP, INTERVAL), + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, TIME, INTERVAL)); } /** * A common signature for `adddate` and `subdate`.
* Adds/subtracts an integer number of days to/from the first argument.
* (DATE, LONG) -> DATE
- * (TIME/DATETIME/TIMESTAMP, LONG) -> DATETIME + * (TIME/TIMESTAMP, LONG) -> TIMESTAMP */ private Stream> get_adddate_subdate_signatures( SerializableTriFunction function) { return Stream.of( implWithProperties(nullMissingHandlingWithProperties(function), DATE, DATE, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATETIME, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, LONG)); + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, TIMESTAMP, LONG), + implWithProperties(nullMissingHandlingWithProperties(function), TIMESTAMP, TIME, LONG)); } private DefaultFunctionResolver adddate() { @@ -374,8 +364,8 @@ private DefaultFunctionResolver adddate() { /** * Adds expr2 to expr1 and returns the result.
- * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
- * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * (TIME, TIME/DATE/TIMESTAMP) -> TIME
+ * (DATE/TIMESTAMP, TIME/DATE/TIMESTAMP) -> TIMESTAMP
* TODO: MySQL has these signatures too
* (STRING, STRING/TIME) -> STRING // second arg - string with time only
* (x, STRING) -> NULL // second arg - string with timestamp
@@ -388,8 +378,6 @@ private DefaultFunctionResolver addtime() { nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATETIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, @@ -397,56 +385,32 @@ private DefaultFunctionResolver addtime() { TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, + TIMESTAMP, + DATE, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, - DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, - DATETIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, TIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, DATE, - DATETIME), + DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, DATE, TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, TIMESTAMP, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, + TIMESTAMP, TIMESTAMP, DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, - DATETIME), - implWithProperties( - 
nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, TIMESTAMP)); } @@ -454,21 +418,21 @@ private DefaultFunctionResolver addtime() { /** * Converts date/time from a specified timezone to another specified timezone.
* The supported signatures:
- * (DATETIME, STRING, STRING) -> DATETIME
- * (STRING, STRING, STRING) -> DATETIME + * (TIMESTAMP, STRING, STRING) -> TIMESTAMP
+ * (STRING, STRING, STRING) -> TIMESTAMP */ private DefaultFunctionResolver convert_tz() { return define( BuiltinFunctionName.CONVERT_TZ.getName(), impl( nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, - DATETIME, + TIMESTAMP, + TIMESTAMP, STRING, STRING), impl( nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, + TIMESTAMP, STRING, STRING, STRING)); @@ -476,41 +440,25 @@ private DefaultFunctionResolver convert_tz() { /** * Extracts the date part of a date and time value. Also to construct a date type. The supported - * signatures: STRING/DATE/DATETIME/TIMESTAMP -> DATE + * signatures: STRING/DATE/TIMESTAMP -> DATE */ private DefaultFunctionResolver date() { return define( BuiltinFunctionName.DATE.getName(), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, STRING), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, TIMESTAMP)); } - /* - * Calculates the difference of date part of given values. - * (DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME) -> LONG + /** + * Calculates the difference of date part of given values.
+ * (DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME) -> LONG */ private DefaultFunctionResolver datediff() { return define( BuiltinFunctionName.DATEDIFF.getName(), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATE, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - DATETIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, TIME), implWithProperties( @@ -541,40 +489,20 @@ private DefaultFunctionResolver datediff() { nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, TIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - TIMESTAMP, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - TIME, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, - DATETIME, - TIME)); + TIMESTAMP)); } /** * Specify a datetime with time zone field and a time zone to convert to.
- * Returns a local date time.
- * (STRING, STRING) -> DATETIME
- * (STRING) -> DATETIME + * Returns a local datetime.
+ * (STRING, STRING) -> TIMESTAMP
+ * (STRING) -> TIMESTAMP */ private FunctionResolver datetime() { return define( BuiltinFunctionName.DATETIME.getName(), - impl(nullMissingHandling(DateTimeFunction::exprDateTime), DATETIME, STRING, STRING), - impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), DATETIME, STRING)); + impl(nullMissingHandling(DateTimeFunction::exprDateTime), TIMESTAMP, STRING, STRING), + impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), TIMESTAMP, STRING)); } private DefaultFunctionResolver date_add() { @@ -593,30 +521,28 @@ private DefaultFunctionResolver date_sub() { .toArray(SerializableFunction[]::new)); } - /** DAY(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). */ + /** DAY(STRING/DATE/TIMESTAMP). return the day of the month (1-31). */ private DefaultFunctionResolver day() { return define( BuiltinFunctionName.DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING)); } /** - * DAYNAME(STRING/DATE/DATETIME/TIMESTAMP). return the name of the weekday for date, including + * DAYNAME(STRING/DATE/TIMESTAMP). return the name of the weekday for date, including
* Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. */ private DefaultFunctionResolver dayName() { return define( BuiltinFunctionName.DAYNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, STRING)); } - /** DAYOFMONTH(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). */ + /** DAYOFMONTH(STRING/DATE/TIMESTAMP). return the day of the month (1-31). */ private DefaultFunctionResolver dayOfMonth(BuiltinFunctionName name) { return define( name.getName(), @@ -627,14 +553,13 @@ private DefaultFunctionResolver dayOfMonth(BuiltinFunctionName name) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP)); } /** - * DAYOFWEEK(STRING/DATE/DATETIME/TIME/TIMESTAMP). return the weekday index for date (1 = Sunday, - * 2 = Monday, ..., 7 = Saturday). + * DAYOFWEEK(STRING/DATE/TIME/TIMESTAMP). return the weekday index for date (1 = Sunday, 2 = + * Monday, ..., 7 = Saturday). */ private DefaultFunctionResolver dayOfWeek(FunctionName name) { return define( @@ -646,12 +571,11 @@ private DefaultFunctionResolver dayOfWeek(FunctionName name) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, STRING)); } - /** DAYOFYEAR(STRING/DATE/DATETIME/TIMESTAMP). 
return the day of the year for date (1-366). */ + /** DAYOFYEAR(STRING/DATE/TIMESTAMP). return the day of the year for date (1-366). */ private DefaultFunctionResolver dayOfYear(BuiltinFunctionName dayOfYear) { return define( dayOfYear.getName(), @@ -662,7 +586,6 @@ private DefaultFunctionResolver dayOfYear(BuiltinFunctionName dayOfYear) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, STRING)); } @@ -676,7 +599,6 @@ private DefaultFunctionResolver extract() { STRING, TIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATE), - impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, STRING)); } @@ -691,7 +613,7 @@ private DefaultFunctionResolver from_days() { private FunctionResolver from_unixtime() { return define( BuiltinFunctionName.FROM_UNIXTIME.getName(), - impl(nullMissingHandling(DateTimeFunction::exprFromUnixTime), DATETIME, DOUBLE), + impl(nullMissingHandling(DateTimeFunction::exprFromUnixTime), TIMESTAMP, DOUBLE), impl( nullMissingHandling(DateTimeFunction::exprFromUnixTimeFormat), STRING, DOUBLE, STRING)); } @@ -702,14 +624,13 @@ private DefaultFunctionResolver get_format() { impl(nullMissingHandling(DateTimeFunction::exprGetFormat), STRING, STRING, STRING)); } - /** HOUR(STRING/TIME/DATETIME/DATE/TIMESTAMP). return the hour value for time. */ + /** HOUR(STRING/TIME/DATE/TIMESTAMP). return the hour value for time. 
*/ private DefaultFunctionResolver hour(BuiltinFunctionName name) { return define( name.getName(), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIMESTAMP)); } @@ -724,7 +645,6 @@ private DefaultFunctionResolver last_day() { DATE, TIME), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATE), - impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, TIMESTAMP)); } @@ -740,39 +660,36 @@ private FunctionResolver maketime() { impl(nullMissingHandling(DateTimeFunction::exprMakeTime), TIME, DOUBLE, DOUBLE, DOUBLE)); } - /** MICROSECOND(STRING/TIME/DATETIME/TIMESTAMP). return the microsecond value for time. */ + /** MICROSECOND(STRING/TIME/TIMESTAMP). return the microsecond value for time. */ private DefaultFunctionResolver microsecond() { return define( BuiltinFunctionName.MICROSECOND.getName(), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIME), - impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIMESTAMP)); } - /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. */ + /** MINUTE(STRING/TIME/TIMESTAMP). return the minute value for time. 
*/ private DefaultFunctionResolver minute(BuiltinFunctionName name) { return define( name.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIME), - impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIMESTAMP)); } - /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. */ + /** MINUTE(STRING/TIME/TIMESTAMP). return the minute value for time. */ private DefaultFunctionResolver minute_of_day() { return define( BuiltinFunctionName.MINUTE_OF_DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIMESTAMP)); } - /** MONTH(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-12). */ + /** MONTH(STRING/DATE/TIMESTAMP). return the month for date (1-12). */ private DefaultFunctionResolver month(BuiltinFunctionName month) { return define( month.getName(), @@ -783,17 +700,15 @@ private DefaultFunctionResolver month(BuiltinFunctionName month) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, STRING)); } - /** MONTHNAME(STRING/DATE/DATETIME/TIMESTAMP). return the full name of the month for date. */ + /** MONTHNAME(STRING/DATE/TIMESTAMP). return the full name of the month for date. 
*/ private DefaultFunctionResolver monthName() { return define( BuiltinFunctionName.MONTHNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, STRING)); } @@ -819,12 +734,11 @@ private DefaultFunctionResolver period_diff() { impl(nullMissingHandling(DateTimeFunction::exprPeriodDiff), INTEGER, INTEGER, INTEGER)); } - /** QUARTER(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-4). */ + /** QUARTER(STRING/DATE/TIMESTAMP). return the month for date (1-4). */ private DefaultFunctionResolver quarter() { return define( BuiltinFunctionName.QUARTER.getName(), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, STRING)); } @@ -838,14 +752,13 @@ private DefaultFunctionResolver sec_to_time() { impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, FLOAT)); } - /** SECOND(STRING/TIME/DATETIME/TIMESTAMP). return the second value for time. */ + /** SECOND(STRING/TIME/TIMESTAMP). return the second value for time. 
*/ private DefaultFunctionResolver second(BuiltinFunctionName name) { return define( name.getName(), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIMESTAMP)); } @@ -861,8 +774,8 @@ private DefaultFunctionResolver subdate() { /** * Subtracts expr2 from expr1 and returns the result.
- * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
- * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * (TIME, TIME/DATE/TIMESTAMP) -> TIME
+ * (DATE/TIMESTAMP, TIME/DATE/TIMESTAMP) -> TIMESTAMP
* TODO: MySQL has these signatures too
* (STRING, STRING/TIME) -> STRING // second arg - string with time only
* (x, STRING) -> NULL // second arg - string with timestamp
@@ -875,8 +788,6 @@ private DefaultFunctionResolver subtime() { nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATETIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, @@ -884,62 +795,38 @@ private DefaultFunctionResolver subtime() { TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, + TIMESTAMP, + TIMESTAMP, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, + TIMESTAMP, + TIMESTAMP, DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATETIME, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, TIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, DATE), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATE, - DATETIME), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, - DATE, - TIMESTAMP), - implWithProperties( - nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, + DATE, TIME), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, + DATE, DATE), implWithProperties( nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, - DATETIME), + DATE, + TIMESTAMP), implWithProperties( 
nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, + TIMESTAMP, TIMESTAMP, TIMESTAMP)); } /** - * Extracts a date, time, or datetime from the given string. It accomplishes this using another + * Extracts a date, time, or timestamp from the given string. It accomplishes this using another * string which specifies the input format. */ private DefaultFunctionResolver str_to_date() { @@ -949,21 +836,20 @@ private DefaultFunctionResolver str_to_date() { nullMissingHandlingWithProperties( (functionProperties, arg, format) -> DateTimeFunction.exprStrToDate(functionProperties, arg, format)), - DATETIME, + TIMESTAMP, STRING, STRING)); } /** * Extracts the time part of a date and time value. Also to construct a time type. The supported - * signatures: STRING/DATE/DATETIME/TIME/TIMESTAMP -> TIME + * signatures: STRING/DATE/TIME/TIMESTAMP -> TIME */ private DefaultFunctionResolver time() { return define( BuiltinFunctionName.TIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, STRING), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATE), - impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, TIME), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, TIMESTAMP)); } @@ -973,7 +859,6 @@ private DefaultFunctionResolver time() { * (TIME, TIME) -> TIME
* MySQL has these signatures too
* (DATE, DATE) -> TIME // result is > 24 hours
- * (DATETIME, DATETIME) -> TIME // result is > 24 hours
* (TIMESTAMP, TIMESTAMP) -> TIME // result is > 24 hours
* (x, x) -> NULL // when args have different types
* (STRING, STRING) -> TIME // argument strings contain same types only
@@ -985,23 +870,20 @@ private DefaultFunctionResolver timediff() { impl(nullMissingHandling(DateTimeFunction::exprTimeDiff), TIME, TIME, TIME)); } - /** - * TIME_TO_SEC(STRING/TIME/DATETIME/TIMESTAMP). return the time argument, converted to seconds. - */ + /** TIME_TO_SEC(STRING/TIME/TIMESTAMP). return the time argument, converted to seconds. */ private DefaultFunctionResolver time_to_sec() { return define( BuiltinFunctionName.TIME_TO_SEC.getName(), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIME), - impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, DATETIME)); + impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIMESTAMP)); } /** * Extracts the timestamp of a date and time value.
* Input strings may contain a timestamp only in format 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'
- * STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
- * STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
+ * STRING/DATE/TIME/TIMESTAMP -> TIMESTAMP
+ * STRING/DATE/TIME/TIMESTAMP, STRING/DATE/TIME/TIMESTAMP -> TIMESTAMP
* All types are converted to TIMESTAMP actually before the function call - it is responsibility *
* of the automatic cast mechanism defined in `ExprCoreType` and performed by `TypeCastOperator`. @@ -1020,27 +902,20 @@ private DefaultFunctionResolver timestamp() { } /** - * Adds an interval of time to the provided DATE/DATETIME/TIME/TIMESTAMP/STRING argument. The - * interval of time added is determined by the given first and second arguments. The first - * argument is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, - * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument is the amount of the - * interval type to be added. The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING to add - * to. + * Adds an interval of time to the provided DATE/TIME/TIMESTAMP/STRING argument. The interval of + * time added is determined by the given first and second arguments. The first argument is an + * interval type, and must be one of the tokens below... [MICROSECOND, SECOND, MINUTE, HOUR, DAY, + * WEEK, MONTH, QUARTER, YEAR] The second argument is the amount of the interval type to be added. + * The third argument is the DATE/TIME/TIMESTAMP/STRING to add to. * - * @return The DATETIME representing the summed DATE/DATETIME/TIME/TIMESTAMP and interval. + * @return The TIMESTAMP representing the summed DATE/TIME/TIMESTAMP and interval. */ private DefaultFunctionResolver timestampadd() { return define( BuiltinFunctionName.TIMESTAMPADD.getName(), impl( nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, - STRING, - INTEGER, - DATETIME), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, + TIMESTAMP, STRING, INTEGER, TIMESTAMP), @@ -1049,18 +924,18 @@ private DefaultFunctionResolver timestampadd() { (functionProperties, part, amount, time) -> exprTimestampAddForTimeType( functionProperties.getQueryStartClock(), part, amount, time)), - DATETIME, + TIMESTAMP, STRING, INTEGER, TIME)); } /** - * Finds the difference between provided DATE/DATETIME/TIME/TIMESTAMP/STRING arguments. 
The first - * argument is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, - * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument the - * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the start time. The third argument is the - * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the end time. + * Finds the difference between provided DATE/TIME/TIMESTAMP/STRING arguments. The first argument + * is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, MINUTE, HOUR, + * DAY, WEEK, MONTH, QUARTER, YEAR] The second argument the DATE/TIME/TIMESTAMP/STRING + * representing the start time. The third argument is the DATE/TIME/TIMESTAMP/STRING representing + * the end time. * * @return A LONG representing the difference between arguments, using the given interval type. */ @@ -1069,25 +944,7 @@ private DefaultFunctionResolver timestampdiff() { BuiltinFunctionName.TIMESTAMPDIFF.getName(), impl( nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, - STRING, - DATETIME, - DATETIME), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, - STRING, - DATETIME, - TIMESTAMP), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, - STRING, TIMESTAMP, - DATETIME), - impl( - nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, TIMESTAMP, TIMESTAMP), @@ -1095,20 +952,19 @@ private DefaultFunctionResolver timestampdiff() { nullMissingHandlingWithProperties( (functionProperties, part, startTime, endTime) -> exprTimestampDiffForTimeType(functionProperties, part, startTime, endTime)), - DATETIME, + TIMESTAMP, STRING, TIME, TIME)); } - /** TO_DAYS(STRING/DATE/DATETIME/TIMESTAMP). return the day number of the given date. */ + /** TO_DAYS(STRING/DATE/TIMESTAMP). return the day number of the given date. 
*/ private DefaultFunctionResolver to_days() { return define( BuiltinFunctionName.TO_DAYS.getName(), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATE), - impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATETIME)); + impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATE)); } /** @@ -1131,7 +987,6 @@ private FunctionResolver unix_timestamp() { DateTimeFunction.unixTimeStamp(functionProperties.getQueryStartClock()), LONG), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATE), - impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATETIME), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DOUBLE)); } @@ -1154,7 +1009,7 @@ private DefaultFunctionResolver utc_time() { private DefaultFunctionResolver utc_timestamp() { return define( BuiltinFunctionName.UTC_TIMESTAMP.getName(), - implWithProperties(functionProperties -> exprUtcTimeStamp(functionProperties), DATETIME)); + implWithProperties(functionProperties -> exprUtcTimeStamp(functionProperties), TIMESTAMP)); } /** WEEK(DATE[,mode]). return the week number for date. 
*/ @@ -1169,7 +1024,6 @@ private DefaultFunctionResolver week(BuiltinFunctionName week) { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, STRING), implWithProperties( @@ -1181,7 +1035,6 @@ private DefaultFunctionResolver week(BuiltinFunctionName week) { TIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATE, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, TIMESTAMP, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, STRING, INTEGER)); } @@ -1198,17 +1051,15 @@ private DefaultFunctionResolver weekday() { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, STRING)); } - /** YEAR(STRING/DATE/DATETIME/TIMESTAMP). return the year for date (1000-9999). */ + /** YEAR(STRING/DATE/TIMESTAMP). return the year for date (1000-9999). 
*/ private DefaultFunctionResolver year() { return define( BuiltinFunctionName.YEAR.getName(), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, STRING)); } @@ -1225,7 +1076,6 @@ private DefaultFunctionResolver yearweek() { INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, STRING), implWithProperties( @@ -1236,7 +1086,6 @@ private DefaultFunctionResolver yearweek() { TIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATE, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, TIMESTAMP, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, STRING, INTEGER)); } @@ -1246,7 +1095,6 @@ private DefaultFunctionResolver yearweek() { * Detailed supported signatures:
* (STRING, STRING) -> STRING
* (DATE, STRING) -> STRING
- * (DATETIME, STRING) -> STRING
* (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ @@ -1255,8 +1103,6 @@ private DefaultFunctionResolver date_format() { BuiltinFunctionName.DATE_FORMAT.getName(), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, STRING, STRING), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATE, STRING), - impl( - nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATETIME, STRING), implWithProperties( nullMissingHandlingWithProperties( (functionProperties, time, formatString) -> @@ -1299,9 +1145,9 @@ private ExprValue dayOfWeekToday(Clock clock) { * DATE_ADD function implementation for ExprValue. * * @param functionProperties An FunctionProperties object. - * @param datetime ExprValue of Date/Time/Datetime/Timestamp type. + * @param datetime ExprValue of Date/Time/Timestamp type. * @param interval ExprValue of Interval type, the temporal amount to add. - * @return Datetime resulted from `interval` added to `datetime`. + * @return Timestamp resulted from `interval` added to `timestamp`. */ private ExprValue exprAddDateInterval( FunctionProperties functionProperties, ExprValue datetime, ExprValue interval) { @@ -1309,21 +1155,22 @@ private ExprValue exprAddDateInterval( } /** - * Adds or subtracts `interval` to/from `datetime`. + * Adds or subtracts `interval` to/from `timestamp`. * * @param functionProperties An FunctionProperties object. - * @param datetime A Date/Time/Datetime/Timestamp value to change. + * @param datetime A Date/Time/Timestamp value to change. * @param interval An Interval to isAdd or subtract. * @param isAdd A flag: true to isAdd, false to subtract. - * @return Datetime calculated. + * @return Timestamp calculated. */ private ExprValue exprDateApplyInterval( FunctionProperties functionProperties, ExprValue datetime, TemporalAmount interval, Boolean isAdd) { - var dt = extractDateTime(datetime, functionProperties); - return new ExprDatetimeValue(isAdd ? 
dt.plus(interval) : dt.minus(interval)); + var dt = + extractTimestamp(datetime, functionProperties).atZone(ZoneOffset.UTC).toLocalDateTime(); + return new ExprTimestampValue(isAdd ? dt.plus(interval) : dt.minus(interval)); } /** @@ -1331,7 +1178,6 @@ private ExprValue exprDateApplyInterval( * Detailed supported signatures:
* (STRING, STRING) -> STRING
* (DATE, STRING) -> STRING
- * (DATETIME, STRING) -> STRING
* (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ @@ -1340,8 +1186,6 @@ private DefaultFunctionResolver time_format() { BuiltinFunctionName.TIME_FORMAT.getName(), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, STRING, STRING), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATE, STRING), - impl( - nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATETIME, STRING), impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, TIME, STRING), impl( nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), @@ -1354,9 +1198,9 @@ private DefaultFunctionResolver time_format() { * ADDDATE function implementation for ExprValue. * * @param functionProperties An FunctionProperties object. - * @param datetime ExprValue of Time/Date/Datetime/Timestamp type. + * @param datetime ExprValue of Time/Date/Timestamp type. * @param days ExprValue of Long type, representing the number of days to add. - * @return Date/Datetime resulted from days added to `datetime`. + * @return Date/Timestamp resulted from days added to `timestamp`. */ private ExprValue exprAddDateDays( FunctionProperties functionProperties, ExprValue datetime, ExprValue days) { @@ -1364,13 +1208,13 @@ private ExprValue exprAddDateDays( } /** - * Adds or subtracts `days` to/from `datetime`. + * Adds or subtracts `days` to/from `timestamp`. * * @param functionProperties An FunctionProperties object. - * @param datetime A Date/Time/Datetime/Timestamp value to change. + * @param datetime A Date/Time/Timestamp value to change. * @param days A days amount to add or subtract. * @param isAdd A flag: true to add, false to subtract. - * @return Datetime calculated. + * @return Timestamp calculated. */ private ExprValue exprDateApplyDays( FunctionProperties functionProperties, ExprValue datetime, Long days, Boolean isAdd) { @@ -1378,16 +1222,17 @@ private ExprValue exprDateApplyDays( return new ExprDateValue( isAdd ? 
datetime.dateValue().plusDays(days) : datetime.dateValue().minusDays(days)); } - var dt = extractDateTime(datetime, functionProperties); - return new ExprDatetimeValue(isAdd ? dt.plusDays(days) : dt.minusDays(days)); + var dt = + extractTimestamp(datetime, functionProperties).atZone(ZoneOffset.UTC).toLocalDateTime(); + return new ExprTimestampValue(isAdd ? dt.plusDays(days) : dt.minusDays(days)); } /** * Adds or subtracts time to/from date and returns the result. * * @param functionProperties A FunctionProperties object. - * @param temporal A Date/Time/Datetime/Timestamp value to change. - * @param temporalDelta A Date/Time/Datetime/Timestamp object to add/subtract time from. + * @param temporal A Date/Time/Timestamp value to change. + * @param temporalDelta A Date/Time/Timestamp object to add/subtract time from. * @param isAdd A flag: true to add, false to subtract. * @return A value calculated. */ @@ -1399,19 +1244,19 @@ private ExprValue exprApplyTime( var interval = Duration.between(LocalTime.MIN, temporalDelta.timeValue()); var result = isAdd - ? extractDateTime(temporal, functionProperties).plus(interval) - : extractDateTime(temporal, functionProperties).minus(interval); + ? extractTimestamp(temporal, functionProperties).plus(interval) + : extractTimestamp(temporal, functionProperties).minus(interval); return temporal.type() == TIME - ? new ExprTimeValue(result.toLocalTime()) - : new ExprDatetimeValue(result); + ? new ExprTimeValue(result.atZone(ZoneOffset.UTC).toLocalTime()) + : new ExprTimestampValue(result); } /** * Adds time to date and returns the result. * * @param functionProperties A FunctionProperties object. - * @param temporal A Date/Time/Datetime/Timestamp value to change. - * @param temporalDelta A Date/Time/Datetime/Timestamp object to add time from. + * @param temporal A Date/Time/Timestamp value to change. + * @param temporalDelta A Date/Time/Timestamp object to add time from. * @return A value calculated. 
*/ private ExprValue exprAddTime( @@ -1423,10 +1268,10 @@ private ExprValue exprAddTime( * CONVERT_TZ function implementation for ExprValue. Returns null for time zones outside of +13:00 * and -12:00. * - * @param startingDateTime ExprValue of DateTime that is being converted from + * @param startingDateTime ExprValue of Timestamp that is being converted from * @param fromTz ExprValue of time zone, representing the time to convert from. * @param toTz ExprValue of time zone, representing the time to convert to. - * @return DateTime that has been converted to the to_tz timezone. + * @return Timestamp that has been converted to the to_tz timezone. */ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, ExprValue toTz) { if (startingDateTime.type() == ExprCoreType.STRING) { @@ -1442,8 +1287,10 @@ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, Ex || !DateTimeUtils.isValidMySqlTimeZoneId(convertedToTz)) { return ExprNullValue.of(); } - ZonedDateTime zonedDateTime = startingDateTime.datetimeValue().atZone(convertedFromTz); - return new ExprDatetimeValue( + ZonedDateTime zonedDateTime = + (startingDateTime.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()) + .atZone(convertedFromTz); + return new ExprTimestampValue( zonedDateTime.withZoneSameInstant(convertedToTz).toLocalDateTime()); // Catches exception for invalid timezones. @@ -1484,43 +1331,43 @@ private ExprValue exprDateDiff( } /** - * DateTime implementation for ExprValue. + * Timestamp implementation for ExprValue. * - * @param dateTime ExprValue of String type. + * @param timestamp ExprValue of String type. * @param timeZone ExprValue of String type (or null). * @return ExprValue of date type. 
*/ - private ExprValue exprDateTime(ExprValue dateTime, ExprValue timeZone) { + private ExprValue exprDateTime(ExprValue timestamp, ExprValue timeZone) { String defaultTimeZone = TimeZone.getDefault().getID(); try { LocalDateTime ldtFormatted = - LocalDateTime.parse(dateTime.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); + LocalDateTime.parse(timestamp.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); if (timeZone.isNull()) { - return new ExprDatetimeValue(ldtFormatted); + return new ExprTimestampValue(ldtFormatted); } - // Used if datetime field is invalid format. + // Used if timestamp field is invalid format. } catch (DateTimeParseException e) { return ExprNullValue.of(); } ExprValue convertTZResult; - ExprDatetimeValue ldt; + ExprTimestampValue tz; String toTz; try { ZonedDateTime zdtWithZoneOffset = - ZonedDateTime.parse(dateTime.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); + ZonedDateTime.parse(timestamp.stringValue(), DATE_TIME_FORMATTER_STRICT_WITH_TZ); ZoneId fromTZ = zdtWithZoneOffset.getZone(); - ldt = new ExprDatetimeValue(zdtWithZoneOffset.toLocalDateTime()); + tz = new ExprTimestampValue(zdtWithZoneOffset.toLocalDateTime()); toTz = String.valueOf(fromTZ); } catch (DateTimeParseException e) { - ldt = new ExprDatetimeValue(dateTime.stringValue()); + tz = new ExprTimestampValue(timestamp.stringValue()); toTz = defaultTimeZone; } - convertTZResult = exprConvertTZ(ldt, new ExprStringValue(toTz), timeZone); + convertTZResult = exprConvertTZ(tz, new ExprStringValue(toTz), timeZone); return convertTZResult; } @@ -1549,7 +1396,7 @@ private ExprValue exprDayName(ExprValue date) { /** * Day of Month implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/String/Time/Timestamp type. + * @param date ExprValue of Date/String/Time/Timestamp type. * @return ExprValue. 
*/ private ExprValue exprDayOfMonth(ExprValue date) { @@ -1559,7 +1406,7 @@ private ExprValue exprDayOfMonth(ExprValue date) { /** * Day of Week implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/String/Timstamp type. + * @param date ExprValue of Date/String/Timstamp type. * @return ExprValue. */ private ExprValue exprDayOfWeek(ExprValue date) { @@ -1577,15 +1424,15 @@ private ExprValue exprDayOfYear(ExprValue date) { } /** - * Obtains a formatted long value for a specified part and datetime for the 'extract' function. + * Obtains a formatted long value for a specified part and timestamp for the 'extract' function. * * @param part is an ExprValue which comes from a defined list of accepted values. - * @param datetime the date to be formatted as an ExprValue. + * @param timestamp the date to be formatted as an ExprValue. * @return is a LONG formatted according to the input arguments. */ - public ExprLongValue formatExtractFunction(ExprValue part, ExprValue datetime) { + public ExprLongValue formatExtractFunction(ExprValue part, ExprValue timestamp) { String partName = part.stringValue().toUpperCase(); - LocalDateTime arg = datetime.datetimeValue(); + LocalDateTime arg = timestamp.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); String text = arg.format(DateTimeFormatter.ofPattern(extract_formats.get(partName), Locale.ENGLISH)); @@ -1596,11 +1443,11 @@ public ExprLongValue formatExtractFunction(ExprValue part, ExprValue datetime) { * Implements extract function. Returns a LONG formatted according to the 'part' argument. * * @param part Literal that determines the format of the outputted LONG. - * @param datetime The date/datetime to be formatted. + * @param timestamp The Date/Timestamp to be formatted. 
* @return A LONG */ - private ExprValue exprExtract(ExprValue part, ExprValue datetime) { - return formatExtractFunction(part, datetime); + private ExprValue exprExtract(ExprValue part, ExprValue timestamp) { + return formatExtractFunction(part, timestamp); } /** @@ -1613,7 +1460,7 @@ private ExprValue exprExtract(ExprValue part, ExprValue datetime) { private ExprValue exprExtractForTime( FunctionProperties functionProperties, ExprValue part, ExprValue time) { return formatExtractFunction( - part, new ExprDatetimeValue(extractDateTime(time, functionProperties))); + part, new ExprTimestampValue(extractTimestamp(time, functionProperties))); } /** @@ -1637,12 +1484,12 @@ private ExprValue exprFromUnixTime(ExprValue time) { if (MYSQL_MAX_TIMESTAMP <= time.doubleValue()) { return ExprNullValue.of(); } - return new ExprDatetimeValue(exprFromUnixTimeImpl(time)); + return new ExprTimestampValue(exprFromUnixTimeImpl(time)); } private LocalDateTime exprFromUnixTimeImpl(ExprValue time) { return LocalDateTime.ofInstant( - Instant.ofEpochSecond((long) Math.floor(time.doubleValue())), UTC_ZONE_ID) + Instant.ofEpochSecond((long) Math.floor(time.doubleValue())), ZoneOffset.UTC) .withNano((int) ((time.doubleValue() % 1) * 1E9)); } @@ -1694,11 +1541,11 @@ private LocalDate getLastDay(LocalDate today) { /** * Returns a DATE for the last day of the month of a given argument. * - * @param datetime A DATE/DATETIME/TIMESTAMP/STRING ExprValue. + * @param timestamp A DATE/TIMESTAMP/STRING ExprValue. * @return An DATE value corresponding to the last day of the month of the given argument. */ - private ExprValue exprLastDay(ExprValue datetime) { - return new ExprDateValue(getLastDay(datetime.dateValue())); + private ExprValue exprLastDay(ExprValue timestamp) { + return new ExprDateValue(getLastDay(timestamp.dateValue())); } /** @@ -1932,9 +1779,9 @@ private ExprValue exprSecond(ExprValue time) { * SUBDATE function implementation for ExprValue. 
* * @param functionProperties An FunctionProperties object. - * @param date ExprValue of Time/Date/Datetime/Timestamp type. + * @param date ExprValue of Time/Date/Timestamp type. * @param days ExprValue of Long type, representing the number of days to subtract. - * @return Date/Datetime resulted from days subtracted to date. + * @return Date/Timestamp resulted from days subtracted to date. */ private ExprValue exprSubDateDays( FunctionProperties functionProperties, ExprValue date, ExprValue days) { @@ -1945,9 +1792,9 @@ private ExprValue exprSubDateDays( * DATE_SUB function implementation for ExprValue. * * @param functionProperties An FunctionProperties object. - * @param datetime ExprValue of Time/Date/Datetime/Timestamp type. + * @param datetime ExprValue of Time/Date/Timestamp type. * @param expr ExprValue of Interval type, the temporal amount to subtract. - * @return Datetime resulted from expr subtracted to `datetime`. + * @return Timestamp resulted from expr subtracted to `timestamp`. */ private ExprValue exprSubDateInterval( FunctionProperties functionProperties, ExprValue datetime, ExprValue expr) { @@ -1957,8 +1804,8 @@ private ExprValue exprSubDateInterval( /** * Subtracts expr2 from expr1 and returns the result. * - * @param temporal A Date/Time/Datetime/Timestamp value to change. - * @param temporalDelta A Date/Time/Datetime/Timestamp to subtract time from. + * @param temporal A Date/Time/Timestamp value to change. + * @param temporalDelta A Date/Time/Timestamp to subtract time from. * @return A value calculated. 
*/ private ExprValue exprSubTime( @@ -2012,7 +1859,8 @@ private ExprValue exprTimestampAdd( ExprValue partExpr, ExprValue amountExpr, ExprValue datetimeExpr) { String part = partExpr.stringValue(); int amount = amountExpr.integerValue(); - LocalDateTime datetime = datetimeExpr.datetimeValue(); + LocalDateTime timestamp = + datetimeExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); ChronoUnit temporalUnit; switch (part) { @@ -2047,13 +1895,13 @@ private ExprValue exprTimestampAdd( default: return ExprNullValue.of(); } - return new ExprDatetimeValue(datetime.plus(amount, temporalUnit)); + return new ExprTimestampValue(timestamp.plus(amount, temporalUnit)); } private ExprValue exprTimestampAddForTimeType( Clock clock, ExprValue partExpr, ExprValue amountExpr, ExprValue timeExpr) { LocalDateTime datetime = LocalDateTime.of(formatNow(clock).toLocalDate(), timeExpr.timeValue()); - return exprTimestampAdd(partExpr, amountExpr, new ExprDatetimeValue(datetime)); + return exprTimestampAdd(partExpr, amountExpr, new ExprTimestampValue(datetime)); } private ExprValue getTimeDifference(String part, LocalDateTime startTime, LocalDateTime endTime) { @@ -2095,15 +1943,17 @@ private ExprValue getTimeDifference(String part, LocalDateTime startTime, LocalD private ExprValue exprTimestampDiff( ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( - partExpr.stringValue(), startTimeExpr.datetimeValue(), endTimeExpr.datetimeValue()); + partExpr.stringValue(), + startTimeExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(), + endTimeExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } private ExprValue exprTimestampDiffForTimeType( FunctionProperties fp, ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( partExpr.stringValue(), - extractDateTime(startTimeExpr, fp), - extractDateTime(endTimeExpr, fp)); + extractTimestamp(startTimeExpr, 
fp).atZone(ZoneOffset.UTC).toLocalDateTime(), + extractTimestamp(endTimeExpr, fp).atZone(ZoneOffset.UTC).toLocalDateTime()); } /** @@ -2134,8 +1984,9 @@ private ExprValue exprUtcTime(FunctionProperties functionProperties) { */ private ExprValue exprUtcTimeStamp(FunctionProperties functionProperties) { var zdt = - ZonedDateTime.now(functionProperties.getQueryStartClock()).withZoneSameInstant(UTC_ZONE_ID); - return new ExprDatetimeValue(zdt.toLocalDateTime()); + ZonedDateTime.now(functionProperties.getQueryStartClock()) + .withZoneSameInstant(ZoneOffset.UTC); + return new ExprTimestampValue(zdt.toLocalDateTime()); } /** @@ -2151,12 +2002,13 @@ private ExprValue exprToDays(ExprValue date) { /** * To_seconds implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/Timestamp/String type. + * @param date ExprValue of Date/Timestamp/String type. * @return ExprValue. */ private ExprValue exprToSeconds(ExprValue date) { return new ExprLongValue( - date.datetimeValue().toEpochSecond(ZoneOffset.UTC) + DAYS_0000_TO_1970 * SECONDS_PER_DAY); + date.timestampValue().atOffset(ZoneOffset.UTC).toEpochSecond() + + DAYS_0000_TO_1970 * SECONDS_PER_DAY); } /** @@ -2226,7 +2078,7 @@ private ExprValue exprToSecondsForIntType(ExprValue dateExpr) { /** * Week for date implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/Timestamp/String type. + * @param date ExprValue of Date/Timestamp/String type. * @param mode ExprValue of Integer type. */ private ExprValue exprWeek(ExprValue date, ExprValue mode) { @@ -2237,7 +2089,7 @@ private ExprValue exprWeek(ExprValue date, ExprValue mode) { /** * Weekday implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/String/Timstamp type. + * @param date ExprValue of Date/String/Timstamp type. * @return ExprValue. 
*/ private ExprValue exprWeekday(ExprValue date) { @@ -2270,9 +2122,6 @@ private Double unixTimeStampOfImpl(ExprValue value) { switch ((ExprCoreType) value.type()) { case DATE: return value.dateValue().toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; - case DATETIME: - return value.datetimeValue().toEpochSecond(ZoneOffset.UTC) - + value.datetimeValue().getNano() / 1E9; case TIMESTAMP: return value.timestampValue().getEpochSecond() + value.timestampValue().getNano() / 1E9; default: @@ -2323,7 +2172,7 @@ private Double unixTimeStampOfImpl(ExprValue value) { * Week for date implementation for ExprValue. When mode is not specified default value mode 0 is * used for default_week_format. * - * @param date ExprValue of Date/Datetime/Timestamp/String type. + * @param date ExprValue of Date/Timestamp/String type. * @return ExprValue. */ private ExprValue exprWeekWithoutMode(ExprValue date) { @@ -2363,7 +2212,7 @@ private ExprIntegerValue extractYearweek(LocalDate date, int mode) { /** * Yearweek for date implementation for ExprValue. * - * @param date ExprValue of Date/Datetime/Time/Timestamp/String type. + * @param date ExprValue of Date/Time/Timestamp/String type. * @param mode ExprValue of Integer type. */ private ExprValue exprYearweek(ExprValue date, ExprValue mode) { @@ -2374,7 +2223,7 @@ private ExprValue exprYearweek(ExprValue date, ExprValue mode) { * Yearweek for date implementation for ExprValue. When mode is not specified default value mode 0 * is used. * - * @param date ExprValue of Date/Datetime/Time/Timestamp/String type. + * @param date ExprValue of Date/Time/Timestamp/String type. * @return ExprValue. 
*/ private ExprValue exprYearweekWithoutMode(ExprValue date) { diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java index 7c3565f69c..db4b29f3b9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java @@ -8,7 +8,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -29,7 +28,6 @@ import org.opensearch.sql.data.model.ExprBooleanValue; import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -58,7 +56,6 @@ public static void register(BuiltinFunctionRepository repository) { repository.register(castToDate()); repository.register(castToTime()); repository.register(castToTimestamp()); - repository.register(castToDatetime()); } private static DefaultFunctionResolver castToString() { @@ -66,8 +63,7 @@ private static DefaultFunctionResolver castToString() { BuiltinFunctionName.CAST_TO_STRING.getName(), Stream.concat( Arrays.asList( - BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, TIMESTAMP, - DATETIME) + BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, TIMESTAMP) .stream() .map( type -> @@ -180,7 +176,6 @@ 
private static DefaultFunctionResolver castToDate() { return FunctionDSL.define( BuiltinFunctionName.CAST_TO_DATE.getName(), impl(nullMissingHandling((v) -> new ExprDateValue(v.stringValue())), DATE, STRING), - impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, DATETIME), impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, TIMESTAMP), impl(nullMissingHandling((v) -> v), DATE, DATE)); } @@ -189,21 +184,16 @@ private static DefaultFunctionResolver castToTime() { return FunctionDSL.define( BuiltinFunctionName.CAST_TO_TIME.getName(), impl(nullMissingHandling((v) -> new ExprTimeValue(v.stringValue())), TIME, STRING), - impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, DATETIME), impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, TIMESTAMP), impl(nullMissingHandling((v) -> v), TIME, TIME)); } - // `DATE`/`TIME`/`DATETIME` -> `DATETIME`/TIMESTAMP` cast tested in BinaryPredicateOperatorTest + // `DATE`/`TIME` -> `TIMESTAMP` cast tested in BinaryPredicateOperatorTest private static DefaultFunctionResolver castToTimestamp() { return FunctionDSL.define( BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), impl( nullMissingHandling((v) -> new ExprTimestampValue(v.stringValue())), TIMESTAMP, STRING), - impl( - nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), - TIMESTAMP, - DATETIME), impl( nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), TIMESTAMP, @@ -215,21 +205,4 @@ private static DefaultFunctionResolver castToTimestamp() { TIME), impl(nullMissingHandling((v) -> v), TIMESTAMP, TIMESTAMP)); } - - private static DefaultFunctionResolver castToDatetime() { - return FunctionDSL.define( - BuiltinFunctionName.CAST_TO_DATETIME.getName(), - impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.stringValue())), DATETIME, STRING), - impl( - nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), - DATETIME, - TIMESTAMP), - 
impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, DATE), - implWithProperties( - nullMissingHandlingWithProperties( - (fp, v) -> new ExprDatetimeValue(((ExprTimeValue) v).datetimeValue(fp))), - DATETIME, - TIME), - impl(nullMissingHandling((v) -> v), DATETIME, DATETIME)); - } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java index 81a1a0230f..82c8af52cd 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java @@ -6,17 +6,15 @@ package org.opensearch.sql.planner.physical.collector; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.LONG; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.ChronoField; import java.util.Arrays; import java.util.concurrent.TimeUnit; @@ -24,7 +22,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; @@ -49,9 +46,6 @@ public static Rounding createRounding(SpanExpression span) { if (DOUBLE.isCompatible(type)) { return new DoubleRounding(interval); } - if (type.equals(DATETIME)) { - return 
new DatetimeRounding(interval, span.getUnit().getName()); - } if (type.equals(TIMESTAMP)) { return new TimestampRounding(interval, span.getUnit().getName()); } @@ -84,26 +78,6 @@ public ExprValue round(ExprValue var) { } } - static class DatetimeRounding extends Rounding { - private final ExprValue interval; - private final DateTimeUnit dateTimeUnit; - - public DatetimeRounding(ExprValue interval, String unit) { - this.interval = interval; - this.dateTimeUnit = DateTimeUnit.resolve(unit); - } - - @Override - public ExprValue round(ExprValue var) { - Instant instant = - Instant.ofEpochMilli( - dateTimeUnit.round( - var.datetimeValue().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), - interval.integerValue())); - return new ExprDatetimeValue(instant.atZone(UTC_ZONE_ID).toLocalDateTime()); - } - } - static class DateRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -118,9 +92,9 @@ public ExprValue round(ExprValue var) { Instant instant = Instant.ofEpochMilli( dateTimeUnit.round( - var.dateValue().atStartOfDay().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), + var.dateValue().atStartOfDay().atZone(ZoneOffset.UTC).toInstant().toEpochMilli(), interval.integerValue())); - return new ExprDateValue(instant.atZone(UTC_ZONE_ID).toLocalDate()); + return new ExprDateValue(instant.atZone(ZoneOffset.UTC).toLocalDate()); } } @@ -144,7 +118,7 @@ public ExprValue round(ExprValue var) { Instant.ofEpochMilli( dateTimeUnit.round( var.timeValue().getLong(ChronoField.MILLI_OF_DAY), interval.integerValue())); - return new ExprTimeValue(instant.atZone(UTC_ZONE_ID).toLocalTime()); + return new ExprTimeValue(instant.atZone(ZoneOffset.UTC).toLocalTime()); } } diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java index 593b4c4471..62d5f0246d 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java +++ 
b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java @@ -9,6 +9,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import lombok.experimental.UtilityClass; import org.opensearch.sql.data.model.ExprTimeValue; @@ -48,9 +49,9 @@ public static long roundWeek(long utcMillis, int interval) { * @return Rounded date/time value in utc millis */ public static long roundMonth(long utcMillis, int interval) { - ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); ZonedDateTime zonedDateTime = - Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval); + Instant.ofEpochMilli(utcMillis).atZone(ZoneOffset.UTC).plusMonths(interval); long monthDiff = (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime.getMonthValue() @@ -67,9 +68,9 @@ public static long roundMonth(long utcMillis, int interval) { * @return Rounded date/time value in utc millis */ public static long roundQuarter(long utcMillis, int interval) { - ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); ZonedDateTime zonedDateTime = - Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval * 3L); + Instant.ofEpochMilli(utcMillis).atZone(ZoneOffset.UTC).plusMonths(interval * 3L); long monthDiff = ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime.getMonthValue() @@ -86,8 +87,8 @@ public static long roundQuarter(long utcMillis, int interval) { * @return Rounded date/time value in utc millis */ public static long roundYear(long utcMillis, int interval) { - ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = 
Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID); + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(ZoneOffset.UTC); int yearDiff = zonedDateTime.getYear() - initDateTime.getYear(); int yearToAdd = (yearDiff / interval) * interval; return initDateTime.plusYears(yearToAdd).toInstant().toEpochMilli(); @@ -136,11 +137,10 @@ public Boolean isValidMySqlTimeZoneId(ZoneId zone) { * Extracts LocalDateTime from a datetime ExprValue. Uses `FunctionProperties` for * `ExprTimeValue`. */ - public static LocalDateTime extractDateTime( - ExprValue value, FunctionProperties functionProperties) { + public static Instant extractTimestamp(ExprValue value, FunctionProperties functionProperties) { return value instanceof ExprTimeValue - ? ((ExprTimeValue) value).datetimeValue(functionProperties) - : value.datetimeValue(); + ? ((ExprTimeValue) value).timestampValue(functionProperties) + : value.timestampValue(); } /** @@ -151,6 +151,4 @@ public static LocalDate extractDate(ExprValue value, FunctionProperties function ? 
((ExprTimeValue) value).dateValue(functionProperties) : value.dateValue(); } - - public static final ZoneId UTC_ZONE_ID = ZoneId.of("UTC"); } diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java index 2f4d6e8ada..8d935b11d2 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java @@ -157,7 +157,7 @@ public void filter_relation_with_invalid_qualifiedName_ExpressionEvaluationExcep assertEquals( "= function expected {[BYTE,BYTE],[SHORT,SHORT],[INTEGER,INTEGER],[LONG,LONG]," + "[FLOAT,FLOAT],[DOUBLE,DOUBLE],[STRING,STRING],[BOOLEAN,BOOLEAN],[DATE,DATE]," - + "[TIME,TIME],[DATETIME,DATETIME],[TIMESTAMP,TIMESTAMP],[INTERVAL,INTERVAL]," + + "[TIME,TIME],[TIMESTAMP,TIMESTAMP],[INTERVAL,INTERVAL]," + "[STRUCT,STRUCT],[ARRAY,ARRAY]}, but get [STRING,INTEGER]", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java index 01fe4a5e4e..b5a3d61211 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java @@ -10,11 +10,11 @@ import static org.opensearch.sql.data.model.ExprValueUtils.integerValue; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import org.junit.jupiter.api.Test; import org.opensearch.sql.exception.ExpressionEvaluationException; @@ -36,8 +36,6 @@ public void timeValueInterfaceTest() { // without a FunctionProperties object var exception = 
assertThrows(ExpressionEvaluationException.class, timeValue::dateValue); assertEquals("invalid to get dateValue from value of type TIME", exception.getMessage()); - exception = assertThrows(ExpressionEvaluationException.class, timeValue::datetimeValue); - assertEquals("invalid to get datetimeValue from value of type TIME", exception.getMessage()); exception = assertThrows(ExpressionEvaluationException.class, timeValue::timestampValue); assertEquals("invalid to get timestampValue from value of type TIME", exception.getMessage()); @@ -45,9 +43,11 @@ public void timeValueInterfaceTest() { var today = LocalDate.now(functionProperties.getQueryStartClock()); assertEquals(today, timeValue.dateValue(functionProperties)); - assertEquals(today.atTime(1, 1, 1), timeValue.datetimeValue(functionProperties)); assertEquals( - ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), UTC_ZONE_ID).toInstant(), + today.atTime(1, 1, 1), + LocalDateTime.ofInstant(timeValue.timestampValue(functionProperties), ZoneOffset.UTC)); + assertEquals( + ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), ZoneOffset.UTC).toInstant(), timeValue.timestampValue(functionProperties)); assertEquals("01:01:01", timeValue.value()); @@ -63,13 +63,15 @@ public void timestampValueInterfaceTest() { assertEquals(TIMESTAMP, timestampValue.type()); assertEquals( - ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), UTC_ZONE_ID).toInstant(), + ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), ZoneOffset.UTC).toInstant(), timestampValue.timestampValue()); assertEquals("2020-07-07 01:01:01", timestampValue.value()); assertEquals("TIMESTAMP '2020-07-07 01:01:01'", timestampValue.toString()); assertEquals(LocalDate.parse("2020-07-07"), timestampValue.dateValue()); assertEquals(LocalTime.parse("01:01:01"), timestampValue.timeValue()); - assertEquals(LocalDateTime.parse("2020-07-07T01:01:01"), timestampValue.datetimeValue()); + assertEquals( + LocalDateTime.parse("2020-07-07T01:01:01"), 
+ LocalDateTime.ofInstant(timestampValue.timestampValue(), ZoneOffset.UTC)); assertThrows( ExpressionEvaluationException.class, () -> integerValue(1).timestampValue(), @@ -82,32 +84,17 @@ public void dateValueInterfaceTest() { assertEquals(LocalDate.parse("2012-07-07"), dateValue.dateValue()); assertEquals(LocalTime.parse("00:00:00"), dateValue.timeValue()); - assertEquals(LocalDateTime.parse("2012-07-07T00:00:00"), dateValue.datetimeValue()); assertEquals( - ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), UTC_ZONE_ID).toInstant(), + LocalDateTime.parse("2012-07-07T00:00:00"), + LocalDateTime.ofInstant(dateValue.timestampValue(), ZoneOffset.UTC)); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), ZoneOffset.UTC).toInstant(), dateValue.timestampValue()); ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).dateValue()); assertEquals("invalid to get dateValue from value of type INTEGER", exception.getMessage()); } - @Test - public void datetimeValueInterfaceTest() { - ExprValue datetimeValue = new ExprDatetimeValue("2020-08-17 19:44:00"); - - assertEquals(LocalDateTime.parse("2020-08-17T19:44:00"), datetimeValue.datetimeValue()); - assertEquals(LocalDate.parse("2020-08-17"), datetimeValue.dateValue()); - assertEquals(LocalTime.parse("19:44:00"), datetimeValue.timeValue()); - assertEquals( - ZonedDateTime.of(LocalDateTime.parse("2020-08-17T19:44:00"), UTC_ZONE_ID).toInstant(), - datetimeValue.timestampValue()); - assertEquals("DATETIME '2020-08-17 19:44:00'", datetimeValue.toString()); - assertThrows( - ExpressionEvaluationException.class, - () -> integerValue(1).datetimeValue(), - "invalid to get datetimeValue from value of type INTEGER"); - } - @Test public void dateInUnsupportedFormat() { SemanticCheckException exception = @@ -137,21 +124,12 @@ public void timestampInUnsupportedFormat() { } @Test - public void datetimeInUnsupportedFormat() { - 
SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, () -> new ExprDatetimeValue("2020-07-07T01:01:01Z")); - assertEquals( - "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - exception.getMessage()); - } - - @Test - public void stringDateTimeValue() { + public void stringTimestampValue() { ExprValue stringValue = new ExprStringValue("2020-08-17 19:44:00"); - assertEquals(LocalDateTime.parse("2020-08-17T19:44:00"), stringValue.datetimeValue()); + assertEquals( + LocalDateTime.parse("2020-08-17T19:44:00").atZone(ZoneOffset.UTC).toInstant(), + stringValue.timestampValue()); assertEquals(LocalDate.parse("2020-08-17"), stringValue.dateValue()); assertEquals(LocalTime.parse("19:44:00"), stringValue.timeValue()); assertEquals("\"2020-08-17 19:44:00\"", stringValue.toString()); @@ -159,10 +137,9 @@ public void stringDateTimeValue() { SemanticCheckException exception = assertThrows( SemanticCheckException.class, - () -> new ExprStringValue("2020-07-07T01:01:01Z").datetimeValue()); + () -> new ExprStringValue("2020-07-07T01:01:01Z").timestampValue()); assertEquals( - "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + "date:2020-07-07T01:01:01Z in unsupported format, " + "please use 'yyyy-MM-dd'", exception.getMessage()); } @@ -170,7 +147,8 @@ public void stringDateTimeValue() { public void stringDateValue() { ExprValue stringValue = new ExprStringValue("2020-08-17"); - assertEquals(LocalDateTime.parse("2020-08-17T00:00:00"), stringValue.datetimeValue()); + assertEquals( + ZonedDateTime.parse("2020-08-17T00:00:00Z").toInstant(), stringValue.timestampValue()); assertEquals(LocalDate.parse("2020-08-17"), stringValue.dateValue()); assertEquals("\"2020-08-17\"", stringValue.toString()); @@ -228,28 +206,9 @@ public void timestampWithVariableNanoPrecision() { assertEquals(LocalDate.parse(dateValue), timestampValue.dateValue()); 
assertEquals(LocalTime.parse(timeWithNanos), timestampValue.timeValue()); String localDateTime = String.format("%sT%s", dateValue, timeWithNanos); - assertEquals(LocalDateTime.parse(localDateTime), timestampValue.datetimeValue()); - } - } - - @Test - public void datetimeWithVariableNanoPrecision() { - String dateValue = "2020-08-17"; - String timeWithNanosFormat = "10:11:12.%s"; - - // Check all lengths of nanosecond precision, up to max precision accepted - StringBuilder nanos = new StringBuilder(); - for (int nanoPrecision = 1; nanoPrecision <= NANOS_PRECISION_MAX; nanoPrecision++) { - nanos.append(nanoPrecision); - String timeWithNanos = String.format(timeWithNanosFormat, nanos); - - String datetimeString = String.format("%s %s", dateValue, timeWithNanos); - ExprValue datetimeValue = new ExprDatetimeValue(datetimeString); - - assertEquals(LocalDate.parse(dateValue), datetimeValue.dateValue()); - assertEquals(LocalTime.parse(timeWithNanos), datetimeValue.timeValue()); - String localDateTime = String.format("%sT%s", dateValue, timeWithNanos); - assertEquals(LocalDateTime.parse(localDateTime), datetimeValue.datetimeValue()); + assertEquals( + LocalDateTime.parse(localDateTime), + LocalDateTime.ofInstant(timestampValue.timestampValue(), ZoneOffset.UTC)); } } @@ -265,18 +224,6 @@ public void timestampOverMaxNanoPrecision() { exception.getMessage()); } - @Test - public void datetimeOverMaxNanoPrecision() { - SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, - () -> new ExprDatetimeValue("2020-07-07 01:01:01.1234567890")); - assertEquals( - "datetime:2020-07-07 01:01:01.1234567890 in unsupported format, " - + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", - exception.getMessage()); - } - @Test public void timeOverMaxNanoPrecision() { SemanticCheckException exception = diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java index 
b965dff643..ee30a0f0c6 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java @@ -11,7 +11,7 @@ import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; -import static org.opensearch.sql.utils.DateTimeUtils.extractDateTime; +import static org.opensearch.sql.utils.DateTimeUtils.extractTimestamp; import java.time.LocalDate; import java.time.Period; @@ -39,22 +39,6 @@ public void dateValueCompare() { assertEquals(-1, new ExprDateValue("2012-08-07").compareTo(new ExprDateValue("2012-08-08"))); } - @Test - public void datetimeValueCompare() { - assertEquals( - 0, - new ExprDatetimeValue("2012-08-07 18:00:00") - .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals( - 1, - new ExprDatetimeValue("2012-08-07 19:00:00") - .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals( - -1, - new ExprDatetimeValue("2012-08-07 18:00:00") - .compareTo(new ExprDatetimeValue("2012-08-07 19:00:00"))); - } - @Test public void timestampValueCompare() { assertEquals( @@ -73,26 +57,14 @@ public void timestampValueCompare() { private static Stream getEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of( - new ExprTimestampValue("1961-04-12 09:07:00"), - new ExprDatetimeValue("1961-04-12 09:07:00")), Arguments.of( new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), Arguments.of( new ExprTimestampValue(LocalDate.now() + " 00:00:00"), new ExprDateValue(LocalDate.now())), - Arguments.of( - new ExprDatetimeValue(LocalDate.now() + " 17:42:15"), new ExprTimeValue("17:42:15")), - Arguments.of( - new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimestampValue("2012-08-07 19:14:38")), - Arguments.of(new ExprDateValue("2012-08-07"), 
new ExprDatetimeValue("2012-08-07 00:00:00")), - Arguments.of(new ExprDateValue("2007-01-27"), new ExprDatetimeValue("2007-01-27 00:00:00")), Arguments.of(new ExprDateValue(LocalDate.now()), new ExprTimeValue("00:00:00")), Arguments.of( new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), - Arguments.of( - new ExprTimeValue("19:14:38"), new ExprDatetimeValue(LocalDate.now() + " 19:14:38")), Arguments.of( new ExprTimeValue("17:42:15"), new ExprTimestampValue(LocalDate.now() + " 17:42:15"))); } @@ -106,34 +78,17 @@ private static Stream getEqualDatetimeValuesOfDifferentTypes() { public void compareEqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { assertEquals( 0, - extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertEquals( - 0, - extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + extractTimestamp(left, functionProperties) + .compareTo(extractTimestamp(right, functionProperties))); } private static Stream getNotEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of( - new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimestampValue("1961-04-12 09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), - Arguments.of( - new ExprDatetimeValue(LocalDate.now() + " 19:14:38"), new ExprTimeValue("09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 00:00:00"), new ExprDateValue("1961-04-12")), - Arguments.of(new ExprDatetimeValue("1961-04-12 19:14:38"), new ExprDateValue("1961-04-12")), - Arguments.of(new ExprDateValue("1984-11-22"), new ExprDatetimeValue("1961-04-12 19:14:38")), Arguments.of( new ExprDateValue("1984-11-22"), new ExprTimestampValue("2020-09-16 17:30:00")), Arguments.of(new ExprDateValue("1984-11-22"), new ExprTimeValue("19:14:38")), Arguments.of(new ExprTimeValue("19:14:38"), new ExprDateValue(LocalDate.now())), - 
Arguments.of(new ExprTimeValue("19:14:38"), new ExprDatetimeValue("2012-08-07 09:07:00")), Arguments.of(new ExprTimeValue("19:14:38"), new ExprTimestampValue("1984-02-03 04:05:07")), - Arguments.of( - new ExprTimestampValue("2012-08-07 19:14:38"), - new ExprDatetimeValue("1961-04-12 09:07:00")), Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), Arguments.of( new ExprTimestampValue(LocalDate.now() + " 19:14:38"), new ExprTimeValue("09:07:00")), @@ -152,12 +107,8 @@ private static Stream getNotEqualDatetimeValuesOfDifferentTypes() { public void compareNeqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { assertNotEquals( 0, - extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertNotEquals( - 0, - extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + extractTimestamp(left, functionProperties) + .compareTo(extractTimestamp(right, functionProperties))); } @Test diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java index c879384955..0baf5052e4 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java @@ -13,13 +13,11 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.INTERVAL; import static org.opensearch.sql.data.type.ExprCoreType.STRING; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static 
org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -29,6 +27,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.AbstractMap; import java.util.ArrayList; @@ -70,7 +69,6 @@ public class ExprValueUtilsTest { new ExprTupleValue(testTuple), new ExprDateValue("2012-08-07"), new ExprTimeValue("18:00:00"), - new ExprDatetimeValue("2012-08-07 18:00:00"), new ExprTimestampValue("2012-08-07 18:00:00"), new ExprIntervalValue(Duration.ofSeconds(100))); @@ -95,7 +93,6 @@ public class ExprValueUtilsTest { Arrays.asList( ExprValue::dateValue, ExprValue::timeValue, - ExprValue::datetimeValue, ExprValue::timestampValue, ExprValue::intervalValue); private static List> allValueExtractor = @@ -113,7 +110,7 @@ public class ExprValueUtilsTest { ExprCoreType.DOUBLE); private static List nonNumberTypes = Arrays.asList(STRING, BOOLEAN, ARRAY, STRUCT); private static List dateAndTimeTypes = - Arrays.asList(DATE, TIME, DATETIME, TIMESTAMP, INTERVAL); + Arrays.asList(DATE, TIME, TIMESTAMP, INTERVAL); private static List allTypes = Lists.newArrayList(Iterables.concat(numberTypes, nonNumberTypes, dateAndTimeTypes)); @@ -132,8 +129,8 @@ private static Stream getValueTestArgumentStream() { ImmutableMap.of("1", integerValue(1)), LocalDate.parse("2012-08-07"), LocalTime.parse("18:00:00"), - LocalDateTime.parse("2012-08-07T18:00:00"), - ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), UTC_ZONE_ID).toInstant(), + ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), ZoneOffset.UTC) + .toInstant(), Duration.ofSeconds(100)); Stream.Builder builder = Stream.builder(); for (int i = 0; i < expectedValues.size(); i++) { @@ -237,9 +234,6 @@ public void constructDateAndTimeValue() { assertEquals( new ExprDateValue("2012-07-07"), ExprValueUtils.fromObjectValue("2012-07-07", 
DATE)); assertEquals(new ExprTimeValue("01:01:01"), ExprValueUtils.fromObjectValue("01:01:01", TIME)); - assertEquals( - new ExprDatetimeValue("2012-07-07 01:01:01"), - ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", DATETIME)); assertEquals( new ExprTimestampValue("2012-07-07 01:01:01"), ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", TIMESTAMP)); @@ -260,9 +254,6 @@ public void hashCodeTest() { new ExprDateValue("2012-08-07").hashCode(), new ExprDateValue("2012-08-07").hashCode()); assertEquals( new ExprTimeValue("18:00:00").hashCode(), new ExprTimeValue("18:00:00").hashCode()); - assertEquals( - new ExprDatetimeValue("2012-08-07 18:00:00").hashCode(), - new ExprDatetimeValue("2012-08-07 18:00:00").hashCode()); assertEquals( new ExprTimestampValue("2012-08-07 18:00:00").hashCode(), new ExprTimestampValue("2012-08-07 18:00:00").hashCode()); diff --git a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java index 1def15cc6f..ec45c3dfec 100644 --- a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java +++ b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java @@ -12,7 +12,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -45,7 +44,6 @@ public void isCompatible() { assertTrue(TIMESTAMP.isCompatible(STRING)); assertTrue(DATE.isCompatible(STRING)); assertTrue(TIME.isCompatible(STRING)); - assertTrue(DATETIME.isCompatible(STRING)); } @Test diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java 
b/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java index f465a6477e..10551d43a5 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/AvgAggregatorTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -19,6 +18,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.util.List; import org.junit.jupiter.api.Test; import org.opensearch.sql.data.model.ExprValue; @@ -89,12 +89,6 @@ public void avg_date_no_values() { assertTrue(result.isNull()); } - @Test - public void avg_datetime_no_values() { - ExprValue result = aggregation(DSL.avg(DSL.ref("dummy", DATETIME)), List.of()); - assertTrue(result.isNull()); - } - @Test public void avg_timestamp_no_values() { ExprValue result = aggregation(DSL.avg(DSL.ref("dummy", TIMESTAMP)), List.of()); @@ -113,12 +107,6 @@ public void avg_date() { assertEquals(LocalDate.of(2007, 7, 2), result.dateValue()); } - @Test - public void avg_datetime() { - var result = aggregation(DSL.avg(DSL.datetime(DSL.ref("datetime_value", STRING))), tuples); - assertEquals(LocalDateTime.of(2012, 7, 2, 3, 30), result.datetimeValue()); - } - @Test public void avg_time() { ExprValue result = aggregation(DSL.avg(DSL.time(DSL.ref("time_value", STRING))), tuples); @@ -129,7 +117,9 @@ public void avg_time() { public void avg_timestamp() { var result = aggregation(DSL.avg(DSL.timestamp(DSL.ref("timestamp_value", STRING))), tuples); 
assertEquals(TIMESTAMP, result.type()); - assertEquals(LocalDateTime.of(2012, 7, 2, 3, 30), result.datetimeValue()); + assertEquals( + LocalDateTime.of(2012, 7, 2, 3, 30), + result.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java index 50bd3fedfe..2159780dc0 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/CountAggregatorTest.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -63,12 +62,6 @@ public void count_timestamp_field_expression() { assertEquals(4, result.value()); } - @Test - public void count_datetime_field_expression() { - ExprValue result = aggregation(DSL.count(DSL.ref("datetime_value", DATETIME)), tuples); - assertEquals(4, result.value()); - } - @Test public void count_arithmetic_expression() { ExprValue result = diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java index c6cd380ad5..f952eff982 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/MaxAggregatorTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static 
org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -62,12 +61,6 @@ public void test_max_date() { assertEquals("2040-01-01", result.value()); } - @Test - public void test_max_datetime() { - ExprValue result = aggregation(DSL.max(DSL.ref("datetime_value", DATETIME)), tuples); - assertEquals("2040-01-01 07:00:00", result.value()); - } - @Test public void test_max_time() { ExprValue result = aggregation(DSL.max(DSL.ref("time_value", TIME)), tuples); diff --git a/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java b/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java index 1aee0f3a6c..8a3f3d15a3 100644 --- a/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/aggregation/MinAggregatorTest.java @@ -9,7 +9,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -62,12 +61,6 @@ public void test_min_date() { assertEquals("1970-01-01", result.value()); } - @Test - public void test_min_datetime() { - ExprValue result = aggregation(DSL.min(DSL.ref("datetime_value", DATETIME)), tuples); - assertEquals("1970-01-01 19:00:00", result.value()); - } - @Test public void test_min_time() { ExprValue result = 
aggregation(DSL.min(DSL.ref("time_value", TIME)), tuples); diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java index eed83f4fa9..49947976b4 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/AddTimeAndSubTimeTest.java @@ -6,13 +6,14 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.Temporal; import java.util.stream.Stream; import org.junit.jupiter.api.Test; @@ -23,7 +24,7 @@ public class AddTimeAndSubTimeTest extends DateTimeTestBase { @Test - // (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME + // (TIME, TIME/DATE/TIMESTAMP) -> TIME public void return_time_when_first_arg_is_time() { var res = addtime(LocalTime.of(21, 0), LocalTime.of(0, 5)); assertEquals(TIME, res.type()); @@ -70,31 +71,10 @@ public void time_limited_by_24_hours() { } // Function signature is: - // (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME + // (DATE/TIMESTAMP, TIME/DATE/TIMESTAMP) -> TIMESTAMP private static Stream getTestData() { return Stream.of( - // DATETIME and TIME/DATE/DATETIME/TIMESTAMP - Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalTime.of(1, 48), - LocalDateTime.of(1961, 4, 12, 10, 55), - LocalDateTime.of(1961, 4, 12, 7, 19)), - Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalDate.of(2000, 1, 1), - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalDateTime.of(1961, 4, 12, 9, 7)), - 
Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - LocalDateTime.of(1235, 5, 6, 1, 48), - LocalDateTime.of(1961, 4, 12, 10, 55), - LocalDateTime.of(1961, 4, 12, 7, 19)), - Arguments.of( - LocalDateTime.of(1961, 4, 12, 9, 7), - Instant.ofEpochSecond(42), - LocalDateTime.of(1961, 4, 12, 9, 7, 42), - LocalDateTime.of(1961, 4, 12, 9, 6, 18)), - // DATE and TIME/DATE/DATETIME/TIMESTAMP + // DATE and TIME/DATE/TIMESTAMP Arguments.of( LocalDate.of(1961, 4, 12), LocalTime.of(9, 7), @@ -115,7 +95,7 @@ private static Stream getTestData() { Instant.ofEpochSecond(42), LocalDateTime.of(1961, 4, 12, 0, 0, 42), LocalDateTime.of(1961, 4, 11, 23, 59, 18)), - // TIMESTAMP and TIME/DATE/DATETIME/TIMESTAMP + // TIMESTAMP and TIME/DATE/TIMESTAMP Arguments.of( Instant.ofEpochSecond(42), LocalTime.of(9, 7), @@ -154,11 +134,13 @@ public void return_datetime_when_first_arg_is_not_time( LocalDateTime addTimeExpectedResult, LocalDateTime subTimeExpectedResult) { var res = addtime(arg1, arg2); - assertEquals(DATETIME, res.type()); - assertEquals(addTimeExpectedResult, res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + addTimeExpectedResult, res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); res = subtime(arg1, arg2); - assertEquals(DATETIME, res.type()); - assertEquals(subTimeExpectedResult, res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + subTimeExpectedResult, res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java index 17ff4f67ab..707f995138 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ConvertTZTest.java @@ -6,11 +6,13 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; 
+import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.model.ExprValueUtils.nullValue; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import org.junit.jupiter.api.Test; -import org.opensearch.sql.data.model.ExprDatetimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; +import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.ExpressionTestBase; import org.opensearch.sql.expression.FunctionExpression; @@ -21,32 +23,32 @@ class ConvertTZTest extends ExpressionTestBase { public void invalidDate() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-04-31 10:00:00")), + DSL.timestamp(DSL.literal("2021-04-31 10:00:00")), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } @Test public void conversionFromNoOffset() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("+10:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-16 08:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-16 08:00:00"), expr.valueOf()); } @Test public void conversionToInvalidInput3Over() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("+16:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -54,10 +56,10 @@ public void 
conversionToInvalidInput3Over() { public void conversionToInvalidInput3Under() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("-16:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -65,10 +67,10 @@ public void conversionToInvalidInput3Under() { public void conversionFromPositiveToPositive() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+15:00"), DSL.literal("+01:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -76,10 +78,10 @@ public void conversionFromPositiveToPositive() { public void invalidInput2Under() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("-15:00"), DSL.literal("+01:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -87,10 +89,10 @@ public void invalidInput2Under() { public void invalidInput3Over() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("-12:00"), DSL.literal("+15:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -98,32 +100,32 @@ public void invalidInput3Over() { public void conversionToPositiveEdge() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("+14:00")); - assertEquals(DATETIME, expr.type()); - 
assertEquals(new ExprDatetimeValue("2008-05-16 12:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-16 12:00:00"), expr.valueOf()); } @Test public void conversionToNegativeEdge() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:01"), DSL.literal("-13:59")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 08:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 08:00:00"), expr.valueOf()); } @Test public void invalidInput2() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+)()"), DSL.literal("+12:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -131,10 +133,10 @@ public void invalidInput2() { public void invalidInput3() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2008-05-15 22:00:00")), + DSL.timestamp(DSL.literal("2008-05-15 22:00:00")), DSL.literal("+00:00"), DSL.literal("test")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -142,7 +144,7 @@ public void invalidInput3() { public void invalidInput1() { FunctionExpression expr = DSL.convert_tz(DSL.literal("test"), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -150,32 +152,32 @@ public void invalidInput1() { public void invalidDateFeb30() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-02-30 10:00:00")), + DSL.timestamp(DSL.literal("2021-02-30 10:00:00")), DSL.literal("+00:00"), 
DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } @Test public void invalidDateApril31() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-04-31 10:00:00")), + DSL.timestamp(DSL.literal("2021-04-31 10:00:00")), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } @Test public void invalidMonth13() { FunctionExpression expr = DSL.convert_tz( - DSL.datetime(DSL.literal("2021-13-03 10:00:00")), + DSL.timestamp(DSL.literal("2021-13-03 10:00:00")), DSL.literal("+00:00"), DSL.literal("+00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(nullValue(), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertThrows(SemanticCheckException.class, expr::valueOf); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java index 52db0a17e5..519e97bdc6 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateAddAndAddDateTest.java @@ -8,7 +8,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.Duration; import java.time.Instant; @@ -29,81 +29,99 @@ private LocalDate today() { @Test public void adddate_returns_datetime_when_args_are_time_and_time_interval() { var res = 
adddate(LocalTime.MIN, Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(1, 2).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(1, 2).atDate(today()), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void date_add_returns_datetime_when_args_are_time_and_time_interval() { var res = date_add(LocalTime.of(10, 20, 30), Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(11, 23, 12).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(11, 23, 12).atDate(today()), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_time_limited_by_24_hours() { var res = adddate(LocalTime.MAX, Duration.ofNanos(1)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.MIN, res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.MIN, res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void date_add_time_limited_by_24_hours() { var res = date_add(LocalTime.of(10, 20, 30), Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(7, 10, 37), res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(7, 10, 37), res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void adddate_returns_datetime_when_args_are_date_and_date_interval() { var res = adddate(LocalDate.of(2020, 2, 20), Period.of(3, 11, 21)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(2024, 2, 10).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDate.of(2024, 2, 10).atStartOfDay(), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test 
public void date_add_returns_datetime_when_args_are_date_and_date_interval() { var res = date_add(LocalDate.of(1961, 4, 12), Period.of(50, 50, 50)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(2015, 8, 1).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDate.of(2015, 8, 1).atStartOfDay(), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_returns_datetime_when_args_are_date_and_time_interval() { var res = adddate(LocalDate.of(2020, 2, 20), Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 2, 20, 1, 2), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2020, 2, 20, 1, 2), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void date_add_returns_datetime_when_args_are_date_and_time_interval() { var res = date_add(LocalDate.of(1961, 4, 12), Duration.ofHours(9).plusMinutes(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 9, 7), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_returns_datetime_when_args_are_time_and_date_interval() { // Date based on today var res = adddate(LocalTime.of(1, 2, 0), Period.ofDays(1)); - assertEquals(DATETIME, res.type()); - assertEquals(today().plusDays(1).atTime(LocalTime.of(1, 2, 0)), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + today().plusDays(1).atTime(LocalTime.of(1, 2, 0)), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void date_add_returns_datetime_when_args_are_time_and_date_interval() { var res = date_add(LocalTime.MIDNIGHT, Period.ofDays(0)); - assertEquals(DATETIME, res.type()); - 
assertEquals(today().atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + today().atStartOfDay(), res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_returns_datetime_when_first_arg_is_datetime() { var res = adddate(LocalDateTime.of(1961, 4, 12, 9, 7), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 10, 55), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 10, 55), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test @@ -111,30 +129,34 @@ public void date_add_returns_datetime_when_first_arg_is_timestamp() { var res = date_add( LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 10, 55), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 10, 55), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_accepts_negative_interval() { var res = adddate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(-10)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 10, 14, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2020, 10, 10, 14, 42), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); assertEquals(subdate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(10)), res); } @Test public void adddate_has_second_signature_but_not_date_add() { var res = adddate(LocalDateTime.of(1961, 4, 12, 9, 7), 100500); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); var exception = assertThrows( ExpressionEvaluationException.class, () -> date_add(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); assertEquals( - "date_add 
function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," - + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", + "date_add function expected {[DATE,INTERVAL],[TIMESTAMP,INTERVAL]," + + "[TIME,INTERVAL]}, but get [TIMESTAMP,INTEGER]", exception.getMessage()); } @@ -148,23 +170,29 @@ public void adddate_returns_date_when_args_are_date_and_days() { @Test public void adddate_returns_datetime_when_args_are_date_but_days() { var res = adddate(LocalDate.of(2000, 1, 1).atStartOfDay(), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2000, 1, 3, 0, 0), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2000, 1, 3, 0, 0), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); res = adddate(LocalTime.now(), 2); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); assertEquals(today().plusDays(2), res.dateValue()); res = adddate(Instant.ofEpochSecond(42), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1970, 1, 3, 0, 0, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(1970, 1, 3, 0, 0, 42), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } @Test public void adddate_accepts_negative_days() { var res = adddate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), -40); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 20, 8, 16, 32).minusDays(40), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalDateTime.of(2020, 10, 20, 8, 16, 32).minusDays(40), + res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); assertEquals(subdate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), 40), res); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java index a630758456..16d585d73e 100644 --- 
a/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateDiffTest.java @@ -33,7 +33,7 @@ public class DateDiffTest extends DateTimeTestBase { private static final LocalDateTime dateTimeSample2 = LocalDateTime.of(1993, 3, 4, 5, 6); // Function signature is: - // (DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME) -> LONG + // (DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME) -> LONG private static Stream getTestData() { // Arguments are: first argument for `DATE_DIFF` function, second argument and expected result. return Stream.of( diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java index 460e12384b..123ecda0bd 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateSubAndSubDateTest.java @@ -8,7 +8,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.Duration; import java.time.Instant; @@ -18,10 +18,15 @@ import java.time.Period; import java.time.ZoneOffset; import org.junit.jupiter.api.Test; +import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.exception.ExpressionEvaluationException; public class DateSubAndSubDateTest extends DateTimeTestBase { + private LocalDateTime toLocalDateTime(ExprValue res) { + return res.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); + } + private LocalDate today() { return LocalDate.now(functionProperties.getQueryStartClock()); } @@ -29,81 +34,82 @@ private LocalDate today() { @Test public void 
subdate_returns_datetime_when_args_are_time_and_time_interval() { var res = subdate(LocalTime.of(21, 0), Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(19, 58).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.of(19, 58).atDate(today()), toLocalDateTime(res)); } @Test public void date_sub_returns_datetime_when_args_are_time_and_time_interval() { var res = date_sub(LocalTime.of(10, 20, 30), Duration.ofHours(1).plusMinutes(2).plusSeconds(42)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(9, 17, 48).atDate(today()), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.of(9, 17, 48).atDate(today()), toLocalDateTime(res)); } @Test public void subdate_time_limited_by_24_hours() { var res = subdate(LocalTime.MIN, Duration.ofNanos(1)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.MAX, res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalTime.MAX, res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void date_sub_time_limited_by_24_hours() { var res = date_sub(LocalTime.of(10, 20, 30), Duration.ofHours(20).plusMinutes(50).plusSeconds(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalTime.of(13, 30, 23), res.datetimeValue().toLocalTime()); + assertEquals(TIMESTAMP, res.type()); + assertEquals( + LocalTime.of(13, 30, 23), res.timestampValue().atZone(ZoneOffset.UTC).toLocalTime()); } @Test public void subdate_returns_datetime_when_args_are_date_and_date_interval() { var res = subdate(LocalDate.of(2020, 2, 20), Period.of(3, 11, 21)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(2016, 2, 28).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDate.of(2016, 2, 28).atStartOfDay(), toLocalDateTime(res)); } @Test public void 
date_sub_returns_datetime_when_args_are_date_and_date_interval() { var res = date_sub(LocalDate.of(1961, 4, 12), Period.of(50, 50, 50)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDate.of(1906, 12, 24).atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDate.of(1906, 12, 24).atStartOfDay(), toLocalDateTime(res)); } @Test public void subdate_returns_datetime_when_args_are_date_and_time_interval() { var res = subdate(LocalDate.of(2020, 2, 20), Duration.ofHours(1).plusMinutes(2)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 2, 19, 22, 58), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(2020, 2, 19, 22, 58), toLocalDateTime(res)); } @Test public void date_sub_returns_datetime_when_args_are_date_and_time_interval() { var res = date_sub(LocalDate.of(1961, 4, 12), Duration.ofHours(9).plusMinutes(7)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 11, 14, 53), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1961, 4, 11, 14, 53), toLocalDateTime(res)); } @Test public void subdate_returns_datetime_when_args_are_time_and_date_interval() { // Date based on today var res = subdate(LocalTime.of(1, 2, 0), Period.ofDays(1)); - assertEquals(DATETIME, res.type()); - assertEquals(today().minusDays(1).atTime(LocalTime.of(1, 2, 0)), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(today().minusDays(1).atTime(LocalTime.of(1, 2, 0)), toLocalDateTime(res)); } @Test public void date_sub_returns_datetime_when_args_are_time_and_date_interval() { var res = date_sub(LocalTime.MIDNIGHT, Period.ofDays(0)); - assertEquals(DATETIME, res.type()); - assertEquals(today().atStartOfDay(), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(today().atStartOfDay(), toLocalDateTime(res)); } @Test public void 
subdate_returns_datetime_when_first_arg_is_datetime() { var res = subdate(LocalDateTime.of(1961, 4, 12, 9, 7), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), toLocalDateTime(res)); } @Test @@ -111,30 +117,30 @@ public void date_sub_returns_datetime_when_first_arg_is_timestamp() { var res = date_sub( LocalDateTime.of(1961, 4, 12, 9, 7).toInstant(ZoneOffset.UTC), Duration.ofMinutes(108)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1961, 4, 12, 7, 19), toLocalDateTime(res)); } @Test public void subdate_accepts_negative_interval() { var res = subdate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(-10)); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 30, 14, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(2020, 10, 30, 14, 42), toLocalDateTime(res)); assertEquals(adddate(LocalDateTime.of(2020, 10, 20, 14, 42), Duration.ofDays(10)), res); } @Test public void subdate_has_second_signature_but_not_date_sub() { var res = subdate(LocalDateTime.of(1961, 4, 12, 9, 7), 100500); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); var exception = assertThrows( ExpressionEvaluationException.class, () -> date_sub(LocalDateTime.of(1961, 4, 12, 9, 7), 100500)); assertEquals( - "date_sub function expected {[DATE,INTERVAL],[DATETIME,INTERVAL]," - + "[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get [DATETIME,INTEGER]", + "date_sub function expected {[DATE,INTERVAL],[TIMESTAMP,INTERVAL],[TIME,INTERVAL]}, but get" + + " [TIMESTAMP,INTEGER]", exception.getMessage()); } @@ -148,23 +154,23 @@ public void subdate_returns_date_when_args_are_date_and_days() { 
@Test public void subdate_returns_datetime_when_args_are_date_but_days() { var res = subdate(LocalDate.of(2000, 1, 1).atStartOfDay(), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1999, 12, 30, 0, 0), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1999, 12, 30, 0, 0), toLocalDateTime(res)); res = subdate(LocalTime.now(), 2); - assertEquals(DATETIME, res.type()); + assertEquals(TIMESTAMP, res.type()); assertEquals(today().minusDays(2), res.dateValue()); res = subdate(Instant.ofEpochSecond(42), 2); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(1969, 12, 30, 0, 0, 42), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(1969, 12, 30, 0, 0, 42), toLocalDateTime(res)); } @Test public void subdate_accepts_negative_days() { var res = subdate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), -40); - assertEquals(DATETIME, res.type()); - assertEquals(LocalDateTime.of(2020, 10, 20, 8, 16, 32).plusDays(40), res.datetimeValue()); + assertEquals(TIMESTAMP, res.type()); + assertEquals(LocalDateTime.of(2020, 10, 20, 8, 16, 32).plusDays(40), toLocalDateTime(res)); assertEquals(adddate(LocalDateTime.of(2020, 10, 20, 8, 16, 32), 40), res); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java index c2a6129626..d4ee7c44da 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeFunctionTest.java @@ -32,7 +32,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import 
org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprLongValue; @@ -398,10 +397,6 @@ private static Stream getTestDataForDayOfYear() { return Stream.of( Arguments.of( DSL.literal(new ExprDateValue("2020-08-07")), "day_of_year(DATE '2020-08-07')", 220), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-07 12:23:34")), - "day_of_year(DATETIME '2020-08-07 12:23:34')", - 220), Arguments.of( DSL.literal(new ExprTimestampValue("2020-08-07 12:23:34")), "day_of_year(TIMESTAMP '2020-08-07 12:23:34')", @@ -519,11 +514,6 @@ private static Stream getTestDataForGetFormat() { Arguments.of("DATE", "ISO", "%Y-%m-%d"), Arguments.of("DATE", "EUR", "%d.%m.%Y"), Arguments.of("DATE", "INTERNAL", "%Y%m%d"), - Arguments.of("DATETIME", "USA", "%Y-%m-%d %H.%i.%s"), - Arguments.of("DATETIME", "JIS", "%Y-%m-%d %H:%i:%s"), - Arguments.of("DATETIME", "ISO", "%Y-%m-%d %H:%i:%s"), - Arguments.of("DATETIME", "EUR", "%Y-%m-%d %H.%i.%s"), - Arguments.of("DATETIME", "INTERNAL", "%Y%m%d%H%i%s"), Arguments.of("TIME", "USA", "%h:%i:%s %p"), Arguments.of("TIME", "JIS", "%H:%i:%s"), Arguments.of("TIME", "ISO", "%H:%i:%s"), @@ -572,11 +562,6 @@ public void hour() { assertEquals(integerValue(1), expression.valueOf()); assertEquals("hour(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.hour(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(1), expression.valueOf()); - assertEquals("hour(DATETIME '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.hour(DSL.literal("2020-08-17 01:02:03")); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(1), expression.valueOf()); @@ -617,9 +602,7 @@ public void hourOfDay() { FunctionExpression expression2 = DSL.hour_of_day(DSL.literal("01:02:03")); FunctionExpression expression3 = DSL.hour_of_day(DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03"))); - FunctionExpression 
expression4 = - DSL.hour_of_day(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - FunctionExpression expression5 = DSL.hour_of_day(DSL.literal("2020-08-17 01:02:03")); + FunctionExpression expression4 = DSL.hour_of_day(DSL.literal("2020-08-17 01:02:03")); assertAll( () -> hourOfDayQuery(expression1, 1), @@ -629,9 +612,7 @@ public void hourOfDay() { () -> hourOfDayQuery(expression3, 1), () -> assertEquals("hour_of_day(TIMESTAMP '2020-08-17 01:02:03')", expression3.toString()), () -> hourOfDayQuery(expression4, 1), - () -> assertEquals("hour_of_day(DATETIME '2020-08-17 01:02:03')", expression4.toString()), - () -> hourOfDayQuery(expression5, 1), - () -> assertEquals("hour_of_day(\"2020-08-17 01:02:03\")", expression5.toString())); + () -> assertEquals("hour_of_day(\"2020-08-17 01:02:03\")", expression4.toString())); } private void invalidHourOfDayQuery(String time) { @@ -731,15 +712,10 @@ public void microsecond() { assertEquals(integerValue(120000), eval(expression)); assertEquals("microsecond(\"01:02:03.12\")", expression.toString()); - expression = DSL.microsecond(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03.000010"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(10), expression.valueOf()); - assertEquals("microsecond(DATETIME '2020-08-17 01:02:03.00001')", expression.toString()); - - expression = DSL.microsecond(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03.123456"))); + expression = DSL.microsecond(DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03.123456"))); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(123456), expression.valueOf()); - assertEquals("microsecond(DATETIME '2020-08-17 01:02:03.123456')", expression.toString()); + assertEquals("microsecond(TIMESTAMP '2020-08-17 01:02:03.123456')", expression.toString()); expression = DSL.microsecond(DSL.literal("2020-08-17 01:02:03.123456")); assertEquals(INTEGER, expression.type()); @@ -769,11 +745,6 @@ public void minute() { 
assertEquals(integerValue(2), expression.valueOf()); assertEquals("minute(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(2), expression.valueOf()); - assertEquals("minute(DATETIME '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute(DSL.literal("2020-08-17 01:02:03")); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(2), expression.valueOf()); @@ -803,11 +774,6 @@ public void minuteOfDay() { assertEquals(integerValue(62), expression.valueOf()); assertEquals("minute_of_day(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute_of_day(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(62), expression.valueOf()); - assertEquals("minute_of_day(DATETIME '2020-08-17 01:02:03')", expression.toString()); - expression = DSL.minute_of_day(DSL.literal("2020-08-17 01:02:03")); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(62), expression.valueOf()); @@ -833,10 +799,6 @@ private static Stream getTestDataForMinuteOfHour() { DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03")), 2, "minute_of_hour(TIMESTAMP '2020-08-17 01:02:03')"), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03")), - 2, - "minute_of_hour(DATETIME '2020-08-17 01:02:03')"), Arguments.of( DSL.literal("2020-08-17 01:02:03"), 2, "minute_of_hour(\"2020-08-17 01:02:03\")")); } @@ -894,10 +856,6 @@ private static Stream getTestDataForMonthOfYear() { return Stream.of( Arguments.of( DSL.literal(new ExprDateValue("2020-08-07")), "month_of_year(DATE '2020-08-07')", 8), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-07 12:23:34")), - "month_of_year(DATETIME '2020-08-07 12:23:34')", - 8), Arguments.of( DSL.literal(new 
ExprTimestampValue("2020-08-07 12:23:34")), "month_of_year(TIMESTAMP '2020-08-07 12:23:34')", @@ -1052,11 +1010,6 @@ public void second() { assertEquals(INTEGER, expression.type()); assertEquals(integerValue(3), expression.valueOf()); assertEquals("second(TIMESTAMP '2020-08-17 01:02:03')", expression.toString()); - - expression = DSL.second(DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03"))); - assertEquals(INTEGER, expression.type()); - assertEquals(integerValue(3), expression.valueOf()); - assertEquals("second(DATETIME '2020-08-17 01:02:03')", expression.toString()); } private void secondOfMinuteQuery(FunctionExpression dateExpression, int second, String testExpr) { @@ -1075,11 +1028,7 @@ private static Stream getTestDataForSecondOfMinute() { Arguments.of( DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03")), 3, - "second_of_minute(TIMESTAMP '2020-08-17 01:02:03')"), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2020-08-17 01:02:03")), - 3, - "second_of_minute(DATETIME '2020-08-17 01:02:03')")); + "second_of_minute(TIMESTAMP '2020-08-17 01:02:03')")); } @ParameterizedTest(name = "{2}") @@ -1253,10 +1202,6 @@ private void validateStringFormat( private static Stream getTestDataForWeekFormats() { return Stream.of( Arguments.of(DSL.literal(new ExprDateValue("2019-01-05")), "DATE '2019-01-05'", 0), - Arguments.of( - DSL.literal(new ExprDatetimeValue("2019-01-05 01:02:03")), - "DATETIME '2019-01-05 01:02:03'", - 0), Arguments.of( DSL.literal(new ExprTimestampValue("2019-01-05 01:02:03")), "TIMESTAMP '2019-01-05 01:02:03'", diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java index d857122534..4bec093b57 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTest.java @@ -7,7 +7,7 @@ import static 
org.junit.jupiter.api.Assertions.assertEquals; import static org.opensearch.sql.data.model.ExprValueUtils.nullValue; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.LocalDateTime; import java.time.ZoneId; @@ -15,7 +15,7 @@ import java.time.format.DateTimeFormatter; import java.util.TimeZone; import org.junit.jupiter.api.Test; -import org.opensearch.sql.data.model.ExprDatetimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.ExpressionTestBase; import org.opensearch.sql.expression.FunctionExpression; @@ -25,23 +25,23 @@ class DateTimeTest extends ExpressionTestBase { @Test public void noTimeZoneNoField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 22:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 22:00:00"), expr.valueOf()); } @Test public void positiveTimeZoneNoField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00+01:00")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 22:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 22:00:00"), expr.valueOf()); } @Test public void positiveField1WrittenField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00+01:00"), DSL.literal("America/Los_Angeles")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-15 14:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-15 14:00:00"), expr.valueOf()); } // When no timezone argument is passed inside the datetime field, it assumes local time. 
@@ -57,23 +57,23 @@ public void localDateTimeConversion() { .atZone(ZoneId.of(TimeZone.getDefault().getID())) .withZoneSameInstant(ZoneId.of(timeZone)); FunctionExpression expr = DSL.datetime(DSL.literal(dt), DSL.literal(timeZone)); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue(timeZoneLocal.toLocalDateTime()), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue(timeZoneLocal.toLocalDateTime()), expr.valueOf()); } @Test public void negativeField1WrittenField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), DSL.literal("America/Los_Angeles")); - assertEquals(DATETIME, expr.type()); - assertEquals(new ExprDatetimeValue("2008-05-16 02:00:00"), expr.valueOf()); + assertEquals(TIMESTAMP, expr.type()); + assertEquals(new ExprTimestampValue("2008-05-16 02:00:00"), expr.valueOf()); } @Test public void negativeField1PositiveField2() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-12:00"), DSL.literal("+15:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -81,7 +81,7 @@ public void negativeField1PositiveField2() { public void twentyFourHourDifference() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-14:00"), DSL.literal("+10:00")); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @@ -89,14 +89,14 @@ public void twentyFourHourDifference() { public void negativeToNull() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-05-15 22:00:00-11:00"), DSL.literal(nullValue())); - assertEquals(DATETIME, expr.type()); + assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } @Test public void invalidDate() { FunctionExpression expr = DSL.datetime(DSL.literal("2008-04-31 22:00:00-11:00")); - assertEquals(DATETIME, expr.type()); + 
assertEquals(TIMESTAMP, expr.type()); assertEquals(nullValue(), expr.valueOf()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java index 023a3574aa..865c162f76 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/DateTimeTestBase.java @@ -11,10 +11,10 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.temporal.Temporal; import java.util.List; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; @@ -91,7 +91,8 @@ protected Long datediff(Temporal first, Temporal second) { } protected LocalDateTime fromUnixTime(Double value) { - return fromUnixTime(DSL.literal(value)).valueOf().datetimeValue(); + return LocalDateTime.ofInstant( + fromUnixTime(DSL.literal(value)).valueOf().timestampValue(), ZoneOffset.UTC); } protected FunctionExpression fromUnixTime(Expression value) { @@ -109,7 +110,8 @@ protected FunctionExpression fromUnixTime(Expression value, Expression format) { } protected LocalDateTime fromUnixTime(Long value) { - return fromUnixTime(DSL.literal(value)).valueOf().datetimeValue(); + return LocalDateTime.ofInstant( + fromUnixTime(DSL.literal(value)).valueOf().timestampValue(), ZoneOffset.UTC); } protected String fromUnixTime(Long value, String format) { @@ -223,7 +225,7 @@ protected Double unixTimeStampOf(LocalDate value) { } protected Double unixTimeStampOf(LocalDateTime value) { - return unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value))).valueOf().doubleValue(); + return unixTimeStampOf(DSL.literal(new 
ExprTimestampValue(value))).valueOf().doubleValue(); } protected Double unixTimeStampOf(Instant value) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java index 820158b722..02d50d0b59 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ExtractTest.java @@ -16,8 +16,8 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; @@ -72,12 +72,12 @@ private static Stream getDateResultsForExtractFunction() { }) public void testExtractWithDatetime(String part, long expected) { FunctionExpression datetimeExpression = - DSL.extract(DSL.literal(part), DSL.literal(new ExprDatetimeValue(datetimeInput))); + DSL.extract(DSL.literal(part), DSL.literal(new ExprTimestampValue(datetimeInput))); assertEquals(LONG, datetimeExpression.type()); assertEquals(expected, eval(datetimeExpression).longValue()); assertEquals( - String.format("extract(\"%s\", DATETIME '2023-02-11 10:11:12.123')", part), + String.format("extract(\"%s\", TIMESTAMP '2023-02-11 10:11:12.123')", part), datetimeExpression.toString()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java index 8fcc6904b2..a6d1da003f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java +++ 
b/core/src/test/java/org/opensearch/sql/expression/datetime/FromUnixTimeTest.java @@ -46,7 +46,9 @@ public void checkOfLong(Long value) { LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), fromUnixTime(value)); assertEquals( LocalDateTime.of(1970, 1, 1, 0, 0, 0).plus(value, ChronoUnit.SECONDS), - eval(fromUnixTime(DSL.literal(new ExprLongValue(value)))).datetimeValue()); + LocalDateTime.ofInstant( + eval(fromUnixTime(DSL.literal(new ExprLongValue(value)))).timestampValue(), + ZoneOffset.UTC)); } private static Stream getDoubleSamples() { @@ -76,7 +78,9 @@ public void checkOfDouble(Double value) { valueAsString); assertEquals( LocalDateTime.ofEpochSecond(intPart, (int) Math.round(fracPart * 1E9), ZoneOffset.UTC), - eval(fromUnixTime(DSL.literal(new ExprDoubleValue(value)))).datetimeValue(), + LocalDateTime.ofInstant( + eval(fromUnixTime(DSL.literal(new ExprDoubleValue(value)))).timestampValue(), + ZoneOffset.UTC), valueAsString); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java index 0e5c00084f..8b795786c0 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/NowLikeFunctionTest.java @@ -9,13 +9,13 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import 
java.time.temporal.ChronoUnit; import java.time.temporal.Temporal; @@ -45,7 +45,7 @@ class NowLikeFunctionTest extends ExpressionTestBase { void now() { test_now_like_functions( DSL::now, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @@ -54,7 +54,7 @@ void now() { void current_timestamp() { test_now_like_functions( DSL::current_timestamp, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @@ -63,7 +63,7 @@ void current_timestamp() { void localtimestamp() { test_now_like_functions( DSL::localtimestamp, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @@ -72,14 +72,14 @@ void localtimestamp() { void localtime() { test_now_like_functions( DSL::localtime, - DATETIME, + TIMESTAMP, false, () -> LocalDateTime.now(functionProperties.getQueryStartClock())); } @Test void sysdate() { - test_now_like_functions(DSL::sysdate, DATETIME, true, LocalDateTime::now); + test_now_like_functions(DSL::sysdate, TIMESTAMP, true, LocalDateTime::now); } @Test @@ -128,14 +128,14 @@ void utc_time() { @Test void utc_timestamp() { test_now_like_functions( - DSL::utc_timestamp, DATETIME, false, () -> utcDateTimeNow(functionProperties)); + DSL::utc_timestamp, TIMESTAMP, false, () -> utcDateTimeNow(functionProperties)); } private static LocalDateTime utcDateTimeNow(FunctionProperties functionProperties) { ZonedDateTime zonedDateTime = LocalDateTime.now(functionProperties.getQueryStartClock()) .atZone(TimeZone.getDefault().toZoneId()); - return zonedDateTime.withZoneSameInstant(UTC_ZONE_ID).toLocalDateTime(); + return zonedDateTime.withZoneSameInstant(ZoneOffset.UTC).toLocalDateTime(); } /** @@ -249,8 +249,8 @@ private Temporal extractValue(FunctionExpression func) { switch ((ExprCoreType) func.type()) { case DATE: return func.valueOf().dateValue(); - case DATETIME: - return func.valueOf().datetimeValue(); + case TIMESTAMP: + return 
LocalDateTime.ofInstant(func.valueOf().timestampValue(), ZoneOffset.UTC); case TIME: return func.valueOf().timeValue(); // unreachable code diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java index 42d4aab1f6..7f0861d9c3 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/StrToDateTest.java @@ -6,21 +6,23 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import static org.opensearch.sql.data.type.ExprCoreType.UNDEFINED; +import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import java.time.ZoneOffset; import java.util.stream.Stream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.expression.DSL; @@ -34,23 +36,23 @@ private static Stream getTestDataForStrToDate() { return Stream.of( // Date arguments Arguments.of( - "01,5,2013", "%d,%m,%Y", new ExprDatetimeValue("2013-05-01 00:00:00"), DATETIME), + "01,5,2013", "%d,%m,%Y", new ExprTimestampValue("2013-05-01 00:00:00"), TIMESTAMP), Arguments.of( - "May 1, 2013", "%M %d, %Y", new ExprDatetimeValue("2013-05-01 
00:00:00"), DATETIME), + "May 1, 2013", "%M %d, %Y", new ExprTimestampValue("2013-05-01 00:00:00"), TIMESTAMP), Arguments.of( "May 1, 2013 - 9,23,11", "%M %d, %Y - %h,%i,%s", - new ExprDatetimeValue("2013-05-01 09:23:11"), - DATETIME), + new ExprTimestampValue("2013-05-01 09:23:11"), + TIMESTAMP), Arguments.of( - "2000,1,1", "%Y,%m,%d", new ExprDatetimeValue("2000-01-01 00:00:00"), DATETIME), + "2000,1,1", "%Y,%m,%d", new ExprTimestampValue("2000-01-01 00:00:00"), TIMESTAMP), Arguments.of( - "2000,1,1,10", "%Y,%m,%d,%h", new ExprDatetimeValue("2000-01-01 10:00:00"), DATETIME), + "2000,1,1,10", "%Y,%m,%d,%h", new ExprTimestampValue("2000-01-01 10:00:00"), TIMESTAMP), Arguments.of( "2000,1,1,10,11", "%Y,%m,%d,%h,%i", - new ExprDatetimeValue("2000-01-01 10:11:00"), - DATETIME), + new ExprTimestampValue("2000-01-01 10:11:00"), + TIMESTAMP), // Invalid Arguments (should return null) Arguments.of("a09:30:17", "a%h:%i:%s", ExprNullValue.of(), UNDEFINED), @@ -108,20 +110,22 @@ public void test_str_to_date_with_time_type(String parsed, String format) { ExprValue result = eval(expression); - assertEquals(DATETIME, result.type()); - assertEquals(getExpectedTimeResult(9, 23, 11), result.datetimeValue()); + assertEquals(TIMESTAMP, result.type()); + assertEquals( + getExpectedTimeResult(9, 23, 11), + LocalDateTime.ofInstant(result.timestampValue(), ZoneOffset.UTC)); } @Test public void test_str_to_date_with_date_format() { - LocalDateTime arg = LocalDateTime.of(2023, 2, 27, 10, 11, 12); + Instant arg = Instant.parse("2023-02-27T10:11:12Z"); String format = "%Y,%m,%d %h,%i,%s"; FunctionExpression dateFormatExpr = DSL.date_format( functionProperties, - DSL.literal(new ExprDatetimeValue(arg)), + DSL.literal(new ExprTimestampValue(arg)), DSL.literal(new ExprStringValue(format))); String dateFormatResult = eval(dateFormatExpr).stringValue(); @@ -130,7 +134,7 @@ public void test_str_to_date_with_date_format() { functionProperties, DSL.literal(new ExprStringValue(dateFormatResult)), 
DSL.literal(new ExprStringValue(format))); - LocalDateTime strToDateResult = eval(strToDateExpr).datetimeValue(); + Instant strToDateResult = eval(strToDateExpr).timestampValue(); assertEquals(arg, strToDateResult); } @@ -156,7 +160,8 @@ public void test_str_to_date_with_time_format() { functionProperties, DSL.literal(new ExprStringValue(timeFormatResult)), DSL.literal(new ExprStringValue(format))); - LocalDateTime strToDateResult = eval(strToDateExpr).datetimeValue(); + LocalDateTime strToDateResult = + LocalDateTime.ofInstant(eval(strToDateExpr).timestampValue(), ZoneOffset.UTC); assertEquals(getExpectedTimeResult(HOURS, MINUTES, SECONDS), strToDateResult); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java index 243eb6bb7b..13f4f20704 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampAddTest.java @@ -18,7 +18,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; @@ -43,12 +42,6 @@ private static Stream getTestDataForTimestampAdd() { Arguments.of("MINUTE", 1, new ExprDateValue("2003-01-02"), "2003-01-02 00:01:00"), Arguments.of("WEEK", 1, new ExprDateValue("2003-01-02"), "2003-01-09 00:00:00"), - // Datetime - Arguments.of( - "MINUTE", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), "2003-01-02 00:01:00"), - Arguments.of( - "WEEK", 1, new ExprDatetimeValue("2003-01-02 00:00:00"), "2003-01-09 00:00:00"), - // Timestamp Arguments.of( "MINUTE", 1, new ExprTimestampValue("2003-01-02 00:00:00"), "2003-01-02 
00:01:00"), @@ -125,7 +118,7 @@ private static FunctionExpression timestampaddQuery( @MethodSource("getTestDataForTimestampAdd") public void testTimestampadd(String unit, int amount, ExprValue datetimeExpr, String expected) { FunctionExpression expr = timestampaddQuery(unit, amount, datetimeExpr); - assertEquals(new ExprDatetimeValue(expected), eval(expr)); + assertEquals(new ExprTimestampValue(expected), eval(expr)); } private static Stream getTestDataForTestAddingDatePartToTime() { @@ -165,7 +158,7 @@ public void testAddingDatePartToTime( LocalDateTime expected1 = LocalDateTime.of(expectedDate, LocalTime.parse(timeArg)); - assertEquals(new ExprDatetimeValue(expected1), eval(expr)); + assertEquals(new ExprTimestampValue(expected1), eval(expr)); } @Test @@ -184,7 +177,7 @@ public void testAddingTimePartToTime() { LocalDateTime expected = LocalDateTime.of(LocalDate.now(), LocalTime.parse(timeArg).plusMinutes(addedInterval)); - assertEquals(new ExprDatetimeValue(expected), eval(expr)); + assertEquals(new ExprTimestampValue(expected), eval(expr)); } @Test @@ -196,15 +189,11 @@ public void testDifferentInputTypesHaveSameResult() { FunctionExpression stringExpr = timestampaddQuery(part, amount, new ExprStringValue("2000-01-01 00:00:00")); - FunctionExpression datetimeExpr = - timestampaddQuery(part, amount, new ExprDatetimeValue("2000-01-01 00:00:00")); - FunctionExpression timestampExpr = timestampaddQuery(part, amount, new ExprTimestampValue("2000-01-01 00:00:00")); assertAll( () -> assertEquals(eval(dateExpr), eval(stringExpr)), - () -> assertEquals(eval(dateExpr), eval(datetimeExpr)), () -> assertEquals(eval(dateExpr), eval(timestampExpr))); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java index 061420ceee..b5ac3b078f 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java +++ 
b/core/src/test/java/org/opensearch/sql/expression/datetime/TimeStampDiffTest.java @@ -21,7 +21,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; @@ -80,8 +79,6 @@ private static ExprValue generateArg( return new ExprTimestampValue(arg.toInstant(ZoneOffset.UTC)); case "DATE": return new ExprDateValue(arg.toLocalDate()); - case "DATETIME": - return new ExprDatetimeValue(arg); case "STRING": return new ExprStringValue( String.format( @@ -118,7 +115,7 @@ private static Stream getGeneralTestDataForTimestampDiff() { final String[] intervalTypes = ArrayUtils.addAll(timeIntervalTypes, dateIntervalTypes); // TIME type not included here as it is a special case handled by a different test - final String[] expressionTypes = {"DATE", "DATETIME", "TIMESTAMP", "STRING"}; + final String[] expressionTypes = {"DATE", "TIMESTAMP", "STRING"}; final LocalDateTime baseDateTime = LocalDateTime.of(2000, 1, 1, 0, 0, 0); final int intervalDifference = 5; @@ -159,30 +156,30 @@ private static Stream getCornerCaseTestDataForTimestampDiff() { // Test around Leap Year Arguments.of( "DAY", - new ExprDatetimeValue("2019-02-28 00:00:00"), - new ExprDatetimeValue("2019-03-01 00:00:00"), + new ExprTimestampValue("2019-02-28 00:00:00"), + new ExprTimestampValue("2019-03-01 00:00:00"), 1), Arguments.of( "DAY", - new ExprDatetimeValue("2020-02-28 00:00:00"), - new ExprDatetimeValue("2020-03-01 00:00:00"), + new ExprTimestampValue("2020-02-28 00:00:00"), + new ExprTimestampValue("2020-03-01 00:00:00"), 2), // Test around year change Arguments.of( "SECOND", - new ExprDatetimeValue("2019-12-31 23:59:59"), - new ExprDatetimeValue("2020-01-01 00:00:00"), + new 
ExprTimestampValue("2019-12-31 23:59:59"), + new ExprTimestampValue("2020-01-01 00:00:00"), 1), Arguments.of( "DAY", - new ExprDatetimeValue("2019-12-31 23:59:59"), - new ExprDatetimeValue("2020-01-01 00:00:00"), + new ExprTimestampValue("2019-12-31 23:59:59"), + new ExprTimestampValue("2020-01-01 00:00:00"), 0), Arguments.of( "DAY", - new ExprDatetimeValue("2019-12-31 00:00:00"), - new ExprDatetimeValue("2020-01-01 00:00:00"), + new ExprTimestampValue("2019-12-31 00:00:00"), + new ExprTimestampValue("2020-01-01 00:00:00"), 1)); } @@ -295,13 +292,6 @@ public void testDifferentInputTypesHaveSameResult() { new ExprStringValue("2000-01-01 00:00:00"), new ExprStringValue("2000-01-02 00:00:00")); - FunctionExpression datetimeExpr = - timestampdiffQuery( - functionProperties, - part, - new ExprDatetimeValue("2000-01-01 00:00:00"), - new ExprDatetimeValue("2000-01-02 00:00:00")); - FunctionExpression timestampExpr = timestampdiffQuery( functionProperties, @@ -311,7 +301,6 @@ public void testDifferentInputTypesHaveSameResult() { assertAll( () -> assertEquals(eval(dateExpr), eval(stringExpr)), - () -> assertEquals(eval(dateExpr), eval(datetimeExpr)), () -> assertEquals(eval(dateExpr), eval(timestampExpr))); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java index 7d25c0041b..5aebec9e78 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/TimestampTest.java @@ -8,12 +8,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; +import 
java.time.ZoneOffset; import java.util.stream.Stream; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; @@ -39,7 +39,9 @@ public void timestamp_one_arg_string() { expr = DSL.timestamp(functionProperties, DSL.literal("1961-04-12 09:07:00.123456")); assertEquals(TIMESTAMP, expr.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7, 0, 123456000), expr.valueOf().datetimeValue()); + assertEquals( + LocalDateTime.of(1961, 4, 12, 9, 7, 0, 123456000), + expr.valueOf().timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } /** @@ -71,7 +73,8 @@ public void timestamp_one_arg_string_invalid_format(String value, String testNam public void timestamp_one_arg_time() { var expr = DSL.timestamp(functionProperties, DSL.time(DSL.literal("22:33:44"))); assertEquals(TIMESTAMP, expr.type()); - var refValue = LocalDate.now().atTime(LocalTime.of(22, 33, 44)).atZone(UTC_ZONE_ID).toInstant(); + var refValue = + LocalDate.now().atTime(LocalTime.of(22, 33, 44)).atZone(ZoneOffset.UTC).toInstant(); assertEquals(new ExprTimestampValue(refValue), expr.valueOf()); } @@ -79,17 +82,10 @@ public void timestamp_one_arg_time() { public void timestamp_one_arg_date() { var expr = DSL.timestamp(functionProperties, DSL.date(DSL.literal("2077-12-15"))); assertEquals(TIMESTAMP, expr.type()); - var refValue = LocalDate.of(2077, 12, 15).atStartOfDay().atZone(UTC_ZONE_ID).toInstant(); + var refValue = LocalDate.of(2077, 12, 15).atStartOfDay().atZone(ZoneOffset.UTC).toInstant(); assertEquals(new ExprTimestampValue(refValue), expr.valueOf()); } - @Test - public void timestamp_one_arg_datetime() { - var expr = DSL.timestamp(functionProperties, DSL.datetime(DSL.literal("1961-04-12 09:07:00"))); - assertEquals(TIMESTAMP, expr.type()); - assertEquals(LocalDateTime.of(1961, 4, 12, 9, 7, 0), expr.valueOf().datetimeValue()); - } - @Test public void timestamp_one_arg_timestamp() { var refValue = new ExprTimestampValue(Instant.ofEpochSecond(10050042)); @@ 
-100,7 +96,7 @@ public void timestamp_one_arg_timestamp() { } private static Instant dateTime2Instant(LocalDateTime dt) { - return dt.atZone(UTC_ZONE_ID).toInstant(); + return dt.atZone(ZoneOffset.UTC).toInstant(); } private static ExprTimestampValue dateTime2ExprTs(LocalDateTime dt) { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java index 7aa824e61d..910fe42a52 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/ToSecondsTest.java @@ -20,7 +20,6 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprIntervalValue; import org.opensearch.sql.data.model.ExprLongValue; import org.opensearch.sql.data.model.ExprNullValue; @@ -52,7 +51,6 @@ private static Stream getTestDataForToSeconds() { Arguments.of(new ExprStringValue("2009-11-29 00:00:00"), new ExprLongValue(63426672000L)), Arguments.of(new ExprStringValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L)), Arguments.of(new ExprDateValue("2009-11-29"), new ExprLongValue(63426672000L)), - Arguments.of(new ExprDatetimeValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L)), Arguments.of( new ExprTimestampValue("2009-11-29 13:43:32"), new ExprLongValue(63426721412L))); } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java index c979b68302..7373b126c5 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTimeStampTest.java @@ -20,7 +20,6 @@ import 
org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprNullValue; import org.opensearch.sql.data.model.ExprTimestampValue; @@ -81,7 +80,7 @@ public void checkOfDateTime(LocalDateTime value) { assertEquals(value.toEpochSecond(ZoneOffset.UTC), unixTimeStampOf(value)); assertEquals( value.toEpochSecond(ZoneOffset.UTC), - eval(unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value)))).longValue()); + eval(unixTimeStampOf(DSL.literal(new ExprTimestampValue(value)))).longValue()); } private static Stream getInstantSamples() { diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java index c74b062fba..75aed94e03 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/UnixTwoWayConversionTest.java @@ -6,17 +6,17 @@ package org.opensearch.sql.expression.datetime; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import java.time.LocalDateTime; +import java.time.ZoneOffset; import java.util.stream.Stream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprLongValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.expression.DSL; public class UnixTwoWayConversionTest extends DateTimeTestBase { 
@@ -28,11 +28,14 @@ public void checkConvertNow() { @Test public void checkConvertNow_with_eval() { - assertEquals(getExpectedNow(), eval(fromUnixTime(unixTimeStampExpr())).datetimeValue()); + assertEquals( + getExpectedNow(), + LocalDateTime.ofInstant( + eval(fromUnixTime(unixTimeStampExpr())).timestampValue(), ZoneOffset.UTC)); } private LocalDateTime getExpectedNow() { - return LocalDateTime.now(functionProperties.getQueryStartClock().withZone(UTC_ZONE_ID)) + return LocalDateTime.now(functionProperties.getQueryStartClock().withZone(ZoneOffset.UTC)) .withNano(0); } @@ -86,7 +89,9 @@ public void convertDateTime2Epoch2DateTime(LocalDateTime value) { assertEquals(value, fromUnixTime(unixTimeStampOf(value))); assertEquals( value, - eval(fromUnixTime(unixTimeStampOf(DSL.literal(new ExprDatetimeValue(value))))) - .datetimeValue()); + LocalDateTime.ofInstant( + eval(fromUnixTime(unixTimeStampOf(DSL.literal(new ExprTimestampValue(value))))) + .timestampValue(), + ZoneOffset.UTC)); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java b/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java index 4f7208d141..3533886f9c 100644 --- a/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/datetime/YearweekTest.java @@ -19,8 +19,8 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.DSL; @@ -137,7 +137,7 @@ public void yearweekModeInUnsupportedFormat() { FunctionExpression expression1 = DSL.yearweek( functionProperties, - 
DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), + DSL.literal(new ExprTimestampValue("2019-01-05 10:11:12")), DSL.literal(8)); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> eval(expression1)); @@ -146,7 +146,7 @@ public void yearweekModeInUnsupportedFormat() { FunctionExpression expression2 = DSL.yearweek( functionProperties, - DSL.literal(new ExprDatetimeValue("2019-01-05 10:11:12")), + DSL.literal(new ExprTimestampValue("2019-01-05 10:11:12")), DSL.literal(-1)); exception = assertThrows(SemanticCheckException.class, () -> eval(expression2)); assertEquals("mode:-1 is invalid, please use mode value between 0-7", exception.getMessage()); diff --git a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java index 3ee12f59d4..237477050d 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/BuiltinFunctionRepositoryTest.java @@ -16,10 +16,10 @@ import static org.mockito.Mockito.when; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import static org.opensearch.sql.data.type.ExprCoreType.UNDEFINED; import static org.opensearch.sql.expression.function.BuiltinFunctionName.CAST_TO_BOOLEAN; @@ -126,7 +126,7 @@ void resolve_should_not_cast_arguments_in_cast_function() { FunctionImplementation function = repo.resolve( Collections.emptyList(), - 
registerFunctionResolver(CAST_TO_BOOLEAN.getName(), DATETIME, BOOLEAN)) + registerFunctionResolver(CAST_TO_BOOLEAN.getName(), TIMESTAMP, BOOLEAN)) .apply(functionProperties, ImmutableList.of(mockExpression)); assertEquals("cast_to_boolean(string)", function.toString()); } diff --git a/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java b/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java index 3b6e5f7586..9de1e65108 100644 --- a/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/function/WideningTypeRuleTest.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -58,12 +57,8 @@ class WideningTypeRuleTest { .put(STRING, TIMESTAMP, 1) .put(STRING, DATE, 1) .put(STRING, TIME, 1) - .put(STRING, DATETIME, 1) - .put(DATE, DATETIME, 1) - .put(TIME, DATETIME, 1) - .put(DATE, TIMESTAMP, 2) - .put(TIME, TIMESTAMP, 2) - .put(DATETIME, TIMESTAMP, 1) + .put(DATE, TIMESTAMP, 1) + .put(TIME, TIMESTAMP, 1) .put(UNDEFINED, BYTE, 1) .put(UNDEFINED, SHORT, 2) .put(UNDEFINED, INTEGER, 3) diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java index 7803a4dbca..44a3ccabbd 100644 --- a/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/convert/TypeCastOperatorTest.java @@ -10,7 
+10,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -27,7 +26,6 @@ import org.opensearch.sql.data.model.ExprBooleanValue; import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -72,12 +70,8 @@ private static Stream timestamp() { return Stream.of(new ExprTimestampValue("2020-12-24 01:01:01")); } - private static Stream datetime() { - return Stream.of(new ExprDatetimeValue("2020-12-24 01:01:01")); - } - @ParameterizedTest(name = "castString({0})") - @MethodSource({"numberData", "stringData", "boolData", "date", "time", "timestamp", "datetime"}) + @MethodSource({"numberData", "stringData", "boolData", "date", "time", "timestamp"}) void castToString(ExprValue value) { FunctionExpression expression = DSL.castString(DSL.literal(value)); assertEquals(STRING, expression.type()); @@ -299,7 +293,7 @@ void castToDate() { assertEquals(DATE, expression.type()); assertEquals(new ExprDateValue("2012-08-07"), expression.valueOf()); - expression = DSL.castDate(DSL.literal(new ExprDatetimeValue("2012-08-07 01:01:01"))); + expression = DSL.castDate(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); assertEquals(DATE, expression.type()); assertEquals(new ExprDateValue("2012-08-07"), expression.valueOf()); @@ -318,7 +312,7 @@ void castToTime() { assertEquals(TIME, expression.type()); assertEquals(new 
ExprTimeValue("01:01:01"), expression.valueOf()); - expression = DSL.castTime(DSL.literal(new ExprDatetimeValue("2012-08-07 01:01:01"))); + expression = DSL.castTime(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); assertEquals(TIME, expression.type()); assertEquals(new ExprTimeValue("01:01:01"), expression.valueOf()); @@ -337,7 +331,7 @@ void castToTimestamp() { assertEquals(TIMESTAMP, expression.type()); assertEquals(new ExprTimestampValue("2012-08-07 01:01:01"), expression.valueOf()); - expression = DSL.castTimestamp(DSL.literal(new ExprDatetimeValue("2012-08-07 01:01:01"))); + expression = DSL.castTimestamp(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); assertEquals(TIMESTAMP, expression.type()); assertEquals(new ExprTimestampValue("2012-08-07 01:01:01"), expression.valueOf()); @@ -345,19 +339,4 @@ void castToTimestamp() { assertEquals(TIMESTAMP, expression.type()); assertEquals(new ExprTimestampValue("2012-08-07 01:01:01"), expression.valueOf()); } - - @Test - void castToDatetime() { - FunctionExpression expression = DSL.castDatetime(DSL.literal("2012-08-07 01:01:01")); - assertEquals(DATETIME, expression.type()); - assertEquals(new ExprDatetimeValue("2012-08-07 01:01:01"), expression.valueOf()); - - expression = DSL.castDatetime(DSL.literal(new ExprTimestampValue("2012-08-07 01:01:01"))); - assertEquals(DATETIME, expression.type()); - assertEquals(new ExprDatetimeValue("2012-08-07 01:01:01"), expression.valueOf()); - - expression = DSL.castDatetime(DSL.literal(new ExprDateValue("2012-08-07"))); - assertEquals(DATETIME, expression.type()); - assertEquals(new ExprDatetimeValue("2012-08-07 00:00:00"), expression.valueOf()); - } } diff --git a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java index e6290553ce..55dfbd35c2 100644 --- 
a/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java @@ -18,7 +18,6 @@ import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; @@ -472,11 +471,10 @@ private void assertStringRepr( if (v1.type() == v2.type()) { assertEquals(String.format("%s(%s, %s)", function, v1, v2), functionExpression.toString()); } else { - var widerType = v1.type() == TIMESTAMP || v2.type() == TIMESTAMP ? TIMESTAMP : DATETIME; assertEquals( String.format( "%s(%s, %s)", - function, getExpectedStringRepr(widerType, v1), getExpectedStringRepr(widerType, v2)), + function, getExpectedStringRepr(TIMESTAMP, v1), getExpectedStringRepr(TIMESTAMP, v2)), functionExpression.toString()); } } diff --git a/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java b/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java index ac4153f59f..4b15704c77 100644 --- a/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/system/SystemFunctionsTest.java @@ -11,7 +11,6 @@ import java.time.Duration; import java.time.Instant; import java.time.LocalDate; -import java.time.LocalDateTime; import java.time.LocalTime; import java.util.LinkedHashMap; import java.util.List; @@ -21,7 +20,6 @@ import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprCollectionValue; import 
org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -49,7 +47,6 @@ void typeof() { assertEquals("BOOLEAN", typeofGetValue(ExprBooleanValue.of(false))); assertEquals("BYTE", typeofGetValue(new ExprByteValue(0))); assertEquals("DATE", typeofGetValue(new ExprDateValue(LocalDate.now()))); - assertEquals("DATETIME", typeofGetValue(new ExprDatetimeValue(LocalDateTime.now()))); assertEquals("DOUBLE", typeofGetValue(new ExprDoubleValue(0))); assertEquals("FLOAT", typeofGetValue(new ExprFloatValue(0))); assertEquals("INTEGER", typeofGetValue(new ExprIntegerValue(0))); diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java index 0f3f4bd61f..ee784045d0 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java @@ -10,7 +10,6 @@ import static org.hamcrest.Matchers.containsInRelativeOrder; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -25,7 +24,6 @@ import java.util.List; import org.junit.jupiter.api.Test; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; @@ -153,21 
+151,21 @@ public void minute_span() { PhysicalPlan plan = new AggregationOperator( testScan(datetimeInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("minute", DATETIME)))), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("minute", TIMESTAMP)))), Collections.singletonList( - DSL.named("span", DSL.span(DSL.ref("minute", DATETIME), DSL.literal(5), "m")))); + DSL.named("span", DSL.span(DSL.ref("minute", TIMESTAMP), DSL.literal(5), "m")))); List result = execute(plan); assertEquals(3, result.size()); assertThat( result, containsInRelativeOrder( ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-12-31 23:50:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2020-12-31 23:50:00"), "count", 1)), ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)), + ImmutableMap.of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 3)), ExprValueUtils.tupleValue( ImmutableMap.of( - "span", new ExprDatetimeValue("2021-01-01 00:05:00"), "count", 1)))); + "span", new ExprTimestampValue("2021-01-01 00:05:00"), "count", 1)))); plan = new AggregationOperator( @@ -296,23 +294,23 @@ public void month_span() { plan = new AggregationOperator( testScan(dateInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", TIMESTAMP)))), Collections.singletonList( - DSL.named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "M")))); + DSL.named("span", DSL.span(DSL.ref("quarter", TIMESTAMP), DSL.literal(2), "M")))); result = execute(plan); assertEquals(4, result.size()); assertThat( result, containsInRelativeOrder( ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-09-01 00:00:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2020-09-01 00:00:00"), "count", 1)), 
ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-11-01 00:00:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2020-11-01 00:00:00"), "count", 1)), ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 1)), + ImmutableMap.of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 1)), ExprValueUtils.tupleValue( ImmutableMap.of( - "span", new ExprDatetimeValue("2021-05-01 00:00:00"), "count", 2)))); + "span", new ExprTimestampValue("2021-05-01 00:00:00"), "count", 2)))); plan = new AggregationOperator( @@ -340,19 +338,19 @@ public void quarter_span() { PhysicalPlan plan = new AggregationOperator( testScan(dateInputs), - Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList(DSL.named("count", DSL.count(DSL.ref("quarter", TIMESTAMP)))), Collections.singletonList( - DSL.named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "q")))); + DSL.named("span", DSL.span(DSL.ref("quarter", TIMESTAMP), DSL.literal(2), "q")))); List result = execute(plan); assertEquals(2, result.size()); assertThat( result, containsInRelativeOrder( ExprValueUtils.tupleValue( - ImmutableMap.of("span", new ExprDatetimeValue("2020-07-01 00:00:00"), "count", 2)), + ImmutableMap.of("span", new ExprTimestampValue("2020-07-01 00:00:00"), "count", 2)), ExprValueUtils.tupleValue( ImmutableMap.of( - "span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)))); + "span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 3)))); plan = new AggregationOperator( diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java index 003e59959f..6399f945ed 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java +++ 
b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java @@ -14,7 +14,6 @@ import java.util.List; import java.util.Map; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; @@ -109,35 +108,35 @@ public class PhysicalPlanTestBase { ImmutableMap.of( "day", new ExprDateValue("2021-01-03"), "month", new ExprDateValue("2021-02-04"), - "quarter", new ExprDatetimeValue("2021-01-01 12:25:02"), + "quarter", new ExprTimestampValue("2021-01-01 12:25:02"), "year", new ExprTimestampValue("2013-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-01"), "month", new ExprDateValue("2021-03-17"), - "quarter", new ExprDatetimeValue("2021-05-17 12:25:01"), + "quarter", new ExprTimestampValue("2021-05-17 12:25:01"), "year", new ExprTimestampValue("2021-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-04"), "month", new ExprDateValue("2021-02-08"), - "quarter", new ExprDatetimeValue("2021-06-08 12:25:02"), + "quarter", new ExprTimestampValue("2021-06-08 12:25:02"), "year", new ExprTimestampValue("2016-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-02"), "month", new ExprDateValue("2020-12-12"), - "quarter", new ExprDatetimeValue("2020-12-12 12:25:03"), + "quarter", new ExprTimestampValue("2020-12-12 12:25:03"), "year", new ExprTimestampValue("1999-01-01 12:25:02")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "day", new ExprDateValue("2021-01-01"), "month", new ExprDateValue("2021-02-28"), - "quarter", new ExprDatetimeValue("2020-09-28 12:25:01"), + "quarter", new ExprTimestampValue("2020-09-28 12:25:01"), "year", new ExprTimestampValue("2018-01-01 12:25:02")))) .build(); @@ -147,31 
+146,31 @@ public class PhysicalPlanTestBase { ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("17:17:00"), - "minute", new ExprDatetimeValue("2020-12-31 23:54:12"), + "minute", new ExprTimestampValue("2020-12-31 23:54:12"), "second", new ExprTimestampValue("2021-01-01 00:00:05")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("18:17:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:05:12"), + "minute", new ExprTimestampValue("2021-01-01 00:05:12"), "second", new ExprTimestampValue("2021-01-01 00:00:12")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("17:15:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:03:12"), + "minute", new ExprTimestampValue("2021-01-01 00:03:12"), "second", new ExprTimestampValue("2021-01-01 00:00:17")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("19:01:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:02:12"), + "minute", new ExprTimestampValue("2021-01-01 00:02:12"), "second", new ExprTimestampValue("2021-01-01 00:00:03")))) .add( ExprValueUtils.tupleValue( ImmutableMap.of( "hour", new ExprTimeValue("18:50:00"), - "minute", new ExprDatetimeValue("2021-01-01 00:00:12"), + "minute", new ExprTimestampValue("2021-01-01 00:00:12"), "second", new ExprTimestampValue("2021-01-01 00:00:13")))) .build(); diff --git a/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java b/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java index b25f4d1053..0d9fe80339 100644 --- a/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java +++ b/core/src/test/java/org/opensearch/sql/utils/ComparisonUtil.java @@ -10,7 +10,7 @@ import static org.opensearch.sql.data.model.ExprValueUtils.getIntegerValue; import static org.opensearch.sql.data.model.ExprValueUtils.getLongValue; import static org.opensearch.sql.data.model.ExprValueUtils.getStringValue; -import static 
org.opensearch.sql.utils.DateTimeUtils.extractDateTime; +import static org.opensearch.sql.utils.DateTimeUtils.extractTimestamp; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprCoreType; @@ -29,8 +29,8 @@ public static int compare(FunctionProperties functionProperties, ExprValue v1, E } else if (v1.isNull() || v2.isNull()) { throw new ExpressionEvaluationException("invalid to call compare operation on null value"); } else if (v1.type() != v2.type() && v1.isDateTime() && v2.isDateTime()) { - return extractDateTime(v1, functionProperties) - .compareTo(extractDateTime(v2, functionProperties)); + return extractTimestamp(v1, functionProperties) + .compareTo(extractTimestamp(v2, functionProperties)); } return compare(v1, v2); } @@ -67,8 +67,6 @@ public static int compare(ExprValue v1, ExprValue v2) { return v1.timeValue().compareTo(v2.timeValue()); case DATE: return v1.dateValue().compareTo(v2.dateValue()); - case DATETIME: - return v1.datetimeValue().compareTo(v2.datetimeValue()); case TIMESTAMP: return v1.timestampValue().compareTo(v2.timestampValue()); default: diff --git a/docs/dev/img/type-hierarchy-tree-final.png b/docs/dev/img/type-hierarchy-tree-final.png new file mode 100644 index 0000000000000000000000000000000000000000..883e581efbdbe3fc0f9b0a8902fa6187cb44988a GIT binary patch literal 30902 zcmeFa2UL?;_dXg#jU$f1GJuK!D>~9F2tsH!EFh>Th(fTUAc9H>B?%f)7(`SYP?4e{ zB1jVfr6eFt2PuLvLWCd@kWgY0LLec@J@1=<=*)b-|NY;)?z;D`Z(Vtsc zd{S9-#O3-oWyg?BxAlf9Uq1dteOigyi)jnf12JTiUr#TzwkTf`bv?}Vm;DaZw;7lF zQUXtl?+G>Iu@mxzOUbJzduT+fmf%b&c<|X{}OUC?KsoH*+L!H-r9M+G}j_YuGKrzSP1)Uahp;3`+R} zUz&%g=-gKM(Lbj@*K{ceBXoCB>-Q^t_S*mOtg)n!{v4lBU?zCCii!^|=zlNWhjqxP z;eWuJ(uF44PW~nNlvQqWv9K(%QR=XX_$sWAke;LTtPgP5l$W$ix|m1~VfcuhVtYM> zfh=B$=7V6Px%R&Y1;0z0DsO_w#!KYlseY zVKNCL`NgReI;*_n`{92l#L~5cyn9)yEgfZC$3yK}&Y85%-X{e`qYGoI)3ToQGX7jE z7wDwcl+b~~Ej}e)pRohoN*%fd2llu6JztAz99l1#nP)>gDEVP88u|;v2%;`()IOyS zjmN~7?7+A!0hT30r!TArFX8tFP>5GrQhIfy&Ks7#T8T 
zO(9FT5>M^-9-g%3F2>>JwB8Ki_q9EBNZf_hyW5nSf3SY5V?mfpabkX`fa}QL5L1tE z^o`S166W_F>SBAp%R1HSi@WvT+yB3<-cIe_6ZvKPQ6bYdm@kl)yIpfL-=;y=HC(=7 zHmo=67XYn&C5;o4vw#^D+%rftGcv>L;ZGNRy4^RtY2=^^P_Au>;&KxyV=k((x>yPP zy?qh%QH0}8=-MJa%mx|+5t0X8L4>Q${|kQQIy+ON&J6FpPE@%1?MHJm4{NU>74=r< zvel|SJ`B_$pZCUyVlN-QYAlHaqIJn(VRmucOs5&;`ZLPa&Z^fEu0KA32*I&PF=A#@ z{Q_~6twy4xKbKU|By|(!9=>WY6y+xMm9!BHS|s>#{i=B)tH;v5z93E=wr=lQgayVI z^Ez%&d&;F;oW3ZqZOX$Jl2?6uc~ffM^=xmOa@AFOa){e3=;-SNyQzuhxH@(y><&XJ zlfYc8FXHsIIUy^65wirlyWA3^7K5{x3+$aHxZ%;7=ovdPn$v-4R7N2}wkxA9FGDMb z;t;EWVk%u4hAf|r3fqVYQ3Ge;I=g~&I2^fo5!zw~xas=8bd+URC_Yd|X-)-4bbIoD zU;BS%ll7mwVKD)Vr(EpZT{4yNmebzaS<`4_lu&YuJ?ZO>?+3TKPa~f2P~yTi9`d^9 zQe%h=Tne3=o!(sTa??2|wN29%_=3RL8%H*gS2nDL4&qW^&UM3{JGfFu-E%`1a%Z|4 z!KcMPvdF7(Ehi+$$5d2$011Qwezr{Z?CrvP`1YrYKsJELY%FmF;%1F;wT`PZ^ZuYP z!xoa9`!4Ig*aR*|>*xErX&Ch)E^EmqGG8_kdRi{(o-cw4%TcQ8%z)?wAS?}j@D$W# zQ+Oqt!nBxrXxOV(vSBgsu*#Jk&B!|A5DqMP?<1}hZ?6a9l`4A_(hM@$Rjv_>1m^p= z!zfCy{}53^X%#U&WB*(O{6#O*8`zopK$zvyXsHc3kU>lB74(r zmt)EF&LZHY+v2@%Lc*>bfwNoB3Ax{|5ufm$cP92mW}?@0gnp(Fu?&)W&6p=uB#lqS z!oesqH(U@?O^TzDZ)B|zw11%LC5z3AefetxXfK6kZ7)aO*PzZg2l;XcEn5rOGHVPp8j4*+rZVQ)o zC=7ZvL3as(c=ZHj4`0tC%_TVfs7aA)Q8z17?WDL(^aIFn970U@k-guWrTFLg`|nBX zNHN$$IKq&n?*abVPWaJsM_<{a*I6X_qbX=qOI2DZ9zSF`KfTU)TVVyMGe>vpW}qWd z_Q|*DiJ7B(n_}xQ+gKJ2Do7QApsXd7ycXqZR3m>X-ON?~6s@D|)hp>n=@jN-4&rid zuQ_cy4JVlW`*9;lKJkF?&5~2vUkjgdhnPuEmmC>lt z7aKhnaFQf!O^4utJi&r@mzZUb)3U@Fm7#weR=ZzSM8&Nk)=Vlr1KZAuHB?-wpEU+O zJc66{d$lPC5z@N&y8t5wff07q3Wx0Kky)s}fTqC%h52#&gga!R0rZmJO~`C(s&JA( z@Y;X`!^AKB?=Z;!NA_k}CCMkmfi=u%#fWRHvJ%Ybeg_sY4bvB)5eJu{5!unOAC!}< zSiGm&eE5ohlIRkF9cA@U&J-0VajbYXht~jaOQ)m&Sh7BKnOk&-czEc^pwlOFB&&G3>XjsSYBNBW$&+<9#U3p&5TkA(Ul>`pyS)%fT4(8GR24^TQC?EXd zwxpX$O$-wQH(*22&~lcJd3jCC(1X2Y8R^(Xu~?$!lNOIrJN?ci&Pro-I5iEoR#Sv8X{N&Ysn+u?Vu^A%&9 zm!p^VZnUJ6SrVcS{*cSJg4PqCJ=`mPxeUTn`8fc8)H)W|kjTR|oQtuHtEnQ%batGE zUR8)Mr#(K-!932qzAE>(mb!b8XN)k}F86`Ue8Q39Cr2%4l>$3AbTXcwNSkuS28${m zzmoF9`Rah*Z{c*yL4%ecB;LWX^pt!mD$Uam~H!Y+KMVjUq-9ZXIw;8cV9FJ`Aa)2 
zrI&^A$V4+Uv3N8=2aRZ;e0Nb{|8Vgf)MZ+Jn62&{l**Z04sn+wKOmh>c8W|a0U%RE zO{|xYp;AGT06eZQdHl15iNIwgeFY1AG$p|4>0^!GUKe$NA#P^gccu3CBChKmBnN-C zTH-`m&PgId2mi!HU*^K5oPi9FrZFeU{8WyM&eMbS(@u&^C&fl0J(WfyDoDwN{Udso zhkjE{<81quT5qk462cP4nzE@a*$vq37T7SQSYW<7l(=Hm(^Xo)XSX|z>gf*X%}22i zIV^7S%qbB^Ip-Tukn*!;kB>o7F@5OU`y)`Q1W$tO67Njs7CJ*LaOhPGlfAF%SI=L`r2c$C_fYmb|PTnQVp zJ>;*$U6*dVkk&2jr7i_g1<d7|@S6ToN0v{EQ7VQ+s{r{2QyJINVLVFF#3GO0ley`3oq+u3* z3D$HnaB_#vBp%veMCKh5-uBO_l0-Inywh04332v@qAAq1T|}paMH1GQ#T%j9QoG&p z{8L^uo7mHGg-3NVec{Vj>dg$g!F(tj_H6Ziu#=7ND4PjX@iKJ9Yy~~%=vNcnF!H6~Ii;WXo^zPGIpCS#LUmdV`9S1W*RT^EoOaWrhE-XT_#j?@5d`+( zj3gS;uaeQ6LUBQd`Tib+F|h_{fh1cVPM?T>#QZQYYf&g^9k*-g^M$?dtJj>8rV+0O z;EH+gz3erJpZu)zPd0Jh6&@{mvW<*+t+n~r4h0`A zw2#|N&)KN2dp$6z)px_PJvgqdXAhWV-c1ZWfVaMJxIbz`7}DBByc_~m6cZg6SCImb zG_MOdv<~faph53YcQE%LKi~tNz(EwwK^3mOvOXL+c*+yZZs+EYdARHL)oY_JUEm#! zSO2Y0d(?2DhVRQLVBZ!x5P99Q*@ba5pQuVtO*)Y9)V+84?R;DV|LSFyapW)0z_|#p zkssdJf*q(J4ZOW?qr_>)KCZ6P&)+cf8k>w860-DF7QdQVO{%&RS5xied@XlJ7?Khs zw3YVLBsD?ijs#QRc{G$;Vx50(V_Sc8b)(+AGezPh%&MNe;I;XWBq{X(geLfX!sHYiqA6GrtZ# z)|2El5sQO3?e8>1huR9f`*OL}n~JJPox7ch{ZX4XbMKf33G|+m*BeKf)OQxS`+8$~ z(Rp3W0lm(+XTS0lx^+40`#4>msdgaB`XxySM~<6DXsejT9IUt^w-t}$JH zIj8lkFwQM2>&#K<*TH_4-xaOgAR>WQk8w1rJEg8frQiZN!ydb7xazn+f z5g?%7g#mv~3I{noj9pRHd}gyhSNG%HT@+r@d@83GGrzCZ?B&ffv9HRmRgrjh-V$cq zPO0X3GO?0l`wJ&2aA4DD9qD=Ijx1Y_YYoe5wD^q*Rj;KEY1(&VF>UY<@6!BtwI-tGukCJ!LK`hL}IFhLc3DOI#-MJ|BX_1SM0QBL_(X-mNd%(T%1^c0gEta|$@vW3U;1gKuBe9zH zb5Kbp9O%z>pr?E>U+4eozV{lk=3&3l32ILLgMGASpWyd?!TAFo;VX7vlC*f+UjSbL zeHsHn5D&xWqtq@#SfQnVhh;R*BC*k{gtBd2s?BP2wrNM%wRF{N`NNhCC&y4kpi|ZX zIB_ZZmJR0PS^lK;UX9UzLcXc1eTQXph2mle682fI_15LDB1cQiNq~54gCNOlCTq*- zwq&hUU>6bVeurU#S$(ZW0l@@!Z~kibktxtBG#5j^fL@&gUTq4)!5|?H-W76c7V0h- zel~hw|5od>0<-MRn>R;$ zwg50!KN7?9U_8_VtK$WiV zgL$o&n)q0C-xH?)t^qNzWk_|MWZ$hR2l;tn$SLUwL=Iy4W_TqlAZxQc^o@rp$JHTn zE^2gkQ+m?9kxxE5Blcd$5uxSpI{Ls8Os1y%0D5hK0 z#B(`(T(@LS;^qUr;Yib2D7CwAMrD@0(&zGGO*3G3WG5wmW%#oXb5Mb65-+9SFbq=d zoKlH{(^XYJVAZv^n=aO7Eab%yxS&4{NHuKH#;-iV04mfpzO 
z*QR|fuk9L0%{V>BU3ZCEc8wwIH|^5+up}+>KR~Ke$fGhsG2I__RHgWivLN<8(d9M|7akW1gg>w%!)(&Q4kCD@Euvhd$T@?t%YsGp~@!r zT(R-8tI^TX*w=bUwaPM1-=o}@xS>65mD6EewAqV1(#7=Td9GZt(BTog#^)Z@FW{Gl z#vi1Wqvu>{_ZDUYXj^L!X2k(ve{N5Zxp;j`v65O@TV!-K;9VSsyvvKweYe06YRpYM z&3FLav3@L__(-y-T3$q8x=Q-4$p1iuyXPWDZIh z1yYS6APNQQC(H^H_V9~z_ftdp5<3(+EJ1Qn-vF950;Gag+t-(`qVm2C}9=_3z~x}pUHZyu|u z$GGNUVeJ@SC4&YOr)~t=ny3d~-w+taTZKtE(FoVZrM|oyC zM^oo@FEKC6jO--cfDJxF?T~(cB|Xh);|E3*5`Oh`(|B4$0JaSpEStD4XrwRbtje0< zt&3$R{KFn^i9&Wi9_=RfVEz3{(`sS_{v|H`i?N%Y;fBfzHD`t5^q-b6qfw6x>pIVV zJ0hXgF`MH&W7!O&k{CGo*$^+2HI$5wQ*$n-YDm0~^#*be4UPsCr}`XzVfY>q3A-;vy#+UCs|s-e58$*(Sgct` z;Mtj5+xncp;Idi7m8q&-H4)s-wbNAyQVZsgb5`u3L66joqIzKUKRXZiDY`8z1+W~f z%YQ!G*PQlIt6yRW7*}eP+EdZlOO;y)wBBW^;`#1M@9y)t+bT$B#}l|RSLLUp)IEoB z#PBJe_DZ~}s9&FNxsbmWS?Hqo@+-}54(ekv4^FaL0`ENhA4GRIvn-|3pALWSSOAgM z>)hp$8jkeI>49hc?O$e{%1Ls0?LQnpsXKzVZ@246p=wFn7V>3a7F8F(`P&&XHy6?p z5g*hrDAPMKbr3NhmA1uSs5jF=;x+XR*LVMx3&%Kx&RGS+-NBqC`<<;YN$IjoQq>Qq z2sA>lfm`vwN57h`Xm11V3HF03MF0w4_2`7xw>xZXeM`{UP4Y)76e~y^9SAg3RExrm z30a52M17{lfSjOS#MUL_Li3aI!o6@k3v$;z0A+1#K7!sTGw|YoTBgnmi-+C8U+VG%~G-@?{ZcpB{)4)gh zb2h3m$oFnm0$0XyuK`w8hzXa1Hi*FyCC9Gg&XC_Wb$E zs2A5+0k}tj=r;UNWwaJBhlkC6i9pU+(P(J_yHn>arZ>U&{gwr$ea-h%1a6X#vK?Nh z!?H~M`m5*gLYINzX4G%89pC%K3n$xSL6%k@nXC_<1P<~q6^gp$<7&->6S57dK&354 z&wZ<~a#k|-tB-eZ_Xf(xQ)^*|9OU3*KaXFvc$`HtDTL)Q3pKqfu}Qq^?M{wK^lDYp&P$P~k&{CmFu zOJ-D8Y(;8X)+WpfNB*dZ=5e1f_Y0iHG?^J}T#GaVDVXEuEp|*sR_Dgnb#*lF1UL8g zrnm~l9hzthO*f==N12`aaK&Quty&FW$#2tMy=Pqv2sbCr!#J?A3M}byBGY={on(jRl#4B9G1>~vC~^q#~);;Y2>tRRJoz82qRpQF4<>-8q?woJRR48H+L5}fnw)o}i>ND@{mibTep zq5PF1%E%~mq0t3fK(GzY*v1t7{^;J#D-3}DIr2V(<+n;PsL;iCq#jJEzjB{o^@q_y zAU}|f+b*t(mmysbsz{-vNFMR5qrc`i0<&y{t?{D$$yI06u9i%PMBi+d2l^t2LwZz= z7pgM?>h%0t-m14e2RJ7>eeuR89M{j*6(8vPMOd3yaeIwJR#st@4UacB40-4>a98;Y z{6J6-?aO?WN&!uL=vYQ?*&A+Zrg_wybU4|VNIz||;7GuGL0ncoH}I$iaCbM`)grG1 zxERU6)#o1IRW-BjzsFr4dZe-P=^4>$z11$GITRbg4S$;$T;c#zk__pR&&#vyD#mY>LvlS3SfN9XfyFNQqZ~c3Zvus1?y~=%8m-L?*N8%42Cw 
zutNsGK(QW0&a|9hX}U@VU4p6kL@emOAb3J3^?)%4Kp+1d2Cn#2WG1mo6NR4elyCo*$i$ z-^(|Yw3V0-{VDAm0JC^v^HA7k^r;BXbtI?nM_LKY+WhFc5Bnt!I!*=RfS8xkGxdt> zz2d3yhVx2CRgW%A3|`^2<^hN>0^cR*9`x1Bj>KhudpJ2mkkgO5?gK6(z>IEd48K*~ z(NVU%!?Ni;8Wf|{kpkAeHjEn#;;#pk_0dE4d(sCLLP_DL>RbEjOBeyAp#gF!Sn=1~ z4$GS}=QbWs*JW&l_&|1%R)h_E@h4ub2d<~RrK)~f!!@up)gR8%xKzu9T z_EUi7wHJCA#dbrZVkF&D06j%1#B z!ewLgwm+U9Fy{|oL2Smyc8{utVr{+in(UH7!1$4QI*m`Zf?3E75^S#au8g`OU;zT+ zp{;xj23ul9Ci{!N?11sKc=sg0eCY4@z!b#!PJT z-bk71m6_u2CN?$?+7ihU`kF*|HRJRAZfe`t09*7YaCQZ7@~-IP<&l3M4t6@BqkwfK zyq1ZE6!!N1Z4I#g`*|MACIX5ls4m{CU8+VSO?b^8e?*jO5Vm~ohfY$TNmhGIRySH3) z&lXFH1d>LLfKx*cV9g zBPsp@T9ZKHMm{IpPw#wm)eGNykjAZ1*g6(z9yc;76mV=c>J#eXDxV|-aYB@M5Mlz& zep2W*uM_~3mAve7E|meZDK|ST4cb=u1`pJ?;@NjpcQHh*3<@JKV9_Q_`$otgW|?yN za-i|Bd=!{H55@6sn7c0nYj?0GfJ|UYq61Vfgd_W&%f%9;c`^HKxL11SJ}*`R^8*BY zOMjIb8+6o~FeC`cpb!_n<0eopIN>X{rpQ%B@%M4d`SgXef`kJx7Zm8q1a*!n2#}S_ zC&Gk4yrQh;Rggel;NvNon`=D?bNz!1ccCZV+vxQUVC_>m5C(4q<+I-qj~$dR=N}$X`-j+$4PTcH|>;)=^#)x{VM3NLk0{hN-m%`gIlON0^xhKa7aYp$KRGSYQAJUn5weO3_` z|3wtk3Y?dbvUep>-EoShJ17k+Yu^!anof-GIE?OPPXQ5!VRy9kmEET8UMAO@&E}XM z-A2*wY6p4K8bSVcDMEtDTZaEd@zkF`tZFPOwY1i=9=!S}u;14NX85OyA2!$Y z1nJ2W;c_v6LACtk8VD!(8Wo<9;{zD99&@HHgOl6x6NK&B!A2PM83a?PTidfbbNR)r zPG|kwEQ|AE7-T5}AkqPC!vM@*nrQ0?7+ceE@`qSVLPKe#W3`sXUvc`kBLSX*2>r)r zgP>K;6F!tZb%F`(fy5VCMx_^z<0AbYbu-BLG8b$HZaK#{qS+ z@`1tB`tq1Oh}@*OVxw`xX#zad;=c_Qn2m#8n54dPu04Y%JueosOXmQ3`OmKmY%qfM z&+Zl|@S*Vp5(^8Chr9Z6%_e0yaZx|AZNejc*msA#w)2o;&aVYxqL}*;j|6zQUT`d;b>aPw8pD z+sg~2{fQgI8ZEkd$L$$X+b^+-S2s?a1t10psup+!m&_;HMMm$bj&BlC82OY-yLHfQ z)*8H;`~HA@h)!QVvgvVnwVkPetwTodm@oZSz-HJ+)8D20Qy5SZKqw&=L6j||S|~s< zD$hlaU2v03`*iC_bRh zmO@^~_tOJu$MfGD^Y1e+gqy`QGA(MP1p^W?*VSqK_v*LEgofmeRZiD`Sm< z>VJLW@(+MmX2170$MJdoFoCf91>I%;I?Pl~hzoGz<^VrPtO3QBV=5Vf(@T`09PF)H zk@*P1vc`VaUNFUvb{BC$Ld=-LFE~W&v^W4^qtm6<;APGZvz5 zj_6exn~m7D9osBBI>@h{AVKhmS8*2V|N7gFP%_(Y{ojP=r;Pzl%Xdxl^LG4b3xC{) zf0)p}zY;=~`REZL;J&O1p!pf$w4u3yJ#CXhuF&`U4{ly}d(P=i)g&N6QTw7tKf~(X 
z`JZ>owR(#wE*MJuZ~+C3E%~}C5toIg4$0A!^n0SR+H_kz7xt(SW_CUL+z&X*`&(%c zC+z*mg&igA#e%)gQfg6-ux^ zHw2N#I3*7yZO-=CHopVUSS4M#U*<;s-hJYaJUvGPa-!~jtAIr(&@;AD2TnSVKN!pZZ5nv@pT&`PWfx*27 zLuXcyYKpyR4-r5iy?Od^Z~m=>(PgXu<3VWTC%^EwoufMndUS*Y66ewIp^shge|{h+i3kd4-L0ajhj=O}lpSXn-mGMc@=LQv$qoGTHo!ajM|I`)iqjsvXtzdtL1eyKwk}#biWS(c6kemN zUI9;Nt{Q5-AV(d^>KBp=t4KZ!e$ZA^6$bXfY(R9F;E7e(nWC~lepSxN$Xht)8q%X4 z1m%e~E@ol=U1giVwVi$yxeEM@uEa>^)l&`7+U`Nb@EtNpYg|2T@~YpIOsdw6=*#bY z>NKo(s36Dn#c9lo)01EH^0(_H-#urQY{VdhGIbA8;=4uz)-N~61ckSp(Yb3b9_g^e>r+^zMV_@r?vq-+oSc3@t-1&7KKktJrfu{v0CPjzgC;)~mIE(om#U~Q{qUKmuE z=^<4;v!u&N+%qcTRyH3dOrK}D8gPuH$Gt|{i;OiBrF))rDvI|hh3peVXJT6&x)C`~ z$phSu``0GdB-)Nk{Fevp1#b$$Ci!{BCR$a*WSyeYX59b?AFT4HAnz`KH%!?!?&ZqkIo_fQd3^+k;Iu=YB9BzTY=$%vAT>coZA^Cqg24Z1H^#?O zR=NIkAMAk(o>!u~Lrwanu2o8vO3Yu1 z1d=|6u{esZt9KYAc6+H-oRHmPVHLE?m)kllecOlkIALOpB_*IPme_N_v*_t9P&7_3 zpr}t>Jr-zPP3`B~{WWX?G9--0Y_YUFks@N|*X*1p7-$VzPEiMVfDAH#WB_=pIJ?4; zh#~+)uGk#7)`lVKsWo6v+B)XJ%eIb`@D}?CASM=z;O+r32S~p2rgtbW0H5u`M!3&_ z%r?%o0z!HLR~)cKnrT0@DXC27-*@(seYM~hG|dmF7@1*fP+Bmb$wh5 zuVV1+=K6+4%88W3M=ofjl|R9>L?zvvk{8_E{6cgmSQWA8`%rYiMSdK3fdY}B0lJW! 
ztJIZMq!bB=(s7D1L;|`i>LBLJ{9^u02O?Tl@D~UY-^M|2QDbTB{%Ur@t1E8OrNdAR zH1U$geJBi2wT8feLw!)Rjo^&ER(TWtQ=CLWhkdbU^}Dq(128!v?9KQU0DE;2Oh$${ zSXe^_QLd=7=KwI`l#Fh2{}J}bBg>X zjy<~>OE(0KU-z2cWKX#bl5lI^ zRm1rhI8z&daXkd#X$O#f7ll)xJYl#Qlz@zqcDS z1W8D)mI{?}ZGj}4E*Cn&$bi0%?DFNXpc{O36UJ3b<}5593uu(BKqrAZW3@=YXwsK2 z_y=+`$VC9XpbZ!0yF#a3HolE%BklV2N>|3v1m&Gf_$@moyhT~YtC@f~DBHp{jaAFg zkTS@D5s)w_kd~>BF6hdF)@x(8^laJ}i<#F3Fl)`XVG{5V$|O7XF#%NGZBT&T&&;@4 zO(QVdP;@}`J~7g%KObff%H)v;c!jMYdb$4h2h4D)_g=9cBFBh(i6AS z0-#o45a&bI4uZEJgCnaE8}L_UKd$j?>V8cVzY$P;`pZGUp2wzEiaZveK=tiba^Q*y z7k#X5KnAVPJr2nAGpBfh_)tiWQRQ|YcCDNwJ;qjy+9#0KA`23Pkt}8u4X=aB3hJjb zL>*RUxBahx-auncHwnrgPWb~_@4&v1#fFMh067=r=2k637vp)X>c&^ZeVVC?8|mFo zyhL@i>51`IE*Q80wc~wu%n^Z#4^)<+8|HwT@}Km{?v)(ZCSR6@+eszGk$j$pL#m9I z`biUAS~=z)jjagOkV5PSUE4vR+OMWwf=X;C-tk-2j2MXzU!`POVL~E6wH1uY{;9qbAkjpcGwWjPk-(ly!PI3WX;jD zHz=LBh8KyQB9}153=qzRM9}}{4vf`!fPzzS7p#||ub%BH+c@pyy~yikSm9k(?ktsX zq(veWpQGeu8Af1Z!1V@ITFxqip0ope^L6*wUN6l#h_BI*9UYUd0wPAV%Yai1dj@7P zvpX#JIFpBn+fSO9+&3$CrJfYs15G5fAgB*Mwk>}k|Jh5>*trta^d#nM7;KASAKg1S zwfq%V{KKrkc>(=H-cw>u9&;MZV)k@cK65X_whmD{>vLAFznAzVdoEyACE3Cn3EDHI zxu{)@BBspEcMXJ4lO33Cv;%1BB*~2=J1P0%qh3=ka|V@cCFu@5?n#@43NZs|N(@@I zZ6P{q-c`>wMK|cl_&HxVveFiX(>KO+^B8yvGX}?(8jj4~GNvJaq^|ta5Pcf8F1vUe zVY>VG4OaZP7@YLDE&^410hlq_lld3_qvn!d^o8p2l69t_3`pTEarFnzn=9PqOW+Ee z@7Kz#U;HK-3uhjSn?b9A3Hkxb?N2bWgFV7Xku4ewi76rL&Ysl<#JVD1>UI;yuurJ9S^C6Grt3o`NY zHa5pqM~bx5D4DFGG0e@ZAdYGycSNxFDSh?<91vO00B4mCHEqIAR$2B1(t)%r z&jU1^T_XP*tckFcWt#{2{)dL=J4_ANh3YGz>Q9(EF>!yNfEw7NHk-qgQFW?}Rc8+N z6Ahe)NP5_kw&RCO`Y`Cz!#Nk6XrNH^{35-!^r#+>;A4vz-Hg>l$yj#_U2CYQE6zg3 z;+SX`5kP2*_xAIwuG3}=aKONx8UxS3Y(vGkN)D(I!d>;0mc!|MIYW3NDG z?-ti48vbZN0*7(;>KYS&JsQUZl%NQ{@nNQ8*tFdpmL(xBW+tv2tU&KswoCqS#BaT# z8dcK=J28%jzCG^W=Vk~N-^VrZo3#QBI3(Aw42TN|NO>$TAxN!{8^PA*i_3NFD@aSl zw0i#8dxtNn0V``RS(Qtiy$GG$&&rS7D=ixBCBV!l+YB-#wD&n7!&#-PM#`di$5q`y z&__*l2X+c_i`i=)0I+2bF&7ZQ@&P>~hn3WwUE`Jhh&X<&=AiC2uC4@SVh;6^N}7JY z=`Lcficfb<6xai!=&Ex zmWMi_pP&ALOn+dhUMb2(J$$Cw5}TfiWN 
z-CxLcoL+w58Zr+4a4Or4obAFhSN-A~ukxYCn2KKy7ftym>)Y@u+dk)y9@WF#j-HI7*P=T##_|!blO2w3w zdoZakMGjnz<`|eO`e)@h%Ll4jTzzF@F!WW6VjGk&tAb@FFwVmp^?2@RxQ(fwm+Grp{agKOR(X{t2*M1k4a3EdVn^?-PttpV~qUYP-^`WK<>y&EH9Es_pE z-|sKbcp!&W&abF|OF2KDr7DVo4Sq4%cTN*P2nlyImZBjNPnoP8Az<)?48swE$pFM9 z=($iEH^WsDk#o(~?3UqSIZXs~Nrv_n=t4udglinv0aaLI4lY#VGLGDUG9dZe;fky| zD=xuJBSBS(a^jfP0tHtfJO)p~#dQB$FVL~W;K+B^x{Rr)3dI$mFl;q|WMwbPU%?4U zfQxNl?WuSiVvM;DVcsX*E0A;){x;%2tgWN2rs7Sqb zCeImRIvdsUD0}3!sIl;@kMR6qbd`KV_{oqXwpVvv`ABaj^CJlA==EV``e|FZJKY#T zxv^Z`-@9{4diAq5!iBQH{MZ)CTlc(zQH-g%{>(3&LDN`(bR2{0q!li~KsC6J70}qA zw__p9chLN!&Q0U?-bFKHlTO!agE}NARyB^mFwO;8X|gixIk;ALR#p_rUUW*eoy<%nF^ed-3?t^OL>r~_mq7)0XQ4I{Ah-o3D0|IB?HN`x$-SSSDrzRd zCCZ3bfQsAtJVg8KD0DS$wj~mJ3mC(ypp;9Pnjj)3n03XLpB#$Xg?*B6#lOkP5P(Ra zSoYxb$=%2{rSbzv3wuz$tRK742(C`-!6xop2vDm)^AdCM6NdppNkFEf{AbZ-^WJLjDe7y-epmg(OvmbMkzwvjn~RFEv2Xa55NuIQ8@e*+A<5;C!ad zj42*cikTVHbm4s4rQMsukyOD&#ituPLH@Q>MONIlJJxYVYy$~@2u z2=oAup-ESPcJ7zeDbC@9+=O)o2!mb0Z%@nX?!u6_tf5>9D3Mn<3W~0SWHg%1pkL3J z7XTnF1U;7+EHvpc6*pC{mHsve;!JSCfBNK}b*U;KjRrAV+bXA}b+?az#=5lh?#~bv z_NS*2UpHga_NQG(TIK!>SHZ158^S?7BBaV|HFyu3Jpxq6UGmb8pc9iZz45R&f@z?x&khVjF?2>gdlKmdG-#yjMfIRNu4X+Iz2J}^tp-gN4!_S%_nAhno z8NTPTww4IcCLF3#4}!oX(C$UFGa!1_qwd9^d?Ns=)Pl<*fSK;Flv!1yCD37o^lvaq z5Q|50$RYY^KUhnUNSip1j|uMY0&ia9XHoQd3C4@-6-_MV{WcM9u;GxO_s3s;egS;m zrMo~$ugqyIYu2L`@;=?BeB2HlG=dHn#$JdA)xhuzH7|}?KLb#`HZB@Szl&oHJSUU@ zOqDe%kO|d=kifrNcaT(vAY_x@7cN^?gzw6g?+sVVi`?(*p!%pT<^6e;R9`CpwUxaC5nA?UxJY}3w0>xcCMmig38;~l(LJ+f=t5U*6v+x z3-O~+9r6-!Ys(o2M)#F==YP9c5Dr!S40xiMA` z>npxLvl!Z|3pnqWf*8*dt04|LhJ9YboKCHtdr@QLU8vBkUa`E<_wqlC_YBuC@@?Hm zD_gG&okQIlp^X7^^`%7J`}JM*1%Vw5U!(C}FW-t^ed(2Aexho< z*^AfR!IbaPGz+z&c{AuQk>p1`Fr|j;nU6;8kDsZHE*0gfp~lK^d+BBj+VTBbzqt!-&d#2?cjSgF1+2%=4Rsf!30SU zv$+W<8f8=<(4o(UIqjb<`=_9;E2nArp5wq;;%Xp&d$5{^bB54Mp~%nHTk-h8V_Zb@h754n9XB;q+zXpA~-_jOE!lJyN9l zB)#{e+c3?CxjnYhC}Ph5_KiO^E^~Zi)-%dm>NK~tw3)=R&u>^WV}!gC^aF%6u#dOD z_c{{{M7aiYvssFMi`u97v#?@t_G|tOQD8JV^r>=D6m6S*O2K9ny!T4|BimT*Yqj5j@z9P 
zpUA(?d{roVO8H=uc>bW=#6dm3mH{e}uzd}-e^!lj&7WSmT>Dlh{I^1%A}cf6-K^dl zihV*0e5)^=+!orKAubvM<;QCQ2QEf%;m-&`O)UL@Lh08gyff?*p|4dv#7QKtKrED9gI{jTsyWpM)E203g5d z{0-^re2l2$%^aH7C*m5op|dO@1HNnokR3{gAaRt8I$|x>4RU4VBYb@WI(XVPfo@qpRcidrA`T|~fxHEpiu}($nY_aV z0Qlj#0&P9f2m`eVLzN%{IB_1~IEy#11F=~Xtq+`}d^hRvpnqeK@NtxzZMXAPkR|!s zBU)QX?w^HJh*t@~{G`B{2@G>Hd`TcW^NaIoK!g2ZK=|rCnjSDgaZGeP5%Ew>KHkC})80lxr|1SKAT0!*yulrCKuB12apT1|XyC5X~80cRa}U9kHI#CJkIF@bPT z2XK>R4vK7;A3dXg{#i7r@9w2wg2jV+k7yklB>_UDXKd`6fmR+P zWh00-tm%R)yWitL{1GC6NO6MhYT=|;sZflzU22gaqXB?sMsSz~s0fC%Xz-O1A%aDq zYYa2p=iZUqTLZdt@~@mI9mtmTL1A6Mw>`O8z5!HDz8%%% z;D++Xs$vwzpZUA9cWJlmD>1Uc_tikHPnMddf)1i`d;-xo6As)=x1-!4Qj84EKopfj z6W?UpNV+?YeSCk%U{cCB0|xQ)uyEN7S-{4cW5nQlIn+QwTeitilr2%KB*(;xfF17x*-LY#S*RW9J)A*PRgkAVlccDxPfU{m&_B-aUC7VyO) zQ2SFwgi5WnJNjWW)aOS50eEDf;Xd@$E9c;t2K=9O9kBBz)FVT!Fud>p2Spt~Xc$8~ zTfng&g^K)T=)t_*fw~ZO_dMhUT*d@{3B*~bQC_3RSvIGnsg~xC;sq@ z@rq@{?FlhHpfoKK`#%8)n}lv3@+21$El7XMzwRWY^{}j|4*=JS_b$6~9%99V-ea6T z5Fvs(4CpIS#-I6n`uUfKFiMAT^&bzSo5qP$;G|^%^iA}8(38w=8sHCs`{Q>-7O2Za zLdJvo{*E5Yc=K$)A9(5RBOV#`DaogM&42XCeDh-X>sWq}`^QMX{@d8@I;k-ud7p4u zNc=#$=Ui`TdLJOaVh^%&%TviK$v4<1mI){>p9RvWv?xKAMuOxVxyTB(vOP9}HAf)` zdgRRhbJE_IQodiO1kkBQh0TJ_c&s@k3ZfM`96SY;OF{T4o)8x?LqH+Ckn+pLSsDXG z@kfp*>sl=V$ogZE?Rm>yypP(cYp?xZ-V|XojO%_w3GGZ%iD#s^p$~;;flG8=8d@Zc z2#($_GTfC_n?*Y(vUzw(gnblvSX!P-s;ar#95+6fG!d~#oF88$4mMl`ruIo1Xo8o5 z%mlH^p>4P_mRn zNe}_q^F{31?XIocpFMwlzt5NN^L+R|&%^WObDz)qRsSXm&fNf~JqImkI}{<7YsHf~ zB^Fdb9x#>;>Ny79p&*D26I&pt1qjoFl1^RG4=Q37UIW_`e2$t5>#|gtt6ayNprxPS zlo$8&G&>!ZI3fnvtaf+=q!A zN*7iDBd3xXwgP{v79CSQUE?M5Q^38kHK6yfwC6YOIz+Y=#(|p48<7rL4mdn=8Ms=3 zG7u=gjV?Lds}OXccdx?_3o3mib!)^4Fc@o03UImI5sU-hqecb85k=srr$)K{!O$72 zvmuD!P)(+NXk+C@W6@AO3riSkG);7)9vP_;EGr!F%it9 zyH_J_14n6QMbH$P_LiccdgTI zHTYqJf`!b=dl?QlkWC@IB^Diompg!1CiA09BpeYEF(ItPjN{!#$7OZ$Pnh0E&M^z^ zABZub!jF^zCcfXZZ;%f(5IeSP*v<@TH1dCJ<5`0%3@plB!wHqSC5gh!qsIYjxyu76WL~IS83- zd0pRpmkVjil(Asdi}^v;5>kP3Z6j;{X8qVFFKnT-$k1EV|;H(Y^WA 
z2rhx)uWRONikG`2qgOO)Nwxx6>Us_x_p4EEL9F&s1^d8PE=E6>=KE(luv>JAQs%q5{EJzA+tg7gyl;q?&ubZP}iga%ur4xim$f=7GU5Dse&_fAJ!F+twnzvRT|rZ>0?xI=m}^`}c2S zcOR!Ucy?w^Ho1^q^&CzCUP0LoSuR84jn!H_H`K=4JvMuQC<;>(y%oPBv~y{cSW6e- z;Vo8o0h-)&93~c*+%{x4V8=W6J$ZpZx-SQ&dFEdu{I~fB6@}Xh8q~ literal 0 HcmV?d00001 diff --git a/docs/user/dql/expressions.rst b/docs/user/dql/expressions.rst index af264b2f16..123bba046a 100644 --- a/docs/user/dql/expressions.rst +++ b/docs/user/dql/expressions.rst @@ -168,7 +168,7 @@ Here is an example for different type of comparison operators:: | True | True | False | True | False | False | +---------+----------+---------+----------+----------+---------+ -It is possible to compare datetimes. When comparing different datetime types, for example `DATE` and `TIME`, both converted to `DATETIME`. +It is possible to compare datetimes. When comparing different datetime types, for example `DATE` and `TIME`, both converted to `TIMESTAMP`. The following rule is applied on coversion: a `TIME` applied to today's date; `DATE` is interpreted at midnight. 
See example below:: os> SELECT current_time() > current_date() AS `now.time > today`, typeof(current_time()) AS `now.time.type`, typeof(current_date()) AS `now.date.type`; @@ -184,7 +184,7 @@ The following rule is applied on coversion: a `TIME` applied to today's date; `D +------------------+-----------------+------------+ | now.time = now | now.time.type | now.type | |------------------+-----------------+------------| - | True | TIME | DATETIME | + | True | TIME | TIMESTAMP | +------------------+-----------------+------------+ os> SELECT subtime(now(), current_time()) = current_date() AS `midnight = now.date`, typeof(subtime(now(), current_time())) AS `midnight.type`, typeof(current_date()) AS `now.date.type`; @@ -192,7 +192,7 @@ The following rule is applied on coversion: a `TIME` applied to today's date; `D +-----------------------+-----------------+-----------------+ | midnight = now.date | midnight.type | now.date.type | |-----------------------+-----------------+-----------------| - | True | DATETIME | DATE | + | True | TIMESTAMP | DATE | +-----------------------+-----------------+-----------------+ diff --git a/docs/user/dql/functions.rst b/docs/user/dql/functions.rst index 19260e8bea..5af21df2bf 100644 --- a/docs/user/dql/functions.rst +++ b/docs/user/dql/functions.rst @@ -1145,15 +1145,15 @@ Description Usage: adddate(date, INTERVAL expr unit)/ adddate(date, expr) adds the time interval of second argument to date; adddate(date, days) adds the second argument as integer number of days to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_ADD`_ when invoked with the INTERVAL form of the second argument. @@ -1178,13 +1178,13 @@ Description Usage: addtime(expr1, expr2) adds expr2 to expr1 and returns the result. If argument is TIME, today's date is used; if argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type map: -(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME +(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP -(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME +(TIME, DATE/TIMESTAMP/TIME) -> TIME Antonyms: `SUBTIME`_ @@ -1222,7 +1222,7 @@ Example:: | 10:26:12 | +---------------------------+ - os> SELECT ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), DATETIME('2002-03-04 20:40:50')) AS `'2007-02-28 10:20:30' + '20:40:50'` + os> SELECT ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) AS `'2007-02-28 10:20:30' + '20:40:50'` fetched rows / total rows = 1/1 +--------------------------------------+ | '2007-02-28 10:20:30' + '20:40:50' | @@ -1237,11 +1237,11 @@ CONVERT_TZ Description >>>>>>>>>>> -Usage: convert_tz(datetime, from_timezone, to_timezone) constructs a datetime object converted from the from_timezone to the to_timezone. +Usage: convert_tz(timestamp, from_timezone, to_timezone) constructs a timestamp object converted from the from_timezone to the to_timezone. 
-Argument type: DATETIME, STRING, STRING +Argument type: TIMESTAMP, STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP Example:: @@ -1262,7 +1262,7 @@ Example:: | 2010-10-09 23:10:10 | +---------------------------------------------------------+ -When the datedate, or either of the two time zone fields are invalid format, then the result is null. In this example any datetime that is not will result in null. +When the datedate, or either of the two time zone fields are invalid format, then the result is null. In this example any timestamp that is not will result in null. Example:: os> SELECT CONVERT_TZ("test", "+01:00", "-10:00") @@ -1273,7 +1273,7 @@ Example:: | null | +------------------------------------------+ -When the datetime, or either of the two time zone fields are invalid format, then the result is null. In this example any timezone that is not <+HH:mm> or <-HH:mm> will result in null. +When the timestamp, or either of the two time zone fields are invalid format, then the result is null. In this example any timezone that is not <+HH:mm> or <-HH:mm> will result in null. Example:: os> SELECT CONVERT_TZ("2010-10-10 10:10:10", "test", "-10:00") @@ -1440,9 +1440,9 @@ DATE Description >>>>>>>>>>> -Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/datetime/timestamp, it extracts the date value part from the expression. +Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/timestamp, it extracts the date value part from the expression. 
-Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: DATE @@ -1463,15 +1463,15 @@ DATETIME Description >>>>>>>>>>> -Usage: datetime(datetime)/ datetime(date, to_timezone) Converts the datetime to a new timezone +Usage: datetime(timestamp)/ datetime(timestamp, to_timezone) Converts the timestamp to a new timezone -Argument type: DATETIME/STRING +Argument type: TIMESTAMP/STRING Return type map: -(DATETIME, STRING) -> DATETIME +(TIMESTAMP, STRING) -> TIMESTAMP -(DATETIME) -> DATETIME +(TIMESTAMP) -> TIMESTAMP Example:: @@ -1560,9 +1560,9 @@ Description Usage: date_add(date, INTERVAL expr unit) adds the interval expr to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `ADDDATE`_ @@ -1663,7 +1663,7 @@ If an argument of type TIME is provided, the local date is used. * - x - x, for any smallcase/uppercase alphabet except [aydmshiHIMYDSEL] -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING +Argument type: STRING/DATE/TIME/TIMESTAMP, STRING Return type: STRING @@ -1686,9 +1686,9 @@ Description Usage: date_sub(date, INTERVAL expr unit) subtracts the interval expr from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `SUBDATE`_ @@ -1710,7 +1710,7 @@ DATEDIFF Usage: Calculates the difference of date parts of the given values. If the first argument is time, today's date is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type: LONG @@ -1733,7 +1733,7 @@ Description Usage: day(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1758,7 +1758,7 @@ Description Usage: dayname(date) returns the name of the weekday for date, including Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -1781,7 +1781,7 @@ Description Usage: dayofmonth(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1806,7 +1806,7 @@ Description Usage: day_of_month(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/TIME/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1833,7 +1833,7 @@ Usage: dayofweek(date) returns the weekday index for date (1 = Sunday, 2 = Monda The `day_of_week` function is also provided as an alias. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1858,7 +1858,7 @@ Usage: dayofyear(date) returns the day of the year for date, in the range 1 to If an argument of type `TIME` is given, the function will use the current date. The function `day_of_year`_ is also provided as an alias. 
-Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1872,14 +1872,6 @@ Example:: | 239 | +---------------------------------+ - os> SELECT DAYOFYEAR(DATETIME('2020-08-26 00:00:00')) - fetched rows / total rows = 1/1 - +----------------------------------------------+ - | DAYOFYEAR(DATETIME('2020-08-26 00:00:00')) | - |----------------------------------------------| - | 239 | - +----------------------------------------------+ - os> SELECT DAYOFYEAR(TIMESTAMP('2020-08-26 00:00:00')) fetched rows / total rows = 1/1 +-----------------------------------------------+ @@ -1898,7 +1890,7 @@ Description If an argument of type `TIME` is given, the function will use the current date. This function is an alias to the `dayofyear`_ function -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -1912,14 +1904,6 @@ Example:: | 239 | +-----------------------------------+ - os> SELECT DAY_OF_YEAR(DATETIME('2020-08-26 00:00:00')) - fetched rows / total rows = 1/1 - +------------------------------------------------+ - | DAY_OF_YEAR(DATETIME('2020-08-26 00:00:00')) | - |------------------------------------------------| - | 239 | - +------------------------------------------------+ - os> SELECT DAY_OF_YEAR(TIMESTAMP('2020-08-26 00:00:00')) fetched rows / total rows = 1/1 +-------------------------------------------------+ @@ -2030,7 +2014,7 @@ FROM_UNIXTIME Description >>>>>>>>>>> -Usage: Returns a representation of the argument given as a datetime or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. +Usage: Returns a representation of the argument given as a timestamp or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. If second argument is provided, it is used to format the result in the same way as the format string used for the `DATE_FORMAT`_ function. 
If timestamp is outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL. @@ -2038,7 +2022,7 @@ Argument type: DOUBLE, STRING Return type map: -DOUBLE -> DATETIME +DOUBLE -> TIMESTAMP DOUBLE, STRING -> STRING @@ -2070,7 +2054,7 @@ Description Usage: Returns a string value containing string format specifiers based on the input arguments. Argument type: TYPE, STRING -TYPE must be one of the following tokens: [DATE, TIME, DATETIME, TIMESTAMP]. +TYPE must be one of the following tokens: [DATE, TIME, TIMESTAMP]. STRING must be one of the following tokens: ["USA", "JIS", "ISO", "EUR", "INTERNAL"] (" can be replaced by '). Examples:: @@ -2093,7 +2077,7 @@ Description Usage: hour(time) extracts the hour value for time. Different from the time of day value, the time value has a large range and can be greater than 23, so the return value of hour(time) can be also greater than 23. The function `hour_of_day` is also provided as an alias. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2113,7 +2097,7 @@ LAST_DAY Usage: Returns the last day of the month as a DATE for a valid argument. -Argument type: DATE/DATETIME/STRING/TIMESTAMP/TIME +Argument type: DATE/STRING/TIMESTAMP/TIME Return type: DATE @@ -2238,9 +2222,9 @@ MICROSECOND Description >>>>>>>>>>> -Usage: microsecond(expr) returns the microseconds from the time or datetime expression expr as a number in the range from 0 to 999999. +Usage: microsecond(expr) returns the microseconds from the time or timestamp expression expr as a number in the range from 0 to 999999. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2264,7 +2248,7 @@ Description Usage: minute(time) returns the minute for time, in the range 0 to 59. The `minute_of_hour` function is provided as an alias. 
-Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2287,7 +2271,7 @@ Description Usage: minute_of_day(time) returns the minute value for time within a 24 hour day, in the range 0 to 1439. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2312,7 +2296,7 @@ Usage: month(date) returns the month for date, in the range 1 to 12 for January If an argument of type `TIME` is given, the function will use the current date. The function `month_of_year` is also provided as an alias. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -2344,7 +2328,7 @@ Description Usage: monthname(date) returns the full name of the month for date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -2368,9 +2352,9 @@ Description Returns the current date and time as a value in 'YYYY-MM-DD hh:mm:ss' format. The value is expressed in the cluster time zone. `NOW()` returns a constant time that indicates the time at which the statement began to execute. This differs from the behavior for `SYSDATE() <#sysdate>`_, which returns the exact time at which it executes. -Return type: DATETIME +Return type: TIMESTAMP -Specification: NOW() -> DATETIME +Specification: NOW() -> TIMESTAMP Example:: @@ -2437,7 +2421,7 @@ Description Usage: quarter(date) returns the quarter of the year for date, in the range 1 to 4. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -2503,7 +2487,7 @@ Description Usage: second(time) returns the second for time, in the range 0 to 59. 
The function `second_of_minute` is provided as an alias -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -2532,14 +2516,14 @@ STR_TO_DATE Description >>>>>>>>>>> -Usage: str_to_date(string, string) is used to extract a DATETIME from the first argument string using the formats specified in the second argument string. -The input argument must have enough information to be parsed as a DATE, DATETIME, or TIME. +Usage: str_to_date(string, string) is used to extract a TIMESTAMP from the first argument string using the formats specified in the second argument string. +The input argument must have enough information to be parsed as a DATE, TIMESTAMP, or TIME. Acceptable string format specifiers are the same as those used in the `DATE_FORMAT`_ function. -It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a DATETIME with the parsed values (as well as default values for any field that was not parsed). +It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a TIMESTAMP with the parsed values (as well as default values for any field that was not parsed). Argument type: STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP Example:: @@ -2561,15 +2545,15 @@ Description Usage: subdate(date, INTERVAL expr unit) / subdate(date, days) subtracts the time interval expr from date; subdate(date, days) subtracts the second argument as integer number of days from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_SUB`_ when invoked with the INTERVAL form of the second argument. @@ -2594,13 +2578,13 @@ Description Usage: subtime(expr1, expr2) subtracts expr2 from expr1 and returns the result. If argument is TIME, today's date is used; if argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type map: -(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME +(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP -(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME +(TIME, DATE/TIMESTAMP/TIME) -> TIME Antonyms: `ADDTIME`_ @@ -2638,7 +2622,7 @@ Example:: | 10:14:48 | +---------------------------+ - os> SELECT SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), DATETIME('2002-03-04 20:40:50')) AS `'2007-03-01 10:20:30' - '20:40:50'` + os> SELECT SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) AS `'2007-03-01 10:20:30' - '20:40:50'` fetched rows / total rows = 1/1 +--------------------------------------+ | '2007-03-01 10:20:30' - '20:40:50' | @@ -2659,9 +2643,9 @@ If the argument is given, it specifies a fractional seconds precision from 0 to Optional argument type: INTEGER -Return type: DATETIME +Return type: TIMESTAMP -Specification: SYSDATE([INTEGER]) -> DATETIME +Specification: SYSDATE([INTEGER]) -> TIMESTAMP Example:: @@ -2680,9 +2664,9 @@ TIME Description >>>>>>>>>>> -Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/datetime/time/timestamp, it extracts the time value part from the expression. 
+Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/time/timestamp, it extracts the time value part from the expression. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: TIME @@ -2706,7 +2690,7 @@ Usage: time_format(time, format) formats the time argument using the specifiers This supports a subset of the time format specifiers available for the `date_format`_ function. Using date format specifiers supported by `date_format`_ will return 0 or null. Acceptable format specifiers are listed in the table below. -If an argument of type DATE is passed in, it is treated as a DATETIME at midnight (i.e., 00:00:00). +If an argument of type DATE is passed in, it is treated as a TIMESTAMP at midnight (i.e., 00:00:00). .. list-table:: The following table describes the available specifier arguments. :widths: 20 80 @@ -2736,7 +2720,7 @@ If an argument of type DATE is passed in, it is treated as a DATETIME at midnigh - Time, 24-hour (hh:mm:ss) -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING +Argument type: STRING/DATE/TIME/TIMESTAMP, STRING Return type: STRING @@ -2759,7 +2743,7 @@ Description Usage: time_to_sec(time) returns the time argument, converted to seconds. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: LONG @@ -2804,15 +2788,15 @@ Description >>>>>>>>>>> Usage: timestamp(expr) constructs a timestamp type with the input string `expr` as an timestamp. If the argument is not a string, it casts `expr` to timestamp type with default timezone UTC. If argument is a time, it applies today's date before cast. -With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or datetime expression `expr1` and returns the result as a timestamp value. 
+With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or timestamp expression `expr1` and returns the result as a timestamp value. -Argument type: STRING/DATE/TIME/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type map: -(STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP -(STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP, STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP Example:: @@ -2831,11 +2815,11 @@ TIMESTAMPADD Description >>>>>>>>>>> -Usage: Returns a DATETIME value based on a passed in DATE/DATETIME/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. -If the third argument is a STRING, it must be formatted as a valid DATETIME. If only a TIME is provided, a DATETIME is still returned with the DATE portion filled in using the current date. -If the third argument is a DATE, it will be automatically converted to a DATETIME. +Usage: Returns a TIMESTAMP value based on a passed in DATE/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. +If the third argument is a STRING, it must be formatted as a valid TIMESTAMP. If only a TIME is provided, a TIMESTAMP is still returned with the DATE portion filled in using the current date. +If the third argument is a DATE, it will be automatically converted to a TIMESTAMP. -Argument type: INTERVAL, INTEGER, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, INTEGER, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] Examples:: @@ -2856,11 +2840,11 @@ Description >>>>>>>>>>> Usage: TIMESTAMPDIFF(interval, start, end) returns the difference between the start and end date/times in interval units. 
-If a TIME is provided as an argument, it will be converted to a DATETIME with the DATE portion filled in using the current date. -Arguments will be automatically converted to a DATETIME/TIME/TIMESTAMP when appropriate. -Any argument that is a STRING must be formatted as a valid DATETIME. +If a TIME is provided as an argument, it will be converted to a TIMESTAMP with the DATE portion filled in using the current date. +Arguments will be automatically converted to a TIME/TIMESTAMP when appropriate. +Any argument that is a STRING must be formatted as a valid TIMESTAMP. -Argument type: INTERVAL, DATE/DATETIME/TIME/TIMESTAMP/STRING, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, DATE/TIME/TIMESTAMP/STRING, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] Examples:: @@ -2882,7 +2866,7 @@ Description Usage: to_days(date) returns the day number (the number of days since year 0) of the given date. Returns NULL if date is invalid. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: LONG @@ -2906,7 +2890,7 @@ Description Usage: to_seconds(date) returns the number of seconds since the year 0 of the given value. Returns NULL if value is invalid. An argument of a LONG type can be used. It must be formatted as YMMDD, YYMMDD, YYYMMDD or YYYYMMDD. Note that a LONG type argument cannot have leading 0s as it will be parsed using an octal numbering system. -Argument type: STRING/LONG/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/LONG/DATE/TIME/TIMESTAMP Return type: LONG @@ -2928,11 +2912,11 @@ Description >>>>>>>>>>> Usage: Converts given argument to Unix time (seconds since January 1st, 1970 at 00:00:00 UTC). If no argument given, it returns current Unix time. -The date argument may be a DATE, DATETIME, or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. 
If the argument includes a time part, it may optionally include a fractional seconds part.
+The date argument may be a DATE or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. If the argument includes a time part, it may optionally include a fractional seconds part.
 If argument is in invalid format or outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL.
 You can use `FROM_UNIXTIME`_ to do reverse conversion.
 
-Argument type: /DOUBLE/DATE/DATETIME/TIMESTAMP
+Argument type: DOUBLE/DATE/TIMESTAMP
 
 Return type: DOUBLE
 
@@ -3009,9 +2993,9 @@ Description
 
 Returns the current UTC timestamp as a value in 'YYYY-MM-DD hh:mm:ss'.
 
-Return type: DATETIME
+Return type: TIMESTAMP
 
-Specification: UTC_TIMESTAMP() -> DATETIME
+Specification: UTC_TIMESTAMP() -> TIMESTAMP
 
 Example::
 
@@ -3075,7 +3059,7 @@ The functions `weekofyear` and `week_of_year` is also provided as an alias.
     - 1-53
     - with a Monday in this year
 
-Argument type: DATE/DATETIME/TIME/TIMESTAMP/STRING
+Argument type: DATE/TIME/TIMESTAMP/STRING
 
 Return type: INTEGER
 
@@ -3100,7 +3084,7 @@ Usage: weekday(date) returns the weekday index for date (0 = Monday, 1 = Tuesday
 It is similar to the `dayofweek`_ function, but returns different indexes for each day.
 
-Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP
+Argument type: STRING/DATE/TIME/TIMESTAMP
 
 Return type: INTEGER
 
@@ -3124,7 +3108,7 @@ Description
 
 The week_of_year function is a synonym for the `week`_ function. If an argument of type `TIME` is given, the function will use the current date.
 
-Argument type: DATE/DATETIME/TIME/TIMESTAMP/STRING
+Argument type: DATE/TIME/TIMESTAMP/STRING
 
 Return type: INTEGER
 
@@ -3148,7 +3132,7 @@ Description
 
 The weekofyear function is a synonym for the `week`_ function. If an argument of type `TIME` is given, the function will use the current date.
-Argument type: DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: DATE/TIME/TIMESTAMP/STRING Return type: INTEGER @@ -3171,7 +3155,7 @@ Description Usage: year(date) returns the year for date, in the range 1000 to 9999, or 0 for the “zero” date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -3194,7 +3178,7 @@ Description Usage: yearweek(date) returns the year and week for date as an integer. It accepts and optional mode arguments aligned with those available for the `WEEK`_ function. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -4514,6 +4498,6 @@ Example:: +----------------+---------------+-----------------+------------------+ | typeof(date) | typeof(int) | typeof(now()) | typeof(column) | |----------------+---------------+-----------------+------------------| - | DATE | INTEGER | DATETIME | OBJECT | + | DATE | INTEGER | TIMESTAMP | OBJECT | +----------------+---------------+-----------------+------------------+ diff --git a/docs/user/general/datatypes.rst b/docs/user/general/datatypes.rst index a265ffd4c9..c423bd7b10 100644 --- a/docs/user/general/datatypes.rst +++ b/docs/user/general/datatypes.rst @@ -40,8 +40,6 @@ The OpenSearch SQL Engine support the following data types. +---------------------+ | timestamp | +---------------------+ -| datetime | -+---------------------+ | date | +---------------------+ | date_nanos | @@ -128,53 +126,51 @@ Type Conversion Matrix The following matrix illustrates the conversions allowed by our query engine for all the built-in data types as well as types provided by OpenSearch storage engine. 
-+--------------+------------------------------------------------+---------+------------------------------+-----------------------------------------------+--------------------------+---------------------+ -| Data Types | Numeric Type Family | BOOLEAN | String Type Family | Datetime Type Family | OpenSearch Type Family | Complex Type Family | -| +------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| | BYTE | SHORT | INTEGER | LONG | FLOAT | DOUBLE | BOOLEAN | TEXT_KEYWORD | TEXT | STRING | TIMESTAMP | DATE | TIME | DATETIME | INTERVAL | GEO_POINT | IP | BINARY | STRUCT | ARRAY | -+==============+======+=======+=========+======+=======+========+=========+==============+======+========+===========+======+======+==========+==========+===========+=====+========+===========+=========+ -| UNDEFINED | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| BYTE | N/A | IE | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| SHORT | E | N/A | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| INTEGER | E | E | N/A | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | 
-+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| LONG | E | E | E | N/A | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| FLOAT | E | E | E | E | N/A | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| DOUBLE | E | E | E | E | E | N/A | X | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| BOOLEAN | E | E | E | E | E | E | N/A | X | X | E | X | X | X | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TEXT_KEYWORD | | | | | | | | N/A | | IE | | | | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TEXT | | | | | | | | | N/A | IE | | | | X | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| STRING | E | E | E | E | E | E | 
IE | X | X | N/A | IE | IE | IE | IE | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TIMESTAMP | X | X | X | X | X | X | X | X | X | E | N/A | IE | IE | IE | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| DATE | X | X | X | X | X | X | X | X | X | E | E | N/A | IE | E | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| TIME | X | X | X | X | X | X | X | X | X | E | E | E | N/A | E | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| DATETIME | X | X | X | X | X | X | X | X | X | E | E | E | E | N/A | X | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| INTERVAL | X | X | X | X | X | X | X | X | X | E | X | X | X | X | N/A | X | X | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| GEO_POINT | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | N/A | X | X | X | X | 
-+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| IP | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | N/A | X | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| BINARY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | N/A | X | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| STRUCT | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | X | N/A | X | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ -| ARRAY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | X | X | N/A | -+--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+----------+-----------+-----+--------+-----------+---------+ ++--------------+------------------------------------------------+---------+------------------------------+------------------------------------+--------------------------+---------------------+ +| Data Types | Numeric Type Family | BOOLEAN | String Type Family | Datetime Type Family | OpenSearch Type Family | Complex Type Family | +| +------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| | BYTE | SHORT | INTEGER | LONG | FLOAT | DOUBLE | BOOLEAN | TEXT_KEYWORD | TEXT | STRING | 
TIMESTAMP | DATE | TIME | INTERVAL | GEO_POINT | IP | BINARY | STRUCT | ARRAY | ++==============+======+=======+=========+======+=======+========+=========+==============+======+========+===========+======+======+==========+===========+=====+========+===========+=========+ +| UNDEFINED | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | IE | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| BYTE | N/A | IE | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| SHORT | E | N/A | IE | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| INTEGER | E | E | N/A | IE | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| LONG | E | E | E | N/A | IE | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| FLOAT | E | E | E | E | N/A | IE | X | X | X | E | X | X | X | X | X | X | X | X | X | 
++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| DOUBLE | E | E | E | E | E | N/A | X | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+--------------+-----------+---------+ +| BOOLEAN | E | E | E | E | E | E | N/A | X | X | E | X | X | X | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TEXT_KEYWORD | | | | | | | | N/A | | IE | | | | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TEXT | | | | | | | | | N/A | IE | | | | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| STRING | E | E | E | E | E | E | IE | X | X | N/A | IE | IE | IE | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TIMESTAMP | X | X | X | X | X | X | X | X | X | E | N/A | IE | IE | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| DATE | X | X | X | X | X | X | X | X | X | E | E | N/A | IE | X | X | X | X | X | X | 
++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| TIME | X | X | X | X | X | X | X | X | X | E | E | E | N/A | X | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| INTERVAL | X | X | X | X | X | X | X | X | X | E | X | X | X | N/A | X | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| GEO_POINT | X | X | X | X | X | X | X | X | X | | X | X | X | X | N/A | X | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| IP | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | N/A | X | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| BINARY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | N/A | X | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| STRUCT | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | N/A | X | ++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ +| ARRAY | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | X | N/A | 
++--------------+------+-------+---------+------+-------+--------+---------+--------------+------+--------+-----------+------+------+----------+-----------+-----+--------+-----------+---------+ Note that: @@ -236,7 +232,7 @@ Numeric values ranged from -2147483648 to +2147483647 are recognized as integer Date and Time Data Types ======================== -The datetime types supported by the SQL plugin are ``DATE``, ``TIME``, ``DATETIME``, ``TIMESTAMP``, and ``INTERVAL``, with date and time being used to represent temporal values. By default, the OpenSearch DSL uses ``date`` type as the only date and time related type as it contains all information about an absolute time point. To integrate with SQL language each of the types other than timestamp hold part of the temporal or timezone information. This information can be used to explicitly clarify the date and time types reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. +The datetime types supported by the SQL plugin are ``DATE``, ``TIME``, ``TIMESTAMP``, and ``INTERVAL``, with date and time being used to represent temporal values. By default, the OpenSearch DSL uses ``date`` type as the only date and time related type as it contains all information about an absolute time point. To integrate with SQL language each of the types other than timestamp hold part of the temporal or timezone information. This information can be used to explicitly clarify the date and time types reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. Date ---- @@ -262,19 +258,6 @@ Time represents the time on the clock or watch with no regard for which timezone +------+-----------------------+----------------------------------------------+ -Datetime --------- - -Datetime type is the combination of date and time. 
The conversion rule of date or time to datetime is described in `Conversion between date and time types`_. Datetime type does not contain timezone information. For an absolute time point that contains both date time and timezone information, see `Timestamp`_. - -+----------+----------------------------------+--------------------------------------------------------------------+ -| Type | Syntax | Range | -+==========+==================================+====================================================================+ -| Datetime | 'yyyy-MM-dd hh:mm:ss[.fraction]' | '0001-01-01 00:00:00.000000000' to '9999-12-31 23:59:59.999999999' | -+----------+----------------------------------+--------------------------------------------------------------------+ - - - Timestamp --------- @@ -304,16 +287,14 @@ The expr is any expression that can be iterated to a quantity value eventually, Conversion between date and time types -------------------------------------- -Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a datetime value, or convert a date value to a datetime value and so forth. Here lists the summary of the conversion rules that SQL plugin supports for each of the types: +Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a timestamp value, or convert a date value to a timestamp value and so forth. Here lists the summary of the conversion rules that SQL plugin supports for each of the types: Conversion from DATE >>>>>>>>>>>>>>>>>>>> - Since the date value does not have any time information, conversion to `Time`_ type is not useful, and will always return a zero time value '00:00:00'. 
-- Conversion from date to datetime has a data fill-up due to the lack of time information, and it attaches the time '00:00:00' to the original date by default and forms a datetime instance. For example, the result to covert date '2020-08-17' to datetime type is datetime '2020-08-17 00:00:00'.
-
-- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to covert date '2020-08-17' to datetime type with session timezone UTC is datetime '2020-08-17 00:00:00' UTC.
+- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to convert date '2020-08-17' to timestamp type with session timezone UTC is timestamp '2020-08-17 00:00:00' UTC.
 
 
 Conversion from TIME
@@ -322,20 +303,10 @@ Conversion from TIME
 
 - When time value is converted to any other datetime types, the date part of the new value is filled up with today's date, like with the `CURDATE` function. For example, a time value X converted to a timestamp would produce today's date at time X.
 
-Conversion from DATETIME
->>>>>>>>>>>>>>>>>>>>>>>>
-
-- Conversion from datetime to date is to extract the date part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to date is date '2020-08-08'.
-
-- Conversion to time is to extract the time part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to time is time '14:09:00'.
-
-- Since the datetime type does not contain timezone information, the conversion to timestamp needs to fill up the timezone part with the session timezone. For example, the result to convert datetime '2020-08-17 14:09:00' with system timezone of UTC, to timestamp is timestamp '2020-08-17 14:09:00' UTC.
- - Conversion from TIMESTAMP >>>>>>>>>>>>>>>>>>>>>>>>> -- Conversion from timestamp is much more straightforward. To convert it to date is to extract the date value, and conversion to time is to extract the time value. Conversion to datetime, it will extracts the datetime value and leave the timezone information over. For example, the result to convert datetime '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00' and to datetime is datetime '2020-08-17 14:09:00'. +- Conversion from timestamp is much more straightforward. To convert it to date is to extract the date value, and conversion to time is to extract the time value. For example, the result to convert timestamp '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00'. Conversion from string to date and time types --------------------------------------------- diff --git a/docs/user/ppl/functions/datetime.rst b/docs/user/ppl/functions/datetime.rst index f7c4091753..9e75e41136 100644 --- a/docs/user/ppl/functions/datetime.rst +++ b/docs/user/ppl/functions/datetime.rst @@ -17,15 +17,15 @@ Description Usage: adddate(date, INTERVAL expr unit) / adddate(date, days) adds the interval of second argument to date; adddate(date, days) adds the second argument as integer number of days to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_ADD`_ when invoked with the INTERVAL form of the second argument. @@ -51,13 +51,13 @@ Description Usage: addtime(expr1, expr2) adds expr2 to expr1 and returns the result. 
If argument is TIME, today's date is used; if argument is DATE, time at midnight is used.
 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME
+Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME
 
 Return type map:
 
-(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME
+(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP
 
-(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME
+(TIME, DATE/TIMESTAMP/TIME) -> TIME
 
 Antonyms: `SUBTIME`_
 
@@ -95,7 +95,7 @@ Example::
     | 10:26:12                  |
     +---------------------------+
 
-    os> source=people | eval `'2007-02-28 10:20:30' + '20:40:50'` = ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), DATETIME('2002-03-04 20:40:50')) | fields `'2007-02-28 10:20:30' + '20:40:50'`
+    os> source=people | eval `'2007-02-28 10:20:30' + '20:40:50'` = ADDTIME(TIMESTAMP('2007-02-28 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) | fields `'2007-02-28 10:20:30' + '20:40:50'`
     fetched rows / total rows = 1/1
     +--------------------------------------+
     | '2007-02-28 10:20:30' + '20:40:50'   |
     |--------------------------------------|
@@ -110,13 +110,13 @@ CONVERT_TZ
 Description
 >>>>>>>>>>>
 
-Usage: convert_tz(datetime, from_timezone, to_timezone) constructs a local datetime converted from the from_timezone to the to_timezone. CONVERT_TZ returns null when any of the three function arguments are invalid, i.e. datetime is not in the format yyyy-MM-dd HH:mm:ss or the timeszone is not in (+/-)HH:mm. It also is invalid for invalid dates, such as February 30th and invalid timezones, which are ones outside of -13:59 and +14:00.
+Usage: convert_tz(timestamp, from_timezone, to_timezone) constructs a local timestamp converted from the from_timezone to the to_timezone. CONVERT_TZ returns null when any of the three function arguments are invalid, i.e. timestamp is not in the format yyyy-MM-dd HH:mm:ss or the timezone is not in (+/-)HH:mm. It also is invalid for invalid dates, such as February 30th and invalid timezones, which are ones outside of -13:59 and +14:00.
-Argument type: DATETIME, STRING, STRING +Argument type: TIMESTAMP, STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP -Conversion from +00:00 timezone to +10:00 timezone. Returns the datetime argument converted from +00:00 to +10:00 +Conversion from +00:00 timezone to +10:00 timezone. Returns the timestamp argument converted from +00:00 to +10:00 Example:: os> source=people | eval `convert_tz('2008-05-15 12:00:00','+00:00','+10:00')` = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields `convert_tz('2008-05-15 12:00:00','+00:00','+10:00')` @@ -349,9 +349,9 @@ DATE Description >>>>>>>>>>> -Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/datetime/timestamp, it extracts the date value part from the expression. +Usage: date(expr) constructs a date type with the input string expr as a date. If the argument is of date/timestamp, it extracts the date value part from the expression. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: DATE @@ -398,9 +398,9 @@ Description Usage: date_add(date, INTERVAL expr unit) adds the interval expr to date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `ADDDATE`_ @@ -501,7 +501,7 @@ If an argument of type TIME is provided, the local date is used. 
* - x
     - x, for any smallcase/uppercase alphabet except [aydmshiHIMYDSEL]
 
-Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING
+Argument type: STRING/DATE/TIME/TIMESTAMP, STRING
 
 Return type: STRING
 
@@ -522,18 +522,18 @@ DATETIME
 Description
 >>>>>>>>>>>
 
-Usage: DATETIME(datetime)/ DATETIME(date, to_timezone) Converts the datetime to a new timezone
+Usage: DATETIME(timestamp)/ DATETIME(date, to_timezone) Converts the timestamp to a new timezone
 
-Argument type: DATETIME/STRING
+Argument type: TIMESTAMP/STRING
 
 Return type map:
 
-(DATETIME, STRING) -> DATETIME
+(TIMESTAMP, STRING) -> TIMESTAMP
 
-(DATETIME) -> DATETIME
+(TIMESTAMP) -> TIMESTAMP
 
-Converting datetime with timezone to the second argument timezone.
+Converting timestamp with timezone to the second argument timezone.
 Example::
 
     os> source=people | eval `DATETIME('2004-02-28 23:00:00-10:00', '+10:00')` = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields `DATETIME('2004-02-28 23:00:00-10:00', '+10:00')`
@@ -545,7 +545,7 @@ Example::
     +---------------------------------------------------+
 
-    The valid timezone range for convert_tz is (-13:59, +14:00) inclusive. Timezones outside of the range will result in null.
+The valid timezone range for convert_tz is (-13:59, +14:00) inclusive. Timezones outside of the range will result in null.
 Example::
 
     os> source=people | eval `DATETIME('2008-01-01 02:00:00', '-14:00')` = DATETIME('2008-01-01 02:00:00', '-14:00') | fields `DATETIME('2008-01-01 02:00:00', '-14:00')`
@@ -556,17 +556,6 @@ Example::
     | null                                        |
    +---------------------------------------------+
-Example:: - - os> source=people | eval `DATETIME('2008-02-30 02:00:00', '-00:00')` = DATETIME('2008-02-30 02:00:00', '-00:00') | fields `DATETIME('2008-02-30 02:00:00', '-00:00')` - fetched rows / total rows = 1/1 - +---------------------------------------------+ - | DATETIME('2008-02-30 02:00:00', '-00:00') | - |---------------------------------------------| - | null | - +---------------------------------------------+ - DATE_SUB -------- @@ -576,9 +565,9 @@ Description Usage: date_sub(date, INTERVAL expr unit) subtracts the interval expr from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL +Argument type: DATE/TIMESTAMP/TIME, INTERVAL -Return type: DATETIME +Return type: TIMESTAMP Synonyms: `SUBDATE`_ @@ -600,7 +589,7 @@ DATEDIFF Usage: Calculates the difference of date parts of given values. If the first argument is time, today's date is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type: LONG @@ -623,7 +612,7 @@ Description Usage: day(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -648,7 +637,7 @@ Description Usage: dayname(date) returns the name of the weekday for date, including Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -671,7 +660,7 @@ Description Usage: dayofmonth(date) extracts the day of the month for date, in the range 1 to 31. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -696,7 +685,7 @@ Description Usage: day_of_month(date) extracts the day of the month for date, in the range 1 to 31. 
-Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -721,7 +710,7 @@ Description Usage: dayofweek(date) returns the weekday index for date (1 = Sunday, 2 = Monday, ..., 7 = Saturday). -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -746,7 +735,7 @@ Description Usage: day_of_week(date) returns the weekday index for date (1 = Sunday, 2 = Monday, ..., 7 = Saturday). -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -771,7 +760,7 @@ Description Usage: dayofyear(date) returns the day of the year for date, in the range 1 to 366. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -796,7 +785,7 @@ Description Usage: day_of_year(date) returns the day of the year for date, in the range 1 to 366. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -913,14 +902,14 @@ FROM_UNIXTIME Description >>>>>>>>>>> -Usage: Returns a representation of the argument given as a datetime or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. +Usage: Returns a representation of the argument given as a timestamp or character string value. Perform reverse conversion for `UNIX_TIMESTAMP`_ function. If second argument is provided, it is used to format the result in the same way as the format string used for the `DATE_FORMAT`_ function. If timestamp is outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL. Argument type: DOUBLE, STRING Return type map: -DOUBLE -> DATETIME +DOUBLE -> TIMESTAMP DOUBLE, STRING -> STRING @@ -951,7 +940,7 @@ Description Usage: Returns a string value containing string format specifiers based on the input arguments. 
-Argument type: TYPE, STRING, where TYPE must be one of the following tokens: [DATE, TIME, DATETIME, TIMESTAMP], and +Argument type: TYPE, STRING, where TYPE must be one of the following tokens: [DATE, TIME, TIMESTAMP], and STRING must be one of the following tokens: ["USA", "JIS", "ISO", "EUR", "INTERNAL"] (" can be replaced by '). Examples:: @@ -973,7 +962,7 @@ Description Usage: hour(time) extracts the hour value for time. Different from the time of day value, the time value has a large range and can be greater than 23, so the return value of hour(time) can be also greater than 23. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -998,7 +987,7 @@ Description Usage: hour_of_day(time) extracts the hour value for time. Different from the time of day value, the time value has a large range and can be greater than 23, so the return value of hour_of_day(time) can be also greater than 23. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1020,7 +1009,7 @@ LAST_DAY Usage: Returns the last day of the month as a DATE for a valid argument. -Argument type: DATE/DATETIME/STRING/TIMESTAMP/TIME +Argument type: DATE/STRING/TIMESTAMP/TIME Return type: DATE @@ -1145,9 +1134,9 @@ MICROSECOND Description >>>>>>>>>>> -Usage: microsecond(expr) returns the microseconds from the time or datetime expression expr as a number in the range from 0 to 999999. +Usage: microsecond(expr) returns the microseconds from the time or timestamp expression expr as a number in the range from 0 to 999999. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1170,7 +1159,7 @@ Description Usage: minute(time) returns the minute for time, in the range 0 to 59. 
-Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1195,7 +1184,7 @@ Description Usage: minute(time) returns the amount of minutes in the day, in the range of 0 to 1439. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1218,7 +1207,7 @@ Description Usage: minute(time) returns the minute for time, in the range 0 to 59. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1243,7 +1232,7 @@ Description Usage: month(date) returns the month for date, in the range 1 to 12 for January to December. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1268,7 +1257,7 @@ Description Usage: month_of_year(date) returns the month for date, in the range 1 to 12 for January to December. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1293,7 +1282,7 @@ Description Usage: monthname(date) returns the full name of the month for date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: STRING @@ -1317,9 +1306,9 @@ Description Returns the current date and time as a value in 'YYYY-MM-DD hh:mm:ss' format. The value is expressed in the cluster time zone. `NOW()` returns a constant time that indicates the time at which the statement began to execute. This differs from the behavior for `SYSDATE() <#sysdate>`_, which returns the exact time at which it executes. -Return type: DATETIME +Return type: TIMESTAMP -Specification: NOW() -> DATETIME +Specification: NOW() -> TIMESTAMP Example:: @@ -1386,7 +1375,7 @@ Description Usage: quarter(date) returns the quarter of the year for date, in the range 1 to 4. 
-Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -1435,7 +1424,7 @@ Description Usage: second(time) returns the second for time, in the range 0 to 59. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1460,7 +1449,7 @@ Description Usage: second_of_minute(time) returns the second for time, in the range 0 to 59. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: INTEGER @@ -1483,14 +1472,14 @@ STR_TO_DATE Description >>>>>>>>>>> -Usage: str_to_date(string, string) is used to extract a DATETIME from the first argument string using the formats specified in the second argument string. -The input argument must have enough information to be parsed as a DATE, DATETIME, or TIME. +Usage: str_to_date(string, string) is used to extract a TIMESTAMP from the first argument string using the formats specified in the second argument string. +The input argument must have enough information to be parsed as a DATE, TIMESTAMP, or TIME. Acceptable string format specifiers are the same as those used in the `DATE_FORMAT`_ function. -It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a DATETIME with the parsed values (as well as default values for any field that was not parsed). +It returns NULL when a statement cannot be parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, it will return a TIMESTAMP with the parsed values (as well as default values for any field that was not parsed). 
Argument type: STRING, STRING -Return type: DATETIME +Return type: TIMESTAMP Example:: @@ -1512,15 +1501,15 @@ Description Usage: subdate(date, INTERVAL expr unit) / subdate(date, days) subtracts the interval expr from date; subdate(date, days) subtracts the second argument as integer number of days from date. If first argument is TIME, today's date is used; if first argument is DATE, time at midnight is used. -Argument type: DATE/DATETIME/TIMESTAMP/TIME, INTERVAL/LONG +Argument type: DATE/TIMESTAMP/TIME, INTERVAL/LONG Return type map: -(DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME +(DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP (DATE, LONG) -> DATE -(DATETIME/TIMESTAMP/TIME, LONG) -> DATETIME +(TIMESTAMP/TIME, LONG) -> TIMESTAMP Synonyms: `DATE_SUB`_ when invoked with the INTERVAL form of the second argument. @@ -1545,13 +1534,13 @@ Description Usage: subtime(expr1, expr2) subtracts expr2 from expr1 and returns the result. If argument is TIME, today's date is used; if argument is DATE, time at midnight is used. 
-Argument type: DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME +Argument type: DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME Return type map: -(DATE/DATETIME/TIMESTAMP, DATE/DATETIME/TIMESTAMP/TIME) -> DATETIME +(DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP -(TIME, DATE/DATETIME/TIMESTAMP/TIME) -> TIME +(TIME, DATE/TIMESTAMP/TIME) -> TIME Antonyms: `ADDTIME`_ @@ -1589,7 +1578,7 @@ Example:: | 10:14:48 | +---------------------------+ - os> source=people | eval `'2007-03-01 10:20:30' - '20:40:50'` = SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), DATETIME('2002-03-04 20:40:50')) | fields `'2007-03-01 10:20:30' - '20:40:50'` + os> source=people | eval `'2007-03-01 10:20:30' - '20:40:50'` = SUBTIME(TIMESTAMP('2007-03-01 10:20:30'), TIMESTAMP('2002-03-04 20:40:50')) | fields `'2007-03-01 10:20:30' - '20:40:50'` fetched rows / total rows = 1/1 +--------------------------------------+ | '2007-03-01 10:20:30' - '20:40:50' | @@ -1610,9 +1599,9 @@ If the argument is given, it specifies a fractional seconds precision from 0 to Optional argument type: INTEGER -Return type: DATETIME +Return type: TIMESTAMP -Specification: SYSDATE([INTEGER]) -> DATETIME +Specification: SYSDATE([INTEGER]) -> TIMESTAMP Example:: @@ -1631,9 +1620,9 @@ TIME Description >>>>>>>>>>> -Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/datetime/time/timestamp, it extracts the time value part from the expression. +Usage: time(expr) constructs a time type with the input string expr as a time. If the argument is of date/time/timestamp, it extracts the time value part from the expression. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: TIME @@ -1682,7 +1671,7 @@ Usage: time_format(time, format) formats the time argument using the specifiers This supports a subset of the time format specifiers available for the `date_format`_ function. 
Using date format specifiers supported by `date_format`_ will return 0 or null. Acceptable format specifiers are listed in the table below. -If an argument of type DATE is passed in, it is treated as a DATETIME at midnight (i.e., 00:00:00). +If an argument of type DATE is passed in, it is treated as a TIMESTAMP at midnight (i.e., 00:00:00). .. list-table:: The following table describes the available specifier arguments. :widths: 20 80 @@ -1712,7 +1701,7 @@ If an argument of type DATE is passed in, it is treated as a DATETIME at midnigh - Time, 24-hour (hh:mm:ss) -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP, STRING +Argument type: STRING/DATE/TIME/TIMESTAMP, STRING Return type: STRING @@ -1735,7 +1724,7 @@ Description Usage: time_to_sec(time) returns the time argument, converted to seconds. -Argument type: STRING/TIME/DATETIME/TIMESTAMP +Argument type: STRING/TIME/TIMESTAMP Return type: LONG @@ -1780,15 +1769,15 @@ Description >>>>>>>>>>> Usage: timestamp(expr) constructs a timestamp type with the input string `expr` as an timestamp. If the argument is not a string, it casts `expr` to timestamp type with default timezone UTC. If argument is a time, it applies today's date before cast. -With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or datetime expression `expr1` and returns the result as a timestamp value. +With two arguments `timestamp(expr1, expr2)` adds the time expression `expr2` to the date or timestamp expression `expr1` and returns the result as a timestamp value. 
-Argument type: STRING/DATE/TIME/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type map: -(STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP -(STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP +(STRING/DATE/TIME/TIMESTAMP, STRING/DATE/TIME/TIMESTAMP) -> TIMESTAMP Example:: @@ -1807,11 +1796,11 @@ TIMESTAMPADD Description >>>>>>>>>>> -Usage: Returns a DATETIME value based on a passed in DATE/DATETIME/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. -If the third argument is a STRING, it must be formatted as a valid DATETIME. If only a TIME is provided, a DATETIME is still returned with the DATE portion filled in using the current date. -If the third argument is a DATE, it will be automatically converted to a DATETIME. +Usage: Returns a TIMESTAMP value based on a passed in DATE/TIME/TIMESTAMP/STRING argument and an INTERVAL and INTEGER argument which determine the amount of time to be added. +If the third argument is a STRING, it must be formatted as a valid TIMESTAMP. If only a TIME is provided, a TIMESTAMP is still returned with the DATE portion filled in using the current date. +If the third argument is a DATE, it will be automatically converted to a TIMESTAMP. -Argument type: INTERVAL, INTEGER, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, INTEGER, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] @@ -1833,11 +1822,11 @@ Description >>>>>>>>>>> Usage: TIMESTAMPDIFF(interval, start, end) returns the difference between the start and end date/times in interval units. -If a TIME is provided as an argument, it will be converted to a DATETIME with the DATE portion filled in using the current date. -Arguments will be automatically converted to a DATETIME/TIME/TIMESTAMP when appropriate. 
-Any argument that is a STRING must be formatted as a valid DATETIME. +If a TIME is provided as an argument, it will be converted to a TIMESTAMP with the DATE portion filled in using the current date. +Arguments will be automatically converted to a TIME/TIMESTAMP when appropriate. +Any argument that is a STRING must be formatted as a valid TIMESTAMP. -Argument type: INTERVAL, DATE/DATETIME/TIME/TIMESTAMP/STRING, DATE/DATETIME/TIME/TIMESTAMP/STRING +Argument type: INTERVAL, DATE/TIME/TIMESTAMP/STRING, DATE/TIME/TIMESTAMP/STRING INTERVAL must be one of the following tokens: [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] @@ -1860,7 +1849,7 @@ Description Usage: to_days(date) returns the day number (the number of days since year 0) of the given date. Returns NULL if date is invalid. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: LONG @@ -1884,7 +1873,7 @@ Description Usage: to_seconds(date) returns the number of seconds since the year 0 of the given value. Returns NULL if value is invalid. An argument of a LONG type can be used. It must be formatted as YMMDD, YYMMDD, YYYMMDD or YYYYMMDD. Note that a LONG type argument cannot have leading 0s as it will be parsed using an octal numbering system. -Argument type: STRING/LONG/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/LONG/DATE/TIME/TIMESTAMP Return type: LONG @@ -1906,11 +1895,11 @@ Description >>>>>>>>>>> Usage: Converts given argument to Unix time (seconds since Epoch - very beginning of year 1970). If no argument given, it returns the current Unix time. -The date argument may be a DATE, DATETIME, or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. If the argument includes a time part, it may optionally include a fractional seconds part. +The date argument may be a DATE or TIMESTAMP string, or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. 
If the argument includes a time part, it may optionally include a fractional seconds part. If argument is in invalid format or outside of range 1970-01-01 00:00:00 - 3001-01-18 23:59:59.999999 (0 to 32536771199.999999 epoch time), function returns NULL. You can use `FROM_UNIXTIME`_ to do reverse conversion. -Argument type: /DOUBLE/DATE/DATETIME/TIMESTAMP +Argument type: DOUBLE/DATE/TIMESTAMP Return type: DOUBLE @@ -1979,9 +1968,9 @@ Description Returns the current UTC timestamp as a value in 'YYYY-MM-DD hh:mm:ss'. -Return type: DATETIME +Return type: TIMESTAMP -Specification: UTC_TIMESTAMP() -> DATETIME +Specification: UTC_TIMESTAMP() -> TIMESTAMP Example:: @@ -2043,7 +2032,7 @@ Usage: week(date[, mode]) returns the week number for date. If the mode argument - 1-53 - with a Monday in this year -Argument type: DATE/DATETIME/TIMESTAMP/STRING +Argument type: DATE/TIMESTAMP/STRING Return type: INTEGER @@ -2070,7 +2059,7 @@ Usage: weekday(date) returns the weekday index for date (0 = Monday, 1 = Tuesday It is similar to the `dayofweek`_ function, but returns different indexes for each day. -Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER @@ -2134,7 +2123,7 @@ Usage: week_of_year(date[, mode]) returns the week number for date. If the mode - 1-53 - with a Monday in this year -Argument type: DATE/DATETIME/TIMESTAMP/STRING +Argument type: DATE/TIMESTAMP/STRING Return type: INTEGER @@ -2159,7 +2148,7 @@ Description Usage: year(date) returns the year for date, in the range 1000 to 9999, or 0 for the “zero” date. -Argument type: STRING/DATE/DATETIME/TIMESTAMP +Argument type: STRING/DATE/TIMESTAMP Return type: INTEGER @@ -2182,7 +2171,7 @@ Description Usage: yearweek(date) returns the year and week for date as an integer. It accepts and optional mode arguments aligned with those available for the `WEEK`_ function. 
-Argument type: STRING/DATE/DATETIME/TIME/TIMESTAMP +Argument type: STRING/DATE/TIME/TIMESTAMP Return type: INTEGER diff --git a/docs/user/ppl/functions/system.rst b/docs/user/ppl/functions/system.rst index fbe9860dce..cfe0414c49 100644 --- a/docs/user/ppl/functions/system.rst +++ b/docs/user/ppl/functions/system.rst @@ -27,5 +27,5 @@ Example:: +----------------+---------------+-----------------+------------------+ | typeof(date) | typeof(int) | typeof(now()) | typeof(column) | |----------------+---------------+-----------------+------------------| - | DATE | INTEGER | DATETIME | OBJECT | + | DATE | INTEGER | TIMESTAMP | OBJECT | +----------------+---------------+-----------------+------------------+ diff --git a/docs/user/ppl/general/datatypes.rst b/docs/user/ppl/general/datatypes.rst index cabc689526..18555dec3d 100644 --- a/docs/user/ppl/general/datatypes.rst +++ b/docs/user/ppl/general/datatypes.rst @@ -39,8 +39,6 @@ The PPL support the following data types. +---------------+ | timestamp | +---------------+ -| datetime | -+---------------+ | date | +---------------+ | time | @@ -114,7 +112,7 @@ Numeric values ranged from -2147483648 to +2147483647 are recognized as integer Date and Time Data Types ======================== -The date and time data types are the types that represent temporal values and PPL plugin supports types including DATE, TIME, DATETIME, TIMESTAMP and INTERVAL. By default, the OpenSearch DSL uses date type as the only date and time related type, which has contained all information about an absolute time point. To integrate with PPL language, each of the types other than timestamp is holding part of temporal or timezone information, and the usage to explicitly clarify the date and time types is reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. 
+The date and time data types are the types that represent temporal values and PPL plugin supports types including DATE, TIME, TIMESTAMP and INTERVAL. By default, the OpenSearch DSL uses date type as the only date and time related type, which has contained all information about an absolute time point. To integrate with PPL language, each of the types other than timestamp is holding part of temporal or timezone information, and the usage to explicitly clarify the date and time types is reflected in the datetime functions (see `Functions `_ for details), where some functions might have restrictions in the input argument type. Date @@ -141,19 +139,6 @@ Time represents the time on the clock or watch with no regard for which timezone +------+-----------------------+----------------------------------------+ -Datetime --------- - -Datetime type is the combination of date and time. The conversion rule of date or time to datetime is described in `Conversion between date and time types`_. Datetime type does not contain timezone information. For an absolute time point that contains both date time and timezone information, see `Timestamp`_. 
- -+----------+----------------------------------+--------------------------------------------------------------+ -| Type | Syntax | Range | -+==========+==================================+==============================================================+ -| Datetime | 'yyyy-MM-dd hh:mm:ss[.fraction]' | '0001-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999' | -+----------+----------------------------------+--------------------------------------------------------------+ - - - Timestamp --------- @@ -183,38 +168,26 @@ The expr is any expression that can be iterated to a quantity value eventually, Conversion between date and time types -------------------------------------- -Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a datetime value, or convert a date value to a datetime value and so forth. Here lists the summary of the conversion rules that PPL plugin supports for each of the types: +Basically the date and time types except interval can be converted to each other, but might suffer some alteration of the value or some information loss, for example extracting the time value from a timestamp value, or convert a date value to a timestamp value and so forth. Here lists the summary of the conversion rules that PPL plugin supports for each of the types: Conversion from DATE >>>>>>>>>>>>>>>>>>>> - Since the date value does not have any time information, conversion to `Time`_ type is not useful, and will always return a zero time value '00:00:00'. -- Conversion from date to datetime has a data fill-up due to the lack of time information, and it attaches the time '00:00:00' to the original date by default and forms a datetime instance. For example, the result to covert date '2020-08-17' to datetime type is datetime '2020-08-17 00:00:00'. 
- -- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to covert date '2020-08-17' to datetime type with session timezone UTC is datetime '2020-08-17 00:00:00' UTC. +- Conversion to timestamp is to alternate both the time value and the timezone information, and it attaches the zero time value '00:00:00' and the session timezone (UTC by default) to the date. For example, the result to convert date '2020-08-17' to timestamp type with session timezone UTC is timestamp '2020-08-17 00:00:00' UTC. Conversion from TIME >>>>>>>>>>>>>>>>>>>> -- Time value cannot be converted to any other date and time types since it does not contain any date information, so it is not meaningful to give no date info to a date/datetime/timestamp instance. - - -Conversion from DATETIME ->>>>>>>>>>>>>>>>>>>>>>>> - -- Conversion from datetime to date is to extract the date part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to date is date '2020-08-08'. - -- Conversion to time is to extract the time part from the datetime value. For example, the result to convert datetime '2020-08-17 14:09:00' to time is time '14:09:00'. - -- Since the datetime type does not contain timezone information, the conversion to timestamp needs to fill up the timezone part with the session timezone. For example, the result to convert datetime '2020-08-17 14:09:00' with system timezone of UTC, to timestamp is timestamp '2020-08-17 14:09:00' UTC. +- Time value cannot be converted to any other date and time types since it does not contain any date information, so it is not meaningful to give no date info to a date/timestamp instance. Conversion from TIMESTAMP >>>>>>>>>>>>>>>>>>>>>>>>> -- Conversion from timestamp is much more straightforward. 
To convert it to date is to extract the date value, and conversion to time is to extract the time value. Conversion to datetime, it will extracts the datetime value and leave the timezone information over. For example, the result to convert datetime '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00' and to datetime is datetime '2020-08-17 14:09:00'. +- Conversion from timestamp is much more straightforward. To convert it to date is to extract the date value, and conversion to time is to extract the time value. For example, the result to convert timestamp '2020-08-17 14:09:00' UTC to date is date '2020-08-17', to time is '14:09:00'. String Data Types diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java index 105669c7ca..a0749387d5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java @@ -30,7 +30,7 @@ public void inRangeZeroToPositive() throws IOException { "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); } @@ -42,7 +42,7 @@ public void inRangeZeroToZero() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); } @@ -54,7 +54,7 @@ public void inRangePositiveToPositive() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, 
schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); } @@ -66,7 +66,7 @@ public void inRangeNegativeToPositive() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); } @@ -78,7 +78,7 @@ public void inRangeNoTZChange() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); } @@ -90,7 +90,7 @@ public void inRangeTwentyFourHourChange() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); } @@ -102,7 +102,7 @@ public void inRangeFifteenMinuteTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); } @@ -114,7 +114,7 @@ public void nullFromFieldUnder() throws IOException { "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); 
verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -126,7 +126,7 @@ public void nullToFieldOver() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -137,7 +137,7 @@ public void nullFromGarbageInput1() throws IOException { String.format( "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -148,7 +148,7 @@ public void nullFromGarbageInput2() throws IOException { String.format( "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -160,7 +160,7 @@ public void nullDateTimeInvalidDateValueFebruary() throws IOException { "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -172,7 +172,7 @@ public void nullDateTimeInvalidDateValueApril() throws IOException { "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -184,7 +184,7 @@ public void 
nullDateTimeInvalidDateValueMonth() throws IOException { "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java index 6f6b5cc297..7cc083cbb6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java @@ -98,32 +98,6 @@ public static Iterable compareTwoTimes() { $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareTwoDateTimes() { - return Arrays.asList( - $$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= 
DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $( - "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "lte3", - false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { return Arrays.asList( @@ -161,22 +135,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), @@ -187,37 +145,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareEqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') 
= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -227,10 +154,6 @@ public static Iterable compareEqDateWithOtherTypes() { $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), @@ -242,10 +165,6 @@ public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = 
TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -261,22 +180,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), @@ -287,37 +190,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareNeqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') 
!= DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -327,10 +199,6 @@ public static Iterable compareNeqDateWithOtherTypes() { $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), @@ -342,10 +210,6 @@ public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", 
"dt_t_f", false), $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -361,22 +225,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), @@ -387,37 +235,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", 
false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { return Arrays.asList( @@ -426,10 +243,6 @@ public static Iterable compareLtDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), @@ -441,10 +254,6 @@ public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", 
false), @@ -460,22 +269,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), @@ -486,37 +279,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - 
$("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { return Arrays.asList( @@ -525,10 +287,6 @@ public static Iterable compareGtDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), @@ -540,10 +298,6 @@ public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -559,22 +313,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", - 
"ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), @@ -585,37 +323,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => 
%3$s") public static Iterable compareLteDateWithOtherTypes() { return Arrays.asList( @@ -624,10 +331,6 @@ public static Iterable compareLteDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), @@ -639,10 +342,6 @@ public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -658,22 +357,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 
09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), @@ -684,37 +367,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { return Arrays.asList( @@ -723,10 +375,6 @@ public static Iterable compareGteDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 
09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), @@ -738,10 +386,6 @@ public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java index 1df87a87b3..3ea6897087 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java @@ -67,25 +67,15 @@ public void testAddDateWithDays() throws IOException { + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", 
null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-17 07:40:00")); - result = executeQuery( String.format( "source=%s | eval " + " f = adddate(TIME('07:40:00'), 0)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @@ -98,17 +88,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); result = @@ -118,7 +98,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(date('2020-09-16'), interval 1 day) " + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); result = @@ -128,7 +108,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + 
verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); result = @@ -138,7 +118,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -155,7 +135,7 @@ public void testAddDateWithInterval() throws IOException { + " f = adddate(TIME('07:40:00'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -173,7 +153,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); result = @@ -182,7 +162,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); result = @@ -191,7 +171,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); result = @@ -200,7 +180,7 @@ public void testConvertTZ() throws IOException { "source=%s | 
eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); result = @@ -209,7 +189,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); result = @@ -218,7 +198,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); result = @@ -227,7 +207,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); result = @@ -236,7 +216,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = @@ -245,7 +225,7 @@ public void testConvertTZ() throws IOException { "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + " f", TEST_INDEX_DATE)); - verifySchema(result, 
schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -258,17 +238,7 @@ public void testDateAdd() throws IOException { + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); result = @@ -278,7 +248,7 @@ public void testDateAdd() throws IOException { + " f = date_add(date('2020-09-16'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); result = @@ -288,7 +258,7 @@ public void testDateAdd() throws IOException { + " f = date_add(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); result = @@ -298,7 +268,7 @@ public void testDateAdd() throws IOException { + " f = date_add(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -315,7 +285,7 @@ public void testDateAdd() throws IOException { + " f = date_add(TIME('07:40:00'), interval 1 hour)" + " | fields f", 
TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -329,7 +299,7 @@ public void testDateAdd() throws IOException { String.format( "source=%s | eval " + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" + " | fields f", TEST_INDEX_BANK)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifyDataRows( result, rows("2018-10-23 00:00:00"), @@ -349,7 +319,7 @@ public void testDateTime() throws IOException { "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); result = @@ -357,7 +327,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); result = @@ -365,7 +335,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); result = @@ -373,7 +343,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); 
verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); result = @@ -381,7 +351,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2003-03-01 19:00:00")); result = @@ -389,7 +359,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); result = @@ -397,7 +367,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); result = @@ -405,7 +375,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = @@ -413,7 +383,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = @@ -421,7 +391,7 @@ public void testDateTime() 
throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = @@ -429,7 +399,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = @@ -437,7 +407,7 @@ public void testDateTime() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -450,17 +420,7 @@ public void testDateSub() throws IOException { + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); result = @@ -470,7 +430,7 @@ public void testDateSub() throws IOException { + " f = date_sub(date('2020-09-16'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, 
"timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); result = @@ -480,7 +440,7 @@ public void testDateSub() throws IOException { + " f = date_sub(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); result = @@ -490,7 +450,7 @@ public void testDateSub() throws IOException { + " f = date_sub(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -507,7 +467,7 @@ public void testDateSub() throws IOException { + " f = date_sub(TIME('07:40:00'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1051,7 +1011,7 @@ public void testSubDateDays() throws IOException { + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); result = @@ -1067,7 +1027,7 @@ public void testSubDateDays() throws IOException { String.format( "source=%s | eval " + " f = subdate(TIME('07:40:00'), 0)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @@ -1080,17 +1040,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); 
- verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - - result = - executeQuery( - String.format( - "source=%s | eval " - + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", - TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); result = @@ -1100,7 +1050,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(date('2020-09-16'), interval 1 day) " + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); result = @@ -1110,7 +1060,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(date('2020-09-16'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); result = @@ -1120,7 +1070,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(TIME('07:40:00'), interval 1 day)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1137,7 +1087,7 @@ public void testSubDateInterval() throws IOException { + " f = subdate(TIME('07:40:00'), interval 1 hour)" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1316,17 +1266,17 @@ public void testAddTime() throws IOException { + " DATE('2004-01-01')), `'2004-01-01' + '23:59:59'` 
=" + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' + '00:05:42'` =" + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' + '09:07:00'` =" - + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))" + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`," + " `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", TEST_INDEX_DATE)); verifySchema( result, - schema("'2008-12-12' + 0", null, "datetime"), + schema("'2008-12-12' + 0", null, "timestamp"), schema("'23:59:59' + 0", null, "time"), - schema("'2004-01-01' + '23:59:59'", null, "datetime"), + schema("'2004-01-01' + '23:59:59'", null, "timestamp"), schema("'10:20:30' + '00:05:42'", null, "time"), - schema("'15:42:13' + '09:07:00'", null, "datetime")); + schema("'15:42:13' + '09:07:00'", null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1347,17 +1297,17 @@ public void testSubTime() throws IOException { + " DATE('2004-01-01')), `'2004-01-01' - '23:59:59'` =" + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' - '00:05:42'` =" + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' - '09:07:00'` =" - + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))" + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`," + " `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", TEST_INDEX_DATE)); verifySchema( result, - schema("'2008-12-12' - 0", null, "datetime"), + schema("'2008-12-12' - 0", null, "timestamp"), schema("'23:59:59' - 0", null, "time"), - schema("'2004-01-01' - '23:59:59'", null, "datetime"), + schema("'2004-01-01' - '23:59:59'", null, "timestamp"), schema("'10:20:30' - '00:05:42'", null, "time"), - schema("'15:42:13' - '09:07:00'", null, "datetime")); + schema("'15:42:13' - '09:07:00'", 
null, "timestamp")); verifySome( result.getJSONArray("datarows"), rows( @@ -1378,8 +1328,8 @@ public void testFromUnixTime() throws IOException { TEST_INDEX_DATE)); verifySchema( result, - schema("f1", null, "datetime"), - schema("f2", null, "datetime"), + schema("f1", null, "timestamp"), + schema("f2", null, "timestamp"), schema("f3", null, "string")); verifySome( result.getJSONArray("datarows"), @@ -1427,6 +1377,7 @@ public void testPeriodDiff() throws IOException { verifySome(result.getJSONArray("datarows"), rows(11, -25)); } + @Test public void testDateDiff() throws IOException { var result = executeQuery( @@ -1435,7 +1386,7 @@ public void testDateDiff() throws IOException { + " 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')), `'2001-02-01' -" + " '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01" + " 00:00:00')), `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01" - + " 00:00:00'), DATETIME('2002-02-01 14:25:30')), `today - today` =" + + " 00:00:00'), TIMESTAMP('2002-02-01 14:25:30')), `today - today` =" + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) | fields `'2000-01-02' -" + " '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' -" + " '2002-02-01'`, `today - today`", @@ -1519,7 +1470,7 @@ public void testToSeconds() throws IOException { String.format( "source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + "eval f2 = to_seconds('2020-09-16 07:40:00') | " - + "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", + + "eval f3 = to_seconds(TIMESTAMP('2020-09-16 07:40:00')) | fields f1, f2, f3", TEST_INDEX_DATE)); verifySchema( result, schema("f1", null, "long"), schema("f2", null, "long"), schema("f3", null, "long")); @@ -1533,7 +1484,7 @@ public void testStrToDate() throws IOException { String.format( "source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", TEST_INDEX_DATE, "%d,%m,%Y")); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, 
"timestamp")); verifySome(result.getJSONArray("datarows"), rows("2013-05-01 00:00:00")); } @@ -1544,7 +1495,7 @@ public void testTimeStampAdd() throws IOException { String.format( "source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2016-03-06 00:00:00")); } @@ -1556,7 +1507,7 @@ public void testTimestampDiff() throws IOException { "source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06" + " 00:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(4)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index dd86470a39..f9dc7d8027 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -30,7 +30,7 @@ public void inRangeZeroToStringTZ() throws IOException { "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + " | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); } @@ -41,7 +41,7 @@ public void inRangeZeroToPositive() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); } @@ -52,7 +52,7 @@ public void 
inRangeNegativeToPositive() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); } @@ -63,7 +63,7 @@ public void inRangeTwentyHourOffset() throws IOException { String.format( "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); } @@ -74,7 +74,7 @@ public void inRangeYearChange() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); } @@ -85,7 +85,7 @@ public void inRangeZeroToMax() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); } @@ -96,7 +96,7 @@ public void inRangeNoToTZ() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @@ -107,7 +107,7 @@ public void inRangeNoTZ() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", 
TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @@ -118,7 +118,7 @@ public void nullField3Over() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -129,7 +129,7 @@ public void nullField2Under() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -140,7 +140,7 @@ public void nullTField3Over() throws IOException { String.format( "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -151,7 +151,7 @@ public void nullDateTimeInvalidDateValueFebruary() throws IOException { String.format( "source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -162,7 +162,7 @@ public void nullDateTimeInvalidDateValueApril() throws IOException { String.format( "source=%s | eval f = DATETIME('2021-04-31 10:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); 
verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @@ -173,7 +173,7 @@ public void nullDateTimeInvalidDateValueMonth() throws IOException { String.format( "source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", TEST_INDEX_DATE)); - verifySchema(result, schema("f", null, "datetime")); + verifySchema(result, schema("f", null, "timestamp")); verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java index 1c23935f81..c1356ce838 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java @@ -45,11 +45,10 @@ public void typeof_sql_types() throws IOException { "source=%s | eval " + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + "`time` = typeof(CAST('09:07:00' AS TIME))," - + "`date` = typeof(CAST('1961-04-12' AS DATE))," - + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" - + " | fields `timestamp`, `time`, `date`, `datetime`", + + "`date` = typeof(CAST('1961-04-12' AS DATE))" + + " | fields `timestamp`, `time`, `date`", TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java index 339cd56370..3f71499f97 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java @@ -287,13 +287,14 @@ public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOEx var response = executeQuery( String.format( - "SELECT " + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from 
%s", + "SELECT " + + "max(timestamp(NULL)), min(timestamp(NULL)), avg(timestamp(NULL)) from %s", TEST_INDEX_CALCS)); verifySchema( response, - schema("max(datetime(NULL))", null, "datetime"), - schema("min(datetime(NULL))", null, "datetime"), - schema("avg(datetime(NULL))", null, "datetime")); + schema("max(timestamp(NULL))", null, "timestamp"), + schema("min(timestamp(NULL))", null, "timestamp"), + schema("avg(timestamp(NULL))", null, "timestamp")); verifyDataRows(response, rows(null, null, null)); } @@ -480,8 +481,8 @@ public void testMinDateTimePushedDown() throws IOException { var response = executeQuery( String.format( - "SELECT min(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); - verifySchema(response, schema("min(datetime(CAST(time0 AS STRING)))", null, "datetime")); + "SELECT min(timestamp(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(timestamp(CAST(time0 AS STRING)))", null, "timestamp")); verifyDataRows(response, rows("1899-12-30 21:07:32")); } @@ -490,8 +491,8 @@ public void testMaxDateTimePushedDown() throws IOException { var response = executeQuery( String.format( - "SELECT max(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); - verifySchema(response, schema("max(datetime(CAST(time0 AS STRING)))", null, "datetime")); + "SELECT max(timestamp(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(timestamp(CAST(time0 AS STRING)))", null, "timestamp")); verifyDataRows(response, rows("1900-01-01 20:36:00")); } @@ -500,8 +501,8 @@ public void testAvgDateTimePushedDown() throws IOException { var response = executeQuery( String.format( - "SELECT avg(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); - verifySchema(response, schema("avg(datetime(CAST(time0 AS STRING)))", null, "datetime")); + "SELECT avg(timestamp(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); + verifySchema(response, 
schema("avg(timestamp(CAST(time0 AS STRING)))", null, "timestamp")); verifyDataRows(response, rows("1900-01-01 03:35:00.236")); } @@ -591,13 +592,15 @@ public void testMinDateTimeInMemory() throws IOException { var response = executeQuery( String.format( - "SELECT min(datetime(CAST(time0 AS STRING)))" + "SELECT min(timestamp(CAST(time0 AS STRING)))" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); verifySchema( response, schema( - "min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + "min(timestamp(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", + null, + "timestamp")); verifySome(response.getJSONArray("datarows"), rows("1899-12-30 21:07:32")); } @@ -606,13 +609,15 @@ public void testMaxDateTimeInMemory() throws IOException { var response = executeQuery( String.format( - "SELECT max(datetime(CAST(time0 AS STRING)))" + "SELECT max(timestamp(CAST(time0 AS STRING)))" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); verifySchema( response, schema( - "max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + "max(timestamp(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", + null, + "timestamp")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 20:36:00")); } @@ -621,13 +626,15 @@ public void testAvgDateTimeInMemory() throws IOException { var response = executeQuery( String.format( - "SELECT avg(datetime(CAST(time0 AS STRING)))" + "SELECT avg(timestamp(CAST(time0 AS STRING)))" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); verifySchema( response, schema( - "avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + "avg(timestamp(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", + null, + "timestamp")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 03:35:00.236")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java 
b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java index 76600b6561..776c4de290 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java @@ -26,7 +26,7 @@ public void init() throws Exception { public void inRangeZeroToPositive() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); verifySchema( - result, schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); + result, schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "timestamp")); verifyDataRows(result, rows("2008-05-15 22:00:00")); } @@ -34,7 +34,7 @@ public void inRangeZeroToPositive() throws IOException { public void inRangeNegativeZeroToPositiveZero() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 00:00:00")); } @@ -42,7 +42,7 @@ public void inRangeNegativeZeroToPositiveZero() throws IOException { public void inRangePositiveToPositive() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 01:00:00")); } @@ -50,7 +50,7 @@ public void inRangePositiveToPositive() throws IOException { public void inRangeNegativeToPositive() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); verifySchema( - result, 
schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-13 04:34:50")); } @@ -58,7 +58,7 @@ public void inRangeNegativeToPositive() throws IOException { public void inRangeSameTimeZone() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 11:34:50")); } @@ -66,7 +66,7 @@ public void inRangeSameTimeZone() throws IOException { public void inRangeTwentyFourHourTimeOffset() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "timestamp")); verifyDataRows(result, rows("2021-05-13 11:34:50")); } @@ -74,7 +74,7 @@ public void inRangeTwentyFourHourTimeOffset() throws IOException { public void inRangeFifteenMinuteTimeZones() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); verifySchema( - result, schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); + result, schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 09:15:00")); } @@ -82,7 +82,7 @@ public void inRangeFifteenMinuteTimeZones() throws IOException { public void inRangeRandomTimes() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); verifySchema( - result, schema("convert_tz('2021-05-12 
13:00:00','+09:31','+05:11')", null, "datetime")); + result, schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 08:40:00")); } @@ -90,7 +90,7 @@ public void inRangeRandomTimes() throws IOException { public void nullField2Under() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); verifySchema( - result, schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); + result, schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -98,7 +98,7 @@ public void nullField2Under() throws IOException { public void nullField3Over() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -106,7 +106,7 @@ public void nullField3Over() throws IOException { public void inRangeMinOnPoint() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); verifySchema( - result, schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "datetime")); + result, schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "timestamp")); verifyDataRows(result, rows("2021-05-12 15:00:00")); } @@ -118,7 +118,7 @@ public void inRangeMinOnPoint() throws IOException { public void nullField3InvalidInput() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); + result, schema("convert_tz('2021-05-12 
11:34:50','+10:0','+14:01')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -126,16 +126,16 @@ public void nullField3InvalidInput() throws IOException { public void nullField2InvalidInput() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); verifySchema( - result, schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); + result, schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } - // Invalid input in the datetime field of CONVERT_TZ results in a null field. It is any input + // Invalid input in the timestamp field of CONVERT_TZ results in a null field. It is any input // which is not of the format `yyyy-MM-dd HH:mm:ss` @Test - public void nullDateTimeInvalidInput() throws IOException { + public void nulltimestampInvalidInput() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021----','+00:00','+00:00')"); - verifySchema(result, schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); + verifySchema(result, schema("convert_tz('2021----','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -143,7 +143,7 @@ public void nullDateTimeInvalidInput() throws IOException { public void nullDateTimeInvalidDateValueFebruary() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -151,7 +151,7 @@ public void nullDateTimeInvalidDateValueFebruary() throws IOException { public void nullDateTimeInvalidDateValueApril() throws IOException { var result = executeJdbcRequest("SELECT 
convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @@ -159,7 +159,7 @@ public void nullDateTimeInvalidDateValueApril() throws IOException { public void nullDateTimeInvalidDateValueMonth() throws IOException { var result = executeJdbcRequest("SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); verifySchema( - result, schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); + result, schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java index 432daef82f..af3d81e374 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java @@ -105,32 +105,6 @@ public static Iterable compareTwoTimes() { $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareTwoDateTimes() { - return Arrays.asList( - $$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 
22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $( - "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "lte3", - false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { return Arrays.asList( @@ -168,22 +142,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), @@ -194,37 +152,6 @@ public static Iterable compareEqTimestampWithOtherTypes() { $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", 
"t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareEqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -234,10 +161,6 @@ public static Iterable compareEqDateWithOtherTypes() { $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", 
false), $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), @@ -249,10 +172,6 @@ public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -268,22 +187,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), @@ -294,37 +197,6 @@ public static Iterable compareNeqTimestampWithOtherTypes() { $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareNeqDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return 
Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); @@ -334,10 +206,6 @@ public static Iterable compareNeqDateWithOtherTypes() { $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), $("DATE('" + today + "') != 
TIME('00:00:00')", "d_t_f", false), @@ -349,10 +217,6 @@ public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -368,22 +232,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), @@ -394,37 +242,6 @@ public static Iterable compareLtTimestampWithOtherTypes() { $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') < 
DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { return Arrays.asList( @@ -433,10 +250,6 @@ public static Iterable compareLtDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), @@ -448,10 +261,6 @@ public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') < DATETIME('3077-04-12 
09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -467,22 +276,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), @@ -493,37 +286,6 @@ public static Iterable compareGtTimestampWithOtherTypes() { $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGtDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", - 
"ts_dt_f", - false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { return Arrays.asList( @@ -532,10 +294,6 @@ public static Iterable compareGtDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), @@ -547,10 +305,6 @@ public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > 
TIME('10:20:30')", "dt_t_f", false), $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), @@ -566,22 +320,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), @@ -592,37 +330,6 @@ public static Iterable compareLteTimestampWithOtherTypes() { $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareLteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') 
<= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { return Arrays.asList( @@ -631,10 +338,6 @@ public static Iterable compareLteDateWithOtherTypes() { $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), @@ -646,10 +349,6 @@ public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), 
$("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), @@ -665,22 +364,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", - "ts_dt_f", - false), - $( - "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", - "dt_ts_f", - false), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), @@ -691,37 +374,6 @@ public static Iterable compareGteTimestampWithOtherTypes() { $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") - public static Iterable compareGteDateTimeWithOtherTypes() { - var today = LocalDate.now().toString(); - return Arrays.asList( - $$( - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", - "dt_ts_t", - true), - $( - "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", - "ts_dt_t", - true), - $( - "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", - "dt_ts_f", - false), - $( - "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", - "ts_dt_f", - false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", 
true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); - } - @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { return Arrays.asList( @@ -730,10 +382,6 @@ public static Iterable compareGteDateWithOtherTypes() { $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), @@ -745,10 +393,6 @@ public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); return Arrays.asList( $$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java 
b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java index 33eb8b693f..0ec77f9f31 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java @@ -94,15 +94,15 @@ public void testAddDateWithDays() throws IOException { verifyDataRows(result, rows("2020-09-17")); result = executeQuery("select adddate(timestamp('2020-09-16 17:30:00'), 1)"); - verifySchema(result, schema("adddate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); + verifySchema(result, schema("adddate(timestamp('2020-09-16 17:30:00'), 1)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); - result = executeQuery("select adddate(DATETIME('2020-09-16 07:40:00'), 1)"); - verifySchema(result, schema("adddate(DATETIME('2020-09-16 07:40:00'), 1)", null, "datetime")); + result = executeQuery("select adddate(TIMESTAMP('2020-09-16 07:40:00'), 1)"); + verifySchema(result, schema("adddate(TIMESTAMP('2020-09-16 07:40:00'), 1)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 07:40:00")); result = executeQuery("select adddate(TIME('07:40:00'), 0)"); - verifySchema(result, schema("adddate(TIME('07:40:00'), 0)", null, "datetime")); + verifySchema(result, schema("adddate(TIME('07:40:00'), 0)", null, "timestamp")); verifyDataRows(result, rows(LocalDate.now() + " 07:40:00")); } @@ -112,25 +112,19 @@ public void testAddDateWithInterval() throws IOException { executeQuery("select adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, rows("2020-09-17 17:30:00")); - - result = executeQuery("select adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema( - result, - schema("adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("adddate(timestamp('2020-09-16 
17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -141,7 +135,7 @@ public void testAddDateWithInterval() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -157,25 +151,26 @@ public void testDateAdd() throws IOException { executeQuery("select date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); - result = executeQuery("select date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); + 
result = executeQuery("select date_add(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_add(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema( + result, schema("date_add(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -186,7 +181,7 @@ public void testDateAdd() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -199,7 +194,7 @@ public void testDateAdd() throws IOException { executeQuery( String.format("SELECT DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", TEST_INDEX_BANK)); - verifySchema(result, schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, 
"datetime")); + verifySchema(result, schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "timestamp")); verifyDataRows( result, rows("2018-10-23 00:00:00"), @@ -217,25 +212,26 @@ public void testDateSub() throws IOException { executeQuery("select date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); - result = executeQuery("select date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); + result = executeQuery("select date_sub(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("date_sub(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema( + result, schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -246,7 +242,7 @@ public 
void testDateSub() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -314,11 +310,11 @@ public void testDayOfMonthAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT dayofmonth(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT day_of_month(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -371,11 +367,11 @@ public void testDayOfWeekAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT dayofweek(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT day_of_week(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -409,8 +405,8 @@ public void testDayOfYearWithUnderscores() throws IOException { verifySchema(result, schema("day_of_year(date('2020-09-16'))", null, "integer")); verifyDataRows(result, rows(260)); - result = executeQuery("select day_of_year(datetime('2020-09-16 00:00:00'))"); - verifySchema(result, schema("day_of_year(datetime('2020-09-16 00:00:00'))", null, "integer")); + result = 
executeQuery("select day_of_year(timestamp('2020-09-16 00:00:00'))"); + verifySchema(result, schema("day_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); verifyDataRows(result, rows(260)); result = executeQuery("select day_of_year(timestamp('2020-09-16 00:00:00'))"); @@ -436,11 +432,11 @@ public void testDayOfYearAlternateSyntaxesReturnTheSameResults() throws IOExcept result1 = executeQuery( String.format( - "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT dayofyear(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT day_of_year(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -489,10 +485,6 @@ public void testHourOfDayWithUnderscores() throws IOException { verifySchema(result, schema("hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); - result = executeQuery("select hour_of_day(datetime('2020-09-16 17:30:00'))"); - verifySchema(result, schema("hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); - verifyDataRows(result, rows(17)); - result = executeQuery("select hour_of_day(time('17:30:00'))"); verifySchema(result, schema("hour_of_day(time('17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); @@ -511,7 +503,7 @@ public void testExtractWithDatetime() throws IOException { JSONObject datetimeResult = executeQuery( String.format( - "SELECT extract(DAY_SECOND FROM datetime(cast(datetime0 AS STRING))) FROM %s LIMIT" + "SELECT extract(DAY_SECOND FROM timestamp(cast(datetime0 AS STRING))) FROM %s LIMIT" + " 1", TEST_INDEX_CALCS)); verifyDataRows(datetimeResult, rows(9101735)); @@ -561,11 +553,11 @@ public void testHourFunctionAliasesReturnTheSameResults() throws IOException { result1 = 
executeQuery( String.format( - "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT hour(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT hour_of_day(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -663,8 +655,9 @@ public void testMinuteOfDay() throws IOException { result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(1050)); - result = executeQuery("select minute_of_day(datetime('2020-09-16 17:30:00'))"); - verifySchema(result, schema("minute_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); + result = executeQuery("select minute_of_day(timestamp('2020-09-16 17:30:00'))"); + verifySchema( + result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(1050)); result = executeQuery("select minute_of_day(time('17:30:00'))"); @@ -710,11 +703,11 @@ public void testMinuteFunctionAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT minute(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", + "SELECT minute_of_hour(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); @@ -750,8 +743,9 @@ public void testMonthOfYearTypes() throws IOException { verifySchema(result, schema("month_of_year(date('2020-09-16'))", null, "integer")); verifyDataRows(result, rows(9)); - result = executeQuery("select month_of_year(datetime('2020-09-16 00:00:00'))"); - 
verifySchema(result, schema("month_of_year(datetime('2020-09-16 00:00:00'))", null, "integer")); + result = executeQuery("select month_of_year(timestamp('2020-09-16 00:00:00'))"); + verifySchema( + result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); verifyDataRows(result, rows(9)); result = executeQuery("select month_of_year(timestamp('2020-09-16 00:00:00'))"); @@ -778,11 +772,12 @@ public void testMonthAlternateSyntaxesReturnTheSameResults() throws IOException result1 = executeQuery( String.format( - "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT month(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT month_of_year(timestamp(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1 = @@ -876,11 +871,11 @@ public void testSecondFunctionAliasesReturnTheSameResults() throws IOException { result1 = executeQuery( String.format( - "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + "SELECT second(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result2 = executeQuery( String.format( - "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", + "SELECT second_of_minute(timestamp(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); @@ -934,15 +929,15 @@ public void testSubDateWithDays() throws IOException { verifyDataRows(result, rows("2020-09-15")); result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); - verifySchema(result, schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); + verifySchema(result, schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "timestamp")); verifyDataRows(result, 
rows("2020-09-15 17:30:00")); - result = executeQuery("select subdate(DATETIME('2020-09-16 07:40:00'), 1)"); - verifySchema(result, schema("subdate(DATETIME('2020-09-16 07:40:00'), 1)", null, "datetime")); + result = executeQuery("select subdate(TIMESTAMP('2020-09-16 07:40:00'), 1)"); + verifySchema(result, schema("subdate(TIMESTAMP('2020-09-16 07:40:00'), 1)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 07:40:00")); result = executeQuery("select subdate(TIME('07:40:00'), 0)"); - verifySchema(result, schema("subdate(TIME('07:40:00'), 0)", null, "datetime")); + verifySchema(result, schema("subdate(TIME('07:40:00'), 0)", null, "timestamp")); verifyDataRows(result, rows(LocalDate.now() + " 07:40:00")); } @@ -952,25 +947,25 @@ public void testSubDateWithInterval() throws IOException { executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); - result = executeQuery("select subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); + result = executeQuery("select subdate(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)"); verifySchema( result, - schema("subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); + schema("subdate(TIMESTAMP('2020-09-16 17:30:00'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 day)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 hour)"); - 
verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 hour)", null, "timestamp")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 day)", null, "timestamp")); verifyDataRows( result, rows( @@ -981,7 +976,7 @@ public void testSubDateWithInterval() throws IOException { .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "timestamp")); verifyDataRows( result, rows( @@ -1045,7 +1040,7 @@ public void testToSeconds() throws IOException { result = executeQuery( String.format( - "SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", + "SELECT to_seconds(timestamp(cast(datetime0 AS string))) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); @@ -1142,7 +1137,7 @@ public void testWeekAlternateSyntaxesReturnTheSameResults() throws IOException { result1.getJSONArray("datarows").similar(result3.getJSONArray("datarows")); compareWeekResults("date0", TEST_INDEX_CALCS); - compareWeekResults("datetime(CAST(time0 AS STRING))", TEST_INDEX_CALCS); + compareWeekResults("timestamp(CAST(time0 AS STRING))", TEST_INDEX_CALCS); compareWeekResults("CAST(time0 AS STRING)", TEST_INDEX_CALCS); compareWeekResults("datetime0", TEST_INDEX_CALCS); } @@ -1215,8 +1210,8 @@ public void testFromUnixTime() throws IOException { + "FROM_UNIXTIME(1662601316, '%T') f3"); verifySchema( result, - schema("FROM_UNIXTIME(200300400)", 
"f1", "datetime"), - schema("FROM_UNIXTIME(12224.12)", "f2", "datetime"), + schema("FROM_UNIXTIME(200300400)", "f1", "timestamp"), + schema("FROM_UNIXTIME(12224.12)", "f2", "timestamp"), schema("FROM_UNIXTIME(1662601316, '%T')", "f3", "keyword")); verifySome( result.getJSONArray("datarows"), @@ -1272,21 +1267,21 @@ public void testAddTime() throws IOException { + " ADDTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' + 0`," + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," - + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00')) AS" + " `'15:42:13' + '09:07:00'`"); verifySchema( result, - schema("ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' + 0", "datetime"), + schema("ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' + 0", "timestamp"), schema("ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' + 0", "time"), schema( "ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' + '23:59:59'", - "datetime"), + "timestamp"), schema("ADDTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' + '00:05:42'", "time"), schema( - "ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))", "'15:42:13' + '09:07:00'", - "datetime")); + "timestamp")); verifyDataRows( result, rows( @@ -1305,21 +1300,21 @@ public void testSubTime() throws IOException { + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," - + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), 
TIMESTAMP('1961-04-12 09:07:00')) AS" + " `'15:42:13' - '09:07:00'`"); verifySchema( result, - schema("SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' - 0", "datetime"), + schema("SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' - 0", "timestamp"), schema("SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' - 0", "time"), schema( "SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' - '23:59:59'", - "datetime"), + "timestamp"), schema("SUBTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' - '00:05:42'", "time"), schema( - "SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), TIMESTAMP('1961-04-12 09:07:00'))", "'15:42:13' - '09:07:00'", - "datetime")); + "timestamp")); verifyDataRows( result, rows( @@ -1336,7 +1331,7 @@ public void testDateDiff() throws IOException { "SELECT DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS" + " `'2000-01-02' - '2000-01-01'`, DATEDIFF(DATE('2001-02-01')," + " TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," - + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS" + + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), TIMESTAMP('2002-02-01 14:25:30')) AS" + " `'2004-01-01' - '2002-02-01'`, DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS" + " `today - today`"); verifySchema( @@ -1350,7 +1345,7 @@ public void testDateDiff() throws IOException { "'2001-02-01' - '2004-01-01'", "long"), schema( - "DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", + "DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), TIMESTAMP('2002-02-01 14:25:30'))", "'2004-01-01' - '2002-02-01'", "long"), schema("DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))", "today - today", "long")); @@ -1446,12 +1441,12 @@ public void testDateBracket() throws IOException { verifyDataRows(result, rows("2020-09-16")); } - private void compareBrackets(String query1, String 
query2, String datetime) throws IOException { - JSONObject result1 = executeQuery("select " + query1 + " '" + datetime + "'"); - JSONObject result2 = executeQuery("select {" + query2 + " '" + datetime + "'}"); + private void compareBrackets(String query1, String query2, String timestamp) throws IOException { + JSONObject result1 = executeQuery("select " + query1 + " '" + timestamp + "'"); + JSONObject result2 = executeQuery("select {" + query2 + " '" + timestamp + "'}"); - verifyDataRows(result1, rows(datetime)); - verifyDataRows(result2, rows(datetime)); + verifyDataRows(result1, rows(timestamp)); + verifyDataRows(result2, rows(timestamp)); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java index 8ffa1df8f3..490272d950 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java @@ -28,7 +28,7 @@ public void inRangeZeroToStringTZ() throws IOException { executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); verifySchema( result, - schema("DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')", null, "datetime")); + schema("DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')", null, "timestamp")); verifyDataRows(result, rows("2008-12-24 21:30:00")); } @@ -36,7 +36,7 @@ public void inRangeZeroToStringTZ() throws IOException { public void inRangeZeroToPositive() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); verifySchema( - result, schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); + result, schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "timestamp")); verifyDataRows(result, rows("2008-12-25 06:30:00")); } @@ -44,7 +44,7 @@ public void inRangeZeroToPositive() throws 
IOException { public void inRangeNegativeToPositive() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); verifySchema( - result, schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); + result, schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "timestamp")); verifyDataRows(result, rows("2008-12-25 15:30:00")); } @@ -52,7 +52,7 @@ public void inRangeNegativeToPositive() throws IOException { public void inRangeTwentyHourOffset() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); verifySchema( - result, schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); + result, schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "timestamp")); verifyDataRows(result, rows("2004-02-29 19:00:00")); } @@ -60,21 +60,21 @@ public void inRangeTwentyHourOffset() throws IOException { public void inRangeYearChange() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroNoToTZ() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00')"); - verifySchema(result, schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "timestamp")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroNoTZ() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00')"); - verifySchema(result, schema("DATETIME('2008-01-01 02:00:00')", null, "datetime")); + verifySchema(result, 
schema("DATETIME('2008-01-01 02:00:00')", null, "timestamp")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @@ -82,7 +82,7 @@ public void inRangeZeroNoTZ() throws IOException { public void inRangeZeroDayConvert() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @@ -90,7 +90,7 @@ public void inRangeZeroDayConvert() throws IOException { public void inRangeJustInRangeNegative() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 02:01:00")); } @@ -98,7 +98,7 @@ public void inRangeJustInRangeNegative() throws IOException { public void inRangeJustInRangePositive() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "timestamp")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @@ -106,7 +106,7 @@ public void inRangeJustInRangePositive() throws IOException { public void nullField3Under() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-14:01')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); 
} @@ -114,28 +114,28 @@ public void nullField3Under() throws IOException { public void nullField1Over() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); verifySchema( - result, schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); + result, schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2021-02-30 10:00:00')"); - verifySchema(result, schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2021-02-30 10:00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2021-04-31 10:00:00')"); - verifySchema(result, schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2021-04-31 10:00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { var result = executeJdbcRequest("SELECT DATETIME('2021-13-03 10:00:00')"); - verifySchema(result, schema("DATETIME('2021-13-03 10:00:00')", null, "datetime")); + verifySchema(result, schema("DATETIME('2021-13-03 10:00:00')", null, "timestamp")); verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java index d2798728a1..7129d058c0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java @@ -35,9 +35,8 @@ public void typeof_sql_types() { "SELECT" + " 
typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + " typeof(CAST('09:07:00' AS TIME))," - + " typeof(CAST('1961-04-12' AS DATE))," - + " typeof(DATETIME('1961-04-12 09:07:00'))"); - verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + + " typeof(CAST('1961-04-12' AS DATE))"); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE")); } @Test diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java index d0a924c494..7e6bee77c2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java @@ -375,7 +375,6 @@ public static boolean isDateTypeCompatible(ExprType exprType) { } switch ((ExprCoreType) exprType) { case TIMESTAMP: - case DATETIME: case DATE: case TIME: return true; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java index 22c2ece4a7..3341e01ab2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java @@ -8,7 +8,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -20,7 +19,6 @@ import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER; 
import static org.opensearch.sql.utils.DateTimeFormatters.STRICT_HOUR_MINUTE_SECOND_FORMATTER; import static org.opensearch.sql.utils.DateTimeFormatters.STRICT_YEAR_MONTH_DAY_FORMATTER; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -29,6 +27,7 @@ import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; @@ -132,8 +131,6 @@ public void extendTypeMapping(Map typeMapping) { .put( OpenSearchDateType.of(TIMESTAMP), OpenSearchExprValueFactory::createOpenSearchDateType) - .put( - OpenSearchDateType.of(DATETIME), OpenSearchExprValueFactory::createOpenSearchDateType) .put( OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), (c, dt) -> new OpenSearchExprIpValue(c.stringValue())) @@ -241,11 +238,12 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da ZonedDateTime zonedDateTime = DateFormatters.from(accessor); switch (returnFormat) { case TIME: - return new ExprTimeValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); + return new ExprTimeValue(zonedDateTime.withZoneSameLocal(ZoneOffset.UTC).toLocalTime()); case DATE: - return new ExprDateValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); + return new ExprDateValue(zonedDateTime.withZoneSameLocal(ZoneOffset.UTC).toLocalDate()); default: - return new ExprTimestampValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); + return new ExprTimestampValue( + zonedDateTime.withZoneSameLocal(ZoneOffset.UTC).toInstant()); } } catch (IllegalArgumentException ignored) { // nothing to do, try another format @@ -291,9 +289,9 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) Instant instant = Instant.ofEpochMilli(epochMillis); 
switch ((ExprCoreType) returnFormat) { case TIME: - return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); + return new ExprTimeValue(LocalTime.from(instant.atZone(ZoneOffset.UTC))); case DATE: - return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); + return new ExprDateValue(LocalDate.ofInstant(instant, ZoneOffset.UTC)); default: return new ExprTimestampValue(instant); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java index 7e7b2e959a..06cca5dcc6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java @@ -49,7 +49,6 @@ public Object execute() { // Can't get timestamp from `ExprTimeValue` return MILLIS.between(LocalTime.MIN, expr.timeValue()); case DATE: - case DATETIME: case TIMESTAMP: return expr.timestampValue().toEpochMilli(); default: diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java index 4485626742..ff66ec425a 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java @@ -6,7 +6,6 @@ package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static 
org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; @@ -66,7 +65,7 @@ private CompositeValuesSourceBuilder buildCompositeValuesSourceBuilder( .missingOrder(missingOrder) .order(sortOrder); // Time types values are converted to LONG in ExpressionAggregationScript::execute - if (List.of(TIMESTAMP, TIME, DATE, DATETIME).contains(expr.getDelegated().type())) { + if (List.of(TIMESTAMP, TIME, DATE).contains(expr.getDelegated().type())) { sourceBuilder.userValuetypeHint(ValueType.LONG); } return helper.build(expr.getDelegated(), sourceBuilder::field, sourceBuilder::script); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java index 753c2bbbc7..11533c754e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java @@ -14,7 +14,6 @@ import org.opensearch.sql.data.model.ExprBooleanValue; import org.opensearch.sql.data.model.ExprByteValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprDoubleValue; import org.opensearch.sql.data.model.ExprFloatValue; import org.opensearch.sql.data.model.ExprIntegerValue; @@ -225,15 +224,6 @@ private ExprValue cast(FunctionExpression castFunction) { return new ExprTimeValue(expr.valueOf().timeValue()); } }) - .put( - BuiltinFunctionName.CAST_TO_DATETIME.getName(), - expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprDatetimeValue(expr.valueOf().stringValue()); - } else { - return new ExprDatetimeValue(expr.valueOf().datetimeValue()); - } - }) .put( BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), expr -> { diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java index a9511f8c0b..34738224e7 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java @@ -12,7 +12,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import static org.opensearch.sql.opensearch.data.type.OpenSearchDateType.SUPPORTED_NAMED_DATETIME_FORMATS; @@ -43,14 +42,14 @@ class OpenSearchDateTypeTest { private static final String timeFormatString = "hourMinuteSecond"; - private static final String datetimeFormatString = "basic_date_time"; + private static final String timestampFormatString = "basic_date_time"; private static final OpenSearchDateType defaultDateType = OpenSearchDateType.of(defaultFormatString); private static final OpenSearchDateType dateDateType = OpenSearchDateType.of(dateFormatString); private static final OpenSearchDateType timeDateType = OpenSearchDateType.of(timeFormatString); private static final OpenSearchDateType datetimeDateType = - OpenSearchDateType.of(datetimeFormatString); + OpenSearchDateType.of(timestampFormatString); @Test public void isCompatible() { @@ -59,25 +58,16 @@ public void isCompatible() { () -> assertTrue(TIMESTAMP.isCompatible(defaultDateType)), () -> assertTrue(TIMESTAMP.isCompatible(dateDateType)), () -> assertTrue(TIMESTAMP.isCompatible(timeDateType)), - () -> assertTrue(TIMESTAMP.isCompatible(datetimeDateType)), - - // datetime - () -> 
assertFalse(DATETIME.isCompatible(defaultDateType)), - () -> assertTrue(DATETIME.isCompatible(dateDateType)), - () -> assertTrue(DATETIME.isCompatible(timeDateType)), - () -> assertFalse(DATETIME.isCompatible(datetimeDateType)), // time type () -> assertFalse(TIME.isCompatible(defaultDateType)), () -> assertFalse(TIME.isCompatible(dateDateType)), () -> assertTrue(TIME.isCompatible(timeDateType)), - () -> assertFalse(TIME.isCompatible(datetimeDateType)), // date type () -> assertFalse(DATE.isCompatible(defaultDateType)), () -> assertTrue(DATE.isCompatible(dateDateType)), - () -> assertFalse(DATE.isCompatible(timeDateType)), - () -> assertFalse(DATE.isCompatible(datetimeDateType))); + () -> assertFalse(DATE.isCompatible(timeDateType))); } // `typeName` and `legacyTypeName` return the same thing for date objects: @@ -88,8 +78,7 @@ public void check_typeName() { // always use the MappingType of "DATE" () -> assertEquals("DATE", defaultDateType.typeName()), () -> assertEquals("DATE", timeDateType.typeName()), - () -> assertEquals("DATE", dateDateType.typeName()), - () -> assertEquals("DATE", datetimeDateType.typeName())); + () -> assertEquals("DATE", dateDateType.typeName())); } @Test @@ -98,8 +87,7 @@ public void check_legacyTypeName() { // always use the legacy "DATE" type () -> assertEquals("DATE", defaultDateType.legacyTypeName()), () -> assertEquals("DATE", timeDateType.legacyTypeName()), - () -> assertEquals("DATE", dateDateType.legacyTypeName()), - () -> assertEquals("DATE", datetimeDateType.legacyTypeName())); + () -> assertEquals("DATE", dateDateType.legacyTypeName())); } @Test @@ -108,8 +96,7 @@ public void check_exprTypeName() { // exprType changes based on type (no datetime): () -> assertEquals(TIMESTAMP, defaultDateType.getExprType()), () -> assertEquals(TIME, timeDateType.getExprType()), - () -> assertEquals(DATE, dateDateType.getExprType()), - () -> assertEquals(TIMESTAMP, datetimeDateType.getExprType())); + () -> assertEquals(DATE, 
dateDateType.getExprType())); } private static Stream getAllSupportedFormats() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index bfc06b94c0..83e26f85e4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -24,7 +24,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -34,7 +33,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; import static org.opensearch.sql.data.type.ExprCoreType.TIME; import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; -import static org.opensearch.sql.utils.DateTimeUtils.UTC_ZONE_ID; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -42,6 +40,7 @@ import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; +import java.time.ZoneOffset; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -50,7 +49,6 @@ import org.junit.jupiter.api.Test; import org.opensearch.sql.data.model.ExprCollectionValue; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprTupleValue; @@ 
-72,7 +70,6 @@ class OpenSearchExprValueFactoryTest { .put("doubleV", OpenSearchDataType.of(DOUBLE)) .put("stringV", OpenSearchDataType.of(STRING)) .put("dateV", OpenSearchDateType.of(DATE)) - .put("datetimeV", OpenSearchDateType.of(DATETIME)) .put("timeV", OpenSearchDateType.of(TIME)) .put("timestampV", OpenSearchDateType.of(TIMESTAMP)) .put("datetimeDefaultV", OpenSearchDateType.of()) @@ -248,7 +245,7 @@ public void constructDates() { () -> assertEquals( new ExprDateValue( - LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), + LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), ZoneOffset.UTC)), constructFromObject("dateV", 450576000000L)), () -> assertEquals( @@ -270,7 +267,7 @@ public void constructTimes() { () -> assertEquals( new ExprTimeValue( - LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), + LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(ZoneOffset.UTC))), constructFromObject("timeV", 1420070400001L)), () -> assertEquals( @@ -337,14 +334,6 @@ public void constructDatetime() { assertEquals( new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("timestampV", "2015-01-01 12:10:30")), - () -> - assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeV", "2015-01-01 12:10:30")), - () -> - assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), () -> assertEquals( new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), @@ -366,7 +355,7 @@ public void constructDatetime() { @Test public void constructDatetime_fromCustomFormat() { assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("customFormatV", "2015-01-01-12-10-30")); IllegalArgumentException exception = @@ -378,11 +367,11 @@ public void constructDatetime_fromCustomFormat() { exception.getMessage()); assertEquals( - new 
ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("customAndEpochMillisV", "2015-01-01 12:10:30")); assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), constructFromObject("customAndEpochMillisV", "2015-01-01-12-10-30")); } @@ -626,7 +615,7 @@ public void constructBinaryArrayReturnsFirstIndex() { @Test public void constructArrayOfCustomEpochMillisReturnsFirstIndex() { assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), + new ExprTimestampValue("2015-01-01 12:10:30"), tupleValue("{\"customAndEpochMillisV\":[\"2015-01-01 12:10:30\",\"1999-11-09 01:09:44\"]}") .get("customAndEpochMillisV")); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java index 6485dce124..1bb988dacd 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java @@ -13,7 +13,6 @@ import static org.mockito.Mockito.doAnswer; import static org.opensearch.sql.common.utils.StringUtils.format; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -150,20 +149,6 @@ void should_build_type_mapping_for_field_reference() { map("name", OpenSearchDataType.of(STRING)))); } - @Test - void should_build_type_mapping_for_datetime_type() { - assertThat( - buildTypeMapping( - Arrays.asList( - named( - "avg(datetime)", - new 
AvgAggregator(Arrays.asList(ref("datetime", DATETIME)), DATETIME))), - Arrays.asList(named("datetime", ref("datetime", DATETIME)))), - containsInAnyOrder( - map("avg(datetime)", OpenSearchDateType.of(DATETIME)), - map("datetime", OpenSearchDateType.of(DATETIME)))); - } - @Test void should_build_type_mapping_for_timestamp_type() { assertThat( diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java index 520e301301..6d90cce704 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java @@ -34,7 +34,6 @@ import org.opensearch.search.lookup.LeafSearchLookup; import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; @@ -113,14 +112,6 @@ void can_execute_expression_interpret_dates_for_aggregation() { .shouldMatch(new ExprDateValue(LocalDate.of(1961, 4, 12)).timestampValue().toEpochMilli()); } - @Test - void can_execute_expression_interpret_datetimes_for_aggregation() { - assertThat() - .docValues("datetime", "1984-03-17 22:16:42") - .evaluate(DSL.datetime(ref("datetime", STRING))) - .shouldMatch(new ExprDatetimeValue("1984-03-17 22:16:42").timestampValue().toEpochMilli()); - } - @Test void can_execute_expression_interpret_times_for_aggregation() { assertThat() diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java index d11d7da2fe..4250b3297f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java @@ -137,7 +137,7 @@ void should_build_bucket_with_parse_expression() { @ParameterizedTest(name = "{0}") @EnumSource( value = ExprCoreType.class, - names = {"TIMESTAMP", "TIME", "DATE", "DATETIME"}) + names = {"TIMESTAMP", "TIME", "DATE"}) void terms_bucket_for_datetime_types_uses_long(ExprType dataType) { assertEquals( "{\n" diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java index cca51c8f4a..df754887cf 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java @@ -15,7 +15,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -116,15 +115,6 @@ void can_execute_expression_with_timestamp_field() { .shouldMatch(); } - @Test - void can_execute_expression_with_datetime_field() { - ExprTimestampValue ts = new ExprTimestampValue("2020-08-04 10:00:00"); - assertThat() - .docValues("birthday", ZonedDateTime.parse("2020-08-04T10:00:00Z")) - 
.filterBy(DSL.equal(ref("birthday", DATETIME), new LiteralExpression(ts))) - .shouldMatch(); - } - @Test void can_execute_expression_with_date_field() { ExprDateValue date = new ExprDateValue("2020-08-04"); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java index 1fc2d5ee29..90b982e017 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java @@ -13,7 +13,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; import static org.opensearch.sql.data.type.ExprCoreType.DATE; -import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -42,7 +41,6 @@ import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.common.utils.StringUtils; import org.opensearch.sql.data.model.ExprDateValue; -import org.opensearch.sql.data.model.ExprDatetimeValue; import org.opensearch.sql.data.model.ExprTimeValue; import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprTupleValue; @@ -1787,7 +1785,7 @@ void cast_to_date_in_filter() { buildQuery( DSL.equal( ref("date_value", DATE), - DSL.castDate(literal(new ExprDatetimeValue("2021-11-08 17:00:00")))))); + DSL.castDate(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test @@ -1817,32 +1815,6 @@ void cast_to_time_in_filter() { DSL.castTime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } - @Test - void cast_to_datetime_in_filter() { - 
String json = - "{\n" - + " \"term\" : {\n" - + " \"datetime_value\" : {\n" - + " \"value\" : \"2021-11-08 17:00:00\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - - assertJsonEquals( - json, - buildQuery( - DSL.equal( - ref("datetime_value", DATETIME), - DSL.castDatetime(literal("2021-11-08 17:00:00"))))); - assertJsonEquals( - json, - buildQuery( - DSL.equal( - ref("datetime_value", DATETIME), - DSL.castDatetime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); - } - @Test void cast_to_timestamp_in_filter() { String json = From 7e3a718f1b7d9100fbac2ee8317fd35042b63b39 Mon Sep 17 00:00:00 2001 From: Yury-Fridlyand Date: Mon, 21 Aug 2023 10:25:12 -0700 Subject: [PATCH 35/42] Run IT tests with security plugin (#335) (#1986) * Run IT tests with security plugin (#335) * Add extra IT flow. Signed-off-by: Yury-Fridlyand * Remove unneeded files. Signed-off-by: Yury-Fridlyand * Typo fix. Signed-off-by: Yury-Fridlyand * Fix GHA matrix syntax. Signed-off-by: Yury-Fridlyand * Fix GHA matrix syntax. Signed-off-by: Yury-Fridlyand * Code clean up. Signed-off-by: Yury-Fridlyand * Optimize downloading. Signed-off-by: Yury-Fridlyand * Apply suggestions from code review Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto * Update integ-test/build.gradle Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto * Typo fix. Signed-off-by: Yury-Fridlyand * Rework implementation. Signed-off-by: Yury-Fridlyand * Address PR review. Signed-off-by: Yury-Fridlyand * Address PR feedback + some fixes. Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto * Minor fix. Signed-off-by: Yury-Fridlyand * Address PR feedback. Signed-off-by: Yury-Fridlyand * Typo fix. 
Signed-off-by: Yury-Fridlyand --------- Signed-off-by: Yury-Fridlyand Co-authored-by: Andrew Carbonetto --- .../workflows/integ-tests-with-security.yml | 43 +++++ integ-test/build.gradle | 163 +++++++++++++++++- .../sql/legacy/OpenSearchSQLRestTestCase.java | 85 +++++---- .../CrossClusterSearchIT.java | 52 ++++-- 4 files changed, 293 insertions(+), 50 deletions(-) create mode 100644 .github/workflows/integ-tests-with-security.yml rename integ-test/src/test/java/org/opensearch/sql/{ppl => security}/CrossClusterSearchIT.java (79%) diff --git a/.github/workflows/integ-tests-with-security.yml b/.github/workflows/integ-tests-with-security.yml new file mode 100644 index 0000000000..0d54b8cfef --- /dev/null +++ b/.github/workflows/integ-tests-with-security.yml @@ -0,0 +1,43 @@ +name: Security Plugin IT + +on: + pull_request: + push: + branches-ignore: + - 'dependabot/**' + paths: + - 'integ-test/**' + - '.github/workflows/integ-tests-with-security.yml' + +jobs: + security-it: + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest, windows-latest, macos-latest ] + java: [ 11, 17 ] + + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v3 + + - name: Set up JDK ${{ matrix.java }} + uses: actions/setup-java@v3 + with: + distribution: 'temurin' + java-version: ${{ matrix.java }} + + - name: Build with Gradle + run: ./gradlew integTestWithSecurity + + - name: Upload test reports + if: ${{ always() }} + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: test-reports-${{ matrix.os }}-${{ matrix.java }} + path: | + integ-test/build/reports/** + integ-test/build/testclusters/*/logs/* + integ-test/build/testclusters/*/config/* diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 6ee9cb425e..7cb0983670 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -24,7 +24,10 @@ import org.opensearch.gradle.test.RestIntegTestTask import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask +import 
org.opensearch.gradle.testclusters.OpenSearchCluster +import groovy.xml.XmlParser +import java.nio.file.Paths import java.util.concurrent.Callable import java.util.stream.Collectors @@ -62,6 +65,81 @@ ext { projectSubstitutions = [:] licenseFile = rootProject.file('LICENSE.TXT') noticeFile = rootProject.file('NOTICE') + + getSecurityPluginDownloadLink = { -> + var repo = "https://aws.oss.sonatype.org/content/repositories/snapshots/org/opensearch/plugin/" + + "opensearch-security/$opensearch_build/" + var metadataFile = Paths.get(projectDir.toString(), "build", "maven-metadata.xml").toAbsolutePath().toFile() + download.run { + src repo + "maven-metadata.xml" + dest metadataFile + } + def metadata = new XmlParser().parse(metadataFile) + def securitySnapshotVersion = metadata.versioning.snapshotVersions[0].snapshotVersion[0].value[0].text() + + return repo + "opensearch-security-${securitySnapshotVersion}.zip" + } + + File downloadedSecurityPlugin = null + + configureSecurityPlugin = { OpenSearchCluster cluster -> + + cluster.getNodes().forEach { node -> + var creds = node.getCredentials() + if (creds.isEmpty()) { + creds.add(Map.of('useradd', 'admin', '-p', 'admin')) + } else { + creds.get(0).putAll(Map.of('useradd', 'admin', '-p', 'admin')) + } + } + + var projectAbsPath = projectDir.getAbsolutePath() + + // add a check to avoid re-downloading multiple times during single test run + if (downloadedSecurityPlugin == null) { + downloadedSecurityPlugin = Paths.get(projectAbsPath, 'bin', 'opensearch-security-snapshot.zip').toFile() + download.run { + src getSecurityPluginDownloadLink() + dest downloadedSecurityPlugin + } + } + + // Config below including files are copied from security demo configuration + ['esnode.pem', 'esnode-key.pem', 'root-ca.pem'].forEach { file -> + File local = Paths.get(projectAbsPath, 'bin', file).toFile() + download.run { + src "https://raw.githubusercontent.com/opensearch-project/security/main/bwc-test/src/test/resources/security/" + file + 
dest local + overwrite false + } + cluster.extraConfigFile file, local + } + [ + // config copied from security plugin demo configuration + 'plugins.security.ssl.transport.pemcert_filepath' : 'esnode.pem', + 'plugins.security.ssl.transport.pemkey_filepath' : 'esnode-key.pem', + 'plugins.security.ssl.transport.pemtrustedcas_filepath' : 'root-ca.pem', + 'plugins.security.ssl.transport.enforce_hostname_verification' : 'false', + // https is disabled to simplify test debugging + 'plugins.security.ssl.http.enabled' : 'false', + 'plugins.security.ssl.http.pemcert_filepath' : 'esnode.pem', + 'plugins.security.ssl.http.pemkey_filepath' : 'esnode-key.pem', + 'plugins.security.ssl.http.pemtrustedcas_filepath' : 'root-ca.pem', + 'plugins.security.allow_unsafe_democertificates' : 'true', + + 'plugins.security.allow_default_init_securityindex' : 'true', + 'plugins.security.authcz.admin_dn' : 'CN=kirk,OU=client,O=client,L=test,C=de', + 'plugins.security.audit.type' : 'internal_opensearch', + 'plugins.security.enable_snapshot_restore_privilege' : 'true', + 'plugins.security.check_snapshot_restore_write_privileges' : 'true', + 'plugins.security.restapi.roles_enabled' : '["all_access", "security_rest_api_access"]', + 'plugins.security.system_indices.enabled' : 'true' + ].forEach { name, value -> + cluster.setting name, value + } + + cluster.plugin provider((Callable) (() -> (RegularFile) (() -> downloadedSecurityPlugin))) + } } tasks.withType(licenseHeaders.class) { @@ -108,6 +186,7 @@ dependencies { testImplementation group: 'com.h2database', name: 'h2', version: '2.2.220' testImplementation group: 'org.xerial', name: 'sqlite-jdbc', version: '3.41.2.2' testImplementation group: 'com.google.code.gson', name: 'gson', version: '2.8.9' + testCompileOnly 'org.apiguardian:apiguardian-api:1.1.2' // Needed for BWC tests zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${bwcVersion}-SNAPSHOT" @@ -128,21 +207,28 @@ compileTestJava { } testClusters.all { - 
testDistribution = 'archive' - // debug with command, ./gradlew opensearch-sql:run -DdebugJVM. --debug-jvm does not work with keystore. if (System.getProperty("debugJVM") != null) { jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005' } } -testClusters.integTest { - plugin ":opensearch-sql-plugin" - setting "plugins.query.datasources.encryption.masterkey", "1234567812345678" -} - testClusters { + integTest { + testDistribution = 'archive' + plugin ":opensearch-sql-plugin" + setting "plugins.query.datasources.encryption.masterkey", "1234567812345678" + } remoteCluster { + testDistribution = 'archive' + plugin ":opensearch-sql-plugin" + } + integTestWithSecurity { + testDistribution = 'archive' + plugin ":opensearch-sql-plugin" + } + remoteIntegTestWithSecurity { + testDistribution = 'archive' plugin ":opensearch-sql-plugin" } } @@ -223,6 +309,65 @@ task integJdbcTest(type: RestIntegTestTask) { } } +task integTestWithSecurity(type: RestIntegTestTask) { + useCluster testClusters.integTestWithSecurity + useCluster testClusters.remoteIntegTestWithSecurity + + systemProperty "cluster.names", + getClusters().stream().map(cluster -> cluster.getName()).collect(Collectors.joining(",")) + + getClusters().forEach { cluster -> + configureSecurityPlugin(cluster) + } + + useJUnitPlatform() + dependsOn ':opensearch-sql-plugin:bundlePlugin' + testLogging { + events "passed", "skipped", "failed" + } + afterTest { desc, result -> + logger.quiet "${desc.className}.${desc.name}: ${result.resultType} ${(result.getEndTime() - result.getStartTime())/1000}s" + } + + systemProperty 'tests.security.manager', 'false' + systemProperty 'project.root', project.projectDir.absolutePath + + // Set default query size limit + systemProperty 'defaultQuerySizeLimit', '10000' + + // Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for + // requests. 
The 'doFirst' delays reading the debug setting on the cluster till execution time. + doFirst { + systemProperty 'cluster.debug', getDebug() + getClusters().forEach { cluster -> + + String allTransportSocketURI = cluster.nodes.stream().flatMap { node -> + node.getAllTransportPortURI().stream() + }.collect(Collectors.joining(",")) + String allHttpSocketURI = cluster.nodes.stream().flatMap { node -> + node.getAllHttpSocketURI().stream() + }.collect(Collectors.joining(",")) + + systemProperty "tests.rest.${cluster.name}.http_hosts", "${-> allHttpSocketURI}" + systemProperty "tests.rest.${cluster.name}.transport_hosts", "${-> allTransportSocketURI}" + } + + systemProperty "https", "false" + systemProperty "user", "admin" + systemProperty "password", "admin" + } + + if (System.getProperty("test.debug") != null) { + jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' + } + + // NOTE: this IT config discovers only junit5 (jupiter) tests. + // https://github.com/opensearch-project/sql/issues/1974 + filter { + includeTestsMatching 'org.opensearch.sql.security.CrossClusterSearchIT' + } +} + // Run PPL ITs and new, legacy and comparison SQL ITs with new SQL engine enabled integTest { useCluster testClusters.remoteCluster @@ -305,8 +450,8 @@ integTest { // Exclude JDBC related tests exclude 'org/opensearch/sql/jdbc/**' - // Exclude this IT until running IT with security plugin enabled is ready - exclude 'org/opensearch/sql/ppl/CrossClusterSearchIT.class' + // Exclude this IT, because they executed in another task (:integTestWithSecurity) + exclude 'org/opensearch/sql/security/**' } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java index 385c9bc6ba..d73e3468d4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java @@ -5,8 +5,6 @@ package org.opensearch.sql.legacy; -import static java.util.Collections.unmodifiableList; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -49,8 +47,22 @@ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { private static final Logger LOG = LogManager.getLogger(); - public static final String REMOTE_CLUSTER = "remoteCluster"; public static final String MATCH_ALL_REMOTE_CLUSTER = "*"; + // Requires to insert cluster name and cluster transport address (host:port) + public static final String REMOTE_CLUSTER_SETTING = + "{" + + "\"persistent\": {" + + " \"cluster\": {" + + " \"remote\": {" + + " \"%s\": {" + + " \"seeds\": [" + + " \"%s\"" + + " ]" + + " }" + + " }" + + " }" + + "}" + + "}"; private static RestClient remoteClient; @@ -106,27 +118,24 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE } // Modified from initClient in OpenSearchRestTestCase - public void initRemoteClient() throws IOException { - if (remoteClient == null) { - assert remoteAdminClient == null; - String cluster = getTestRestCluster(REMOTE_CLUSTER); - String[] stringUrls = cluster.split(","); - List hosts = new ArrayList<>(stringUrls.length); - for (String stringUrl : stringUrls) { - int portSeparator = stringUrl.lastIndexOf(':'); - if (portSeparator < 0) { - throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); - } - String host = stringUrl.substring(0, portSeparator); - int port = Integer.valueOf(stringUrl.substring(portSeparator + 1)); - hosts.add(buildHttpHost(host, port)); + public void initRemoteClient(String clusterName) throws IOException { + remoteClient = remoteAdminClient = initClient(clusterName); + } + + /** Configure http client for the given cluster. 
*/ + public RestClient initClient(String clusterName) throws IOException { + String[] stringUrls = getTestRestCluster(clusterName).split(","); + List hosts = new ArrayList<>(stringUrls.length); + for (String stringUrl : stringUrls) { + int portSeparator = stringUrl.lastIndexOf(':'); + if (portSeparator < 0) { + throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); } - final List clusterHosts = unmodifiableList(hosts); - remoteClient = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); - remoteAdminClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[0])); + String host = stringUrl.substring(0, portSeparator); + int port = Integer.parseInt(stringUrl.substring(portSeparator + 1)); + hosts.add(buildHttpHost(host, port)); } - assert remoteClient != null; - assert remoteAdminClient != null; + return buildClient(restClientSettings(), hosts.toArray(new HttpHost[0])); } /** Get a comma delimited list of [host:port] to which to send REST requests. */ @@ -200,6 +209,27 @@ protected static void wipeAllOpenSearchIndices(RestClient client) throws IOExcep } } + /** + * Configure authentication and pass builder to superclass to configure other stuff.
+ * By default, auth is configure when https is set only. + */ + protected static void configureClient(RestClientBuilder builder, Settings settings) + throws IOException { + String userName = System.getProperty("user"); + String password = System.getProperty("password"); + if (userName != null && password != null) { + builder.setHttpClientConfigCallback( + httpClientBuilder -> { + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + new AuthScope(null, -1), + new UsernamePasswordCredentials(userName, password.toCharArray())); + return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + }); + } + OpenSearchRestTestCase.configureClient(builder, settings); + } + protected static void configureHttpsClient( RestClientBuilder builder, Settings settings, HttpHost httpHost) throws IOException { Map headers = ThreadContext.buildDefaultHeaders(settings); @@ -259,16 +289,13 @@ protected static void configureHttpsClient( * Initialize rest client to remote cluster, and create a connection to it from the coordinating * cluster. 
*/ - public void configureMultiClusters() throws IOException { - initRemoteClient(); + public void configureMultiClusters(String remote) throws IOException { + initRemoteClient(remote); Request connectionRequest = new Request("PUT", "_cluster/settings"); String connectionSetting = - "{\"persistent\": {\"cluster\": {\"remote\": {\"" - + REMOTE_CLUSTER - + "\": {\"seeds\": [\"" - + getTestTransportCluster(REMOTE_CLUSTER).split(",")[0] - + "\"]}}}}}"; + String.format( + REMOTE_CLUSTER_SETTING, remote, getTestTransportCluster(remote).split(",")[0]); connectionRequest.setJsonEntity(connectionSetting); adminClient().performRequest(connectionRequest); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java b/integ-test/src/test/java/org/opensearch/sql/security/CrossClusterSearchIT.java similarity index 79% rename from integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java rename to integ-test/src/test/java/org/opensearch/sql/security/CrossClusterSearchIT.java index 19e3debdf0..086f32cba7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/security/CrossClusterSearchIT.java @@ -3,7 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ -package org.opensearch.sql.ppl; +package org.opensearch.sql.security; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -14,15 +14,30 @@ import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import java.io.IOException; +import lombok.SneakyThrows; import org.json.JSONObject; -import org.junit.Rule; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; +import org.opensearch.sql.ppl.PPLIntegTestCase; +/** Cross Cluster Search tests to be executed with security plugin. 
*/ public class CrossClusterSearchIT extends PPLIntegTestCase { - @Rule public ExpectedException exceptionRule = ExpectedException.none(); + static { + // find a remote cluster + String[] clusterNames = System.getProperty("cluster.names").split(","); + var remote = "remoteCluster"; + for (var cluster : clusterNames) { + if (cluster.startsWith("remote")) { + remote = cluster; + break; + } + } + REMOTE_CLUSTER = remote; + } + + public static final String REMOTE_CLUSTER; private static final String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; private static final String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; @@ -30,14 +45,25 @@ public class CrossClusterSearchIT extends PPLIntegTestCase { MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; private static final String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; + private static boolean initialized = false; + + @SneakyThrows + @BeforeEach + public void initialize() { + if (!initialized) { + setUpIndices(); + initialized = true; + } + } + @Override - public void init() throws IOException { - configureMultiClusters(); + protected void init() throws Exception { + configureMultiClusters(REMOTE_CLUSTER); loadIndex(Index.BANK); loadIndex(Index.BANK, remoteClient()); loadIndex(Index.DOG); loadIndex(Index.DOG, remoteClient()); - loadIndex(Index.ACCOUNT, remoteClient()); + loadIndex(Index.ACCOUNT); } @Test @@ -55,11 +81,13 @@ public void testMatchAllCrossClusterSearchAllFields() throws IOException { @Test public void testCrossClusterSearchWithoutLocalFieldMappingShouldFail() throws IOException { - exceptionRule.expect(ResponseException.class); - exceptionRule.expectMessage("400 Bad Request"); - exceptionRule.expectMessage("IndexNotFoundException"); - - executeQuery(String.format("search source=%s", TEST_INDEX_ACCOUNT_REMOTE)); + var exception = + assertThrows( + ResponseException.class, + () -> executeQuery(String.format("search source=%s", 
TEST_INDEX_ACCOUNT_REMOTE))); + assertTrue( + exception.getMessage().contains("IndexNotFoundException") + && exception.getMessage().contains("400 Bad Request")); } @Test From 445c9e646a9a2805deed64c3fc984c97a5abde43 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Mon, 21 Aug 2023 13:20:14 -0700 Subject: [PATCH 36/42] [Spotless] Applying Google Code Format for legacy directory (pt 3/4) #21 (#1991) * Spotless apply for legacy pt 3 Signed-off-by: Mitchell Gale * spotless apply Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- .../antlr/OpenSearchLegacySqlAnalyzer.java | 138 +- .../types/base/OpenSearchDataType.java | 178 +- .../semantic/types/base/OpenSearchIndex.java | 92 +- .../function/OpenSearchScalarFunction.java | 139 +- .../types/function/ScalarFunction.java | 200 +-- .../antlr/semantic/types/special/Product.java | 85 +- .../visitor/OpenSearchMappingLoader.java | 332 ++-- .../sql/legacy/antlr/visitor/Reducible.java | 19 +- .../sql/legacy/cursor/NullCursor.java | 29 +- .../opensearch/sql/legacy/domain/Order.java | 89 +- .../opensearch/sql/legacy/domain/Paramer.java | 266 +-- .../opensearch/sql/legacy/domain/Query.java | 60 +- .../sql/legacy/domain/QueryActionRequest.java | 11 +- .../sql/legacy/domain/QueryStatement.java | 8 +- .../sql/legacy/domain/ScriptMethodField.java | 30 +- .../sql/legacy/domain/SearchResult.java | 215 ++- .../opensearch/sql/legacy/domain/Select.java | 314 ++-- .../sql/legacy/domain/bucketpath/Path.java | 84 +- .../sql/legacy/esdomain/OpenSearchClient.java | 85 +- .../executor/QueryActionElasticExecutor.java | 139 +- .../sql/legacy/executor/RestExecutor.java | 13 +- .../adapter/QueryPlanQueryAction.java | 39 +- .../adapter/QueryPlanRequestBuilder.java | 63 +- .../format/OpenSearchErrorMessage.java | 80 +- .../format/PrettyFormatRestExecutor.java | 145 +- .../sql/legacy/executor/format/Protocol.java | 395 ++--- .../sql/legacy/executor/format/ResultSet.java | 84 +- 
.../sql/legacy/executor/format/Schema.java | 248 +-- .../executor/format/SelectResultSet.java | 1436 ++++++++--------- .../join/NestedLoopsElasticExecutor.java | 596 +++---- .../join/QueryPlanElasticExecutor.java | 48 +- .../executor/join/SearchHitsResult.java | 47 +- .../core/operator/ScalarOperation.java | 49 +- .../core/operator/ScalarOperator.java | 29 +- .../sql/legacy/metrics/NumericMetric.java | 48 +- .../sql/legacy/metrics/RollingCounter.java | 141 +- .../sql/legacy/parser/NestedType.java | 180 +-- .../sql/legacy/parser/ScriptFilter.java | 151 +- .../sql/legacy/parser/SelectParser.java | 8 +- .../sql/legacy/plugin/RestSQLQueryAction.java | 44 +- .../sql/legacy/plugin/RestSqlAction.java | 475 +++--- .../sql/legacy/plugin/RestSqlStatsAction.java | 120 +- .../sql/legacy/plugin/SearchDao.java | 64 +- .../legacy/query/OpenSearchActionFactory.java | 326 ++-- .../sql/legacy/query/QueryAction.java | 388 ++--- .../NestedLoopsElasticRequestBuilder.java | 154 +- .../join/OpenSearchHashJoinQueryAction.java | 218 ++- .../query/join/OpenSearchJoinQueryAction.java | 199 ++- .../OpenSearchJoinQueryActionFactory.java | 55 +- .../OpenSearchNestedLoopsQueryAction.java | 72 +- .../sql/legacy/query/maker/QueryMaker.java | 126 +- .../OpenSearchMultiQueryActionFactory.java | 23 +- .../sql/legacy/query/planner/core/Plan.java | 24 +- .../legacy/query/planner/core/PlanNode.java | 71 +- .../query/planner/core/QueryParams.java | 91 +- .../query/planner/core/QueryPlanner.java | 147 +- .../query/planner/logical/node/Project.java | 197 ++- .../logical/rule/ProjectionPushDown.java | 93 +- .../logical/rule/SelectionPushDown.java | 42 +- .../planner/physical/PhysicalOperator.java | 61 +- .../query/planner/physical/PhysicalPlan.java | 117 +- .../legacy/query/planner/physical/Row.java | 172 +- .../node/project/PhysicalProject.java | 57 +- .../physical/node/scroll/PhysicalScroll.java | 80 +- .../planner/physical/node/scroll/Scroll.java | 301 ++-- .../SearchAggregationResponseHelper.java | 129 
+- .../physical/node/scroll/SearchHitRow.java | 274 ++-- .../planner/physical/node/sort/QuickSort.java | 118 +- .../planner/resource/ResourceManager.java | 78 +- .../request/PreparedStatementRequest.java | 304 ++-- .../sql/legacy/rewriter/RewriteRule.java | 31 +- .../legacy/rewriter/RewriteRuleExecutor.java | 75 +- .../nestedfield/NestedFieldProjection.java | 242 +-- .../nestedfield/NestedFieldRewriter.java | 96 +- .../legacy/rewriter/nestedfield/Scope.java | 112 +- .../legacy/rewriter/nestedfield/Select.java | 55 +- .../rewriter/ordinal/OrdinalRewriterRule.java | 219 +-- .../rewriter/subquery/NestedQueryContext.java | 83 +- .../rewriter/subquery/RewriterContext.java | 103 +- .../rewriter/subquery/rewriter/Rewriter.java | 23 +- .../subquery/rewriter/RewriterFactory.java | 41 +- .../opensearch/sql/legacy/spatial/Point.java | 29 +- .../legacy/spatial/PolygonFilterParams.java | 19 +- .../spatial/RangeDistanceFilterParams.java | 27 +- .../sql/legacy/utils/QueryDataAnonymizer.java | 55 +- .../antlr/semantic/types/ProductTypeTest.java | 88 +- .../legacy/executor/format/ResultSetTest.java | 45 +- .../RestSQLQueryActionCursorFallbackTest.java | 58 +- .../legacy/plugin/RestSQLQueryActionTest.java | 139 +- .../node/scroll/SearchHitRowTest.java | 4 +- .../unittest/NestedFieldProjectionTest.java | 635 ++++---- .../unittest/NestedFieldRewriterTest.java | 1231 +++++++------- .../legacy/unittest/OpenSearchClientTest.java | 62 +- .../PreparedStatementRequestTest.java | 125 +- .../legacy/unittest/QueryFunctionsTest.java | 443 ++--- .../expression/core/RefExpressionTest.java | 69 +- .../unittest/metrics/NumericMetricTest.java | 28 +- .../unittest/metrics/RollingCounterTest.java | 84 +- .../planner/OpenSearchActionFactoryTest.java | 82 +- .../planner/QueryPlannerBatchTest.java | 355 ++-- .../planner/QueryPlannerConfigTest.java | 528 +++--- .../planner/QueryPlannerExecuteTest.java | 1176 +++++--------- .../planner/QueryPlannerExplainTest.java | 64 +- 
.../planner/QueryPlannerMonitorTest.java | 189 +-- .../unittest/planner/QueryPlannerTest.java | 383 +++-- .../SearchAggregationResponseHelperTest.java | 600 +++---- .../rewriter/RewriteRuleExecutorTest.java | 41 +- .../ordinal/OrdinalRewriterRuleTest.java | 247 ++- .../subquery/NestedQueryContextTest.java | 79 +- .../unittest/utils/PrettyFormatterTest.java | 80 +- .../unittest/utils/QueryContextTest.java | 82 +- .../utils/QueryDataAnonymizerTest.java | 137 +- ...enSearchAggregationResponseParserTest.java | 5 +- 113 files changed, 9183 insertions(+), 10019 deletions(-) diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java index b44e2bbb41..bb063f4df4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; import java.util.Optional; @@ -25,84 +24,77 @@ import org.opensearch.sql.legacy.antlr.visitor.EarlyExitAnalysisException; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Entry point for ANTLR generated parser to perform strict syntax and semantic analysis. - */ +/** Entry point for ANTLR generated parser to perform strict syntax and semantic analysis. */ public class OpenSearchLegacySqlAnalyzer { - private static final Logger LOG = LogManager.getLogger(); - - /** Original sql query */ - private final SqlAnalysisConfig config; - - public OpenSearchLegacySqlAnalyzer(SqlAnalysisConfig config) { - this.config = config; - } - - public Optional analyze(String sql, LocalClusterState clusterState) { - // Perform analysis for SELECT only for now because of extra code changes required for SHOW/DESCRIBE. 
- if (!isSelectStatement(sql) || !config.isAnalyzerEnabled()) { - return Optional.empty(); - } + private static final Logger LOG = LogManager.getLogger(); - try { - return Optional.of(analyzeSemantic( - analyzeSyntax(sql), - clusterState - )); - } catch (EarlyExitAnalysisException e) { - // Expected if configured so log on debug level to avoid always logging stack trace - LOG.debug("Analysis exits early and will skip remaining process", e); - return Optional.empty(); - } - } + /** Original sql query */ + private final SqlAnalysisConfig config; - /** - * Build lexer and parser to perform syntax analysis only. - * Runtime exception with clear message is thrown for any verification error. - * - * @return parse tree - */ - public ParseTree analyzeSyntax(String sql) { - OpenSearchLegacySqlParser parser = createParser(createLexer(sql)); - parser.addErrorListener(new SyntaxAnalysisErrorListener()); - return parser.root(); - } + public OpenSearchLegacySqlAnalyzer(SqlAnalysisConfig config) { + this.config = config; + } - /** - * Perform semantic analysis based on syntax analysis output - parse tree. - * - * @param tree parse tree - * @param clusterState cluster state required for index mapping query - */ - public Type analyzeSemantic(ParseTree tree, LocalClusterState clusterState) { - return tree.accept(new AntlrSqlParseTreeVisitor<>(createAnalyzer(clusterState))); + public Optional analyze(String sql, LocalClusterState clusterState) { + // Perform analysis for SELECT only for now because of extra code changes required for + // SHOW/DESCRIBE. 
+ if (!isSelectStatement(sql) || !config.isAnalyzerEnabled()) { + return Optional.empty(); } - /** Factory method for semantic analyzer to help assemble all required components together */ - private SemanticAnalyzer createAnalyzer(LocalClusterState clusterState) { - SemanticContext context = new SemanticContext(); - OpenSearchMappingLoader - mappingLoader = new OpenSearchMappingLoader(context, clusterState, config.getAnalysisThreshold()); - TypeChecker typeChecker = new TypeChecker(context, config.isFieldSuggestionEnabled()); - return new SemanticAnalyzer(mappingLoader, typeChecker); + try { + return Optional.of(analyzeSemantic(analyzeSyntax(sql), clusterState)); + } catch (EarlyExitAnalysisException e) { + // Expected if configured so log on debug level to avoid always logging stack trace + LOG.debug("Analysis exits early and will skip remaining process", e); + return Optional.empty(); } - - private OpenSearchLegacySqlParser createParser(Lexer lexer) { - return new OpenSearchLegacySqlParser( - new CommonTokenStream(lexer)); - } - - private OpenSearchLegacySqlLexer createLexer(String sql) { - return new OpenSearchLegacySqlLexer( - new CaseInsensitiveCharStream(sql)); - } - - private boolean isSelectStatement(String sql) { - sql = sql.replaceAll("\\R", " ").trim(); - int endOfFirstWord = sql.indexOf(' '); - String firstWord = sql.substring(0, endOfFirstWord > 0 ? endOfFirstWord : sql.length()); - return "SELECT".equalsIgnoreCase(firstWord); - } - + } + + /** + * Build lexer and parser to perform syntax analysis only. Runtime exception with clear message is + * thrown for any verification error. + * + * @return parse tree + */ + public ParseTree analyzeSyntax(String sql) { + OpenSearchLegacySqlParser parser = createParser(createLexer(sql)); + parser.addErrorListener(new SyntaxAnalysisErrorListener()); + return parser.root(); + } + + /** + * Perform semantic analysis based on syntax analysis output - parse tree. 
+ * + * @param tree parse tree + * @param clusterState cluster state required for index mapping query + */ + public Type analyzeSemantic(ParseTree tree, LocalClusterState clusterState) { + return tree.accept(new AntlrSqlParseTreeVisitor<>(createAnalyzer(clusterState))); + } + + /** Factory method for semantic analyzer to help assemble all required components together */ + private SemanticAnalyzer createAnalyzer(LocalClusterState clusterState) { + SemanticContext context = new SemanticContext(); + OpenSearchMappingLoader mappingLoader = + new OpenSearchMappingLoader(context, clusterState, config.getAnalysisThreshold()); + TypeChecker typeChecker = new TypeChecker(context, config.isFieldSuggestionEnabled()); + return new SemanticAnalyzer(mappingLoader, typeChecker); + } + + private OpenSearchLegacySqlParser createParser(Lexer lexer) { + return new OpenSearchLegacySqlParser(new CommonTokenStream(lexer)); + } + + private OpenSearchLegacySqlLexer createLexer(String sql) { + return new OpenSearchLegacySqlLexer(new CaseInsensitiveCharStream(sql)); + } + + private boolean isSelectStatement(String sql) { + sql = sql.replaceAll("\\R", " ").trim(); + int endOfFirstWord = sql.indexOf(' '); + String firstWord = sql.substring(0, endOfFirstWord > 0 ? 
endOfFirstWord : sql.length()); + return "SELECT".equalsIgnoreCase(firstWord); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java index eab40c2dc7..00ef4afdf1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.base; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex.IndexType.NESTED_FIELD; @@ -13,105 +12,102 @@ import java.util.Map; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Base type hierarchy based on OpenSearch data type - */ +/** Base type hierarchy based on OpenSearch data type */ public enum OpenSearchDataType implements BaseType { - - TYPE_ERROR, - UNKNOWN, - - SHORT, LONG, - INTEGER(SHORT, LONG), - FLOAT(INTEGER), - DOUBLE(FLOAT), - NUMBER(DOUBLE), - - KEYWORD, - TEXT(KEYWORD), - STRING(TEXT), - - DATE_NANOS, - DATE(DATE_NANOS, STRING), - - BOOLEAN, - - OBJECT, NESTED, - COMPLEX(OBJECT, NESTED), - - GEO_POINT, - - OPENSEARCH_TYPE( - NUMBER, - //STRING, move to under DATE because DATE is compatible - DATE, - BOOLEAN, - COMPLEX, - GEO_POINT - ); - - - /** - * Java Enum's valueOf() may thrown "enum constant not found" exception. - * And Java doesn't provide a contains method. - * So this static map is necessary for check and efficiency. 
- */ - private static final Map ALL_BASE_TYPES; - static { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (OpenSearchDataType type : OpenSearchDataType.values()) { - builder.put(type.name(), type); - } - ALL_BASE_TYPES = builder.build(); + TYPE_ERROR, + UNKNOWN, + + SHORT, + LONG, + INTEGER(SHORT, LONG), + FLOAT(INTEGER), + DOUBLE(FLOAT), + NUMBER(DOUBLE), + + KEYWORD, + TEXT(KEYWORD), + STRING(TEXT), + + DATE_NANOS, + DATE(DATE_NANOS, STRING), + + BOOLEAN, + + OBJECT, + NESTED, + COMPLEX(OBJECT, NESTED), + + GEO_POINT, + + OPENSEARCH_TYPE( + NUMBER, + // STRING, move to under DATE because DATE is compatible + DATE, + BOOLEAN, + COMPLEX, + GEO_POINT); + + /** + * Java Enum's valueOf() may thrown "enum constant not found" exception. And Java doesn't provide + * a contains method. So this static map is necessary for check and efficiency. + */ + private static final Map ALL_BASE_TYPES; + + static { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (OpenSearchDataType type : OpenSearchDataType.values()) { + builder.put(type.name(), type); } + ALL_BASE_TYPES = builder.build(); + } - public static OpenSearchDataType typeOf(String str) { - return ALL_BASE_TYPES.getOrDefault(toUpper(str), UNKNOWN); - } + public static OpenSearchDataType typeOf(String str) { + return ALL_BASE_TYPES.getOrDefault(toUpper(str), UNKNOWN); + } - /** Parent of current base type */ - private OpenSearchDataType parent; + /** Parent of current base type */ + private OpenSearchDataType parent; - OpenSearchDataType(OpenSearchDataType... compatibleTypes) { - for (OpenSearchDataType subType : compatibleTypes) { - subType.parent = this; - } + OpenSearchDataType(OpenSearchDataType... 
compatibleTypes) { + for (OpenSearchDataType subType : compatibleTypes) { + subType.parent = this; } - - @Override - public String getName() { - return name(); + } + + @Override + public String getName() { + return name(); + } + + /** + * For base type, compatibility means this (current type) is ancestor of other in the base type + * hierarchy. + */ + @Override + public boolean isCompatible(Type other) { + // Skip compatibility check if type is unknown + if (this == UNKNOWN || other == UNKNOWN) { + return true; } - /** - * For base type, compatibility means this (current type) is ancestor of other - * in the base type hierarchy. - */ - @Override - public boolean isCompatible(Type other) { - // Skip compatibility check if type is unknown - if (this == UNKNOWN || other == UNKNOWN) { - return true; - } - - if (!(other instanceof OpenSearchDataType)) { - // Nested data type is compatible with nested index type for type expression use - if (other instanceof OpenSearchIndex && ((OpenSearchIndex) other).type() == NESTED_FIELD) { - return isCompatible(NESTED); - } - return false; - } - - // One way compatibility: parent base type is compatible with children - OpenSearchDataType cur = (OpenSearchDataType) other; - while (cur != null && cur != this) { - cur = cur.parent; - } - return cur != null; + if (!(other instanceof OpenSearchDataType)) { + // Nested data type is compatible with nested index type for type expression use + if (other instanceof OpenSearchIndex && ((OpenSearchIndex) other).type() == NESTED_FIELD) { + return isCompatible(NESTED); + } + return false; } - @Override - public String toString() { - return "OpenSearch Data Type [" + getName() + "]"; + // One way compatibility: parent base type is compatible with children + OpenSearchDataType cur = (OpenSearchDataType) other; + while (cur != null && cur != this) { + cur = cur.parent; } + return cur != null; + } + + @Override + public String toString() { + return "OpenSearch Data Type [" + getName() + "]"; + } } 
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java index b3d971100b..2c790f15aa 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java @@ -3,68 +3,66 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.base; import java.util.Objects; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Index type is not Enum because essentially each index is a brand new type. - */ +/** Index type is not Enum because essentially each index is a brand new type. */ public class OpenSearchIndex implements BaseType { - public enum IndexType { - INDEX, NESTED_FIELD, INDEX_PATTERN - } + public enum IndexType { + INDEX, + NESTED_FIELD, + INDEX_PATTERN + } - private final String indexName; - private final IndexType indexType; + private final String indexName; + private final IndexType indexType; - public OpenSearchIndex(String indexName, IndexType indexType) { - this.indexName = indexName; - this.indexType = indexType; - } + public OpenSearchIndex(String indexName, IndexType indexType) { + this.indexName = indexName; + this.indexType = indexType; + } - public IndexType type() { - return indexType; - } + public IndexType type() { + return indexType; + } - @Override - public String getName() { - return indexName; - } + @Override + public String getName() { + return indexName; + } - @Override - public boolean isCompatible(Type other) { - return equals(other); - } + @Override + public boolean isCompatible(Type other) { + return equals(other); + } - @Override - public String usage() { - return indexType.name(); - } + @Override + public String usage() { + return indexType.name(); + } - @Override - public String toString() { - return 
indexType + " [" + indexName + "]"; - } + @Override + public String toString() { + return indexType + " [" + indexName + "]"; + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - OpenSearchIndex index = (OpenSearchIndex) o; - return Objects.equals(indexName, index.indexName) - && indexType == index.indexType; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public int hashCode() { - return Objects.hash(indexName, indexType); + if (o == null || getClass() != o.getClass()) { + return false; } + OpenSearchIndex index = (OpenSearchIndex) o; + return Objects.equals(indexName, index.indexName) && indexType == index.indexType; + } + + @Override + public int hashCode() { + return Objects.hash(indexName, indexType); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java index 93e1950d50..435a5ca968 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.function; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; @@ -16,87 +15,73 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; -/** - * OpenSearch special scalar functions - */ +/** OpenSearch special scalar functions */ public enum OpenSearchScalarFunction implements TypeExpression { + DATE_HISTOGRAM(), // this is aggregate function + DAY_OF_MONTH(func(DATE).to(INTEGER)), + 
DAY_OF_YEAR(func(DATE).to(INTEGER)), + DAY_OF_WEEK(func(DATE).to(INTEGER)), + EXCLUDE(), // can only be used in SELECT? + EXTENDED_STATS(), // need confirm + FIELD(), // couldn't find test cases related + FILTER(), + GEO_BOUNDING_BOX(func(GEO_POINT, NUMBER, NUMBER, NUMBER, NUMBER).to(BOOLEAN)), + GEO_CELL(), // optional arg or overloaded spec is required. + GEO_DISTANCE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), + GEO_DISTANCE_RANGE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), + GEO_INTERSECTS(), // ? + GEO_POLYGON(), // varargs is required for 2nd arg + HISTOGRAM(), // same as date_histogram + HOUR_OF_DAY(func(DATE).to(INTEGER)), + INCLUDE(), // same as exclude + IN_TERMS(), // varargs + MATCHPHRASE(func(STRING, STRING).to(BOOLEAN), func(STRING).to(STRING)), // slop arg is optional + MATCH_PHRASE(MATCHPHRASE.specifications()), + MATCHQUERY(func(STRING, STRING).to(BOOLEAN), func(STRING).to(STRING)), + MATCH_QUERY(MATCHQUERY.specifications()), + MINUTE_OF_DAY(func(DATE).to(INTEGER)), // or long? + MINUTE_OF_HOUR(func(DATE).to(INTEGER)), + MONTH_OF_YEAR(func(DATE).to(INTEGER)), + MULTIMATCH(), // kw arguments + MULTI_MATCH(MULTIMATCH.specifications()), + NESTED(), // overloaded + PERCENTILES(), // ? + REGEXP_QUERY(), // ? + REVERSE_NESTED(), // need overloaded + QUERY(func(STRING).to(BOOLEAN)), + RANGE(), // aggregate function + SCORE(), // semantic problem? + SECOND_OF_MINUTE(func(DATE).to(INTEGER)), + STATS(), + TERM(), // semantic problem + TERMS(), // semantic problem + TOPHITS(), // only available in SELECT + WEEK_OF_YEAR(func(DATE).to(INTEGER)), + WILDCARDQUERY(func(STRING, STRING).to(BOOLEAN), func(STRING).to(STRING)), + WILDCARD_QUERY(WILDCARDQUERY.specifications()); - DATE_HISTOGRAM(), // this is aggregate function - DAY_OF_MONTH(func(DATE).to(INTEGER)), - DAY_OF_YEAR(func(DATE).to(INTEGER)), - DAY_OF_WEEK(func(DATE).to(INTEGER)), - EXCLUDE(), // can only be used in SELECT? 
- EXTENDED_STATS(), // need confirm - FIELD(), // couldn't find test cases related - FILTER(), - GEO_BOUNDING_BOX(func(GEO_POINT, NUMBER, NUMBER, NUMBER, NUMBER).to(BOOLEAN)), - GEO_CELL(), // optional arg or overloaded spec is required. - GEO_DISTANCE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), - GEO_DISTANCE_RANGE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), - GEO_INTERSECTS(), //? - GEO_POLYGON(), // varargs is required for 2nd arg - HISTOGRAM(), // same as date_histogram - HOUR_OF_DAY(func(DATE).to(INTEGER)), - INCLUDE(), // same as exclude - IN_TERMS(), // varargs - MATCHPHRASE( - func(STRING, STRING).to(BOOLEAN), - func(STRING).to(STRING) - ), //slop arg is optional - MATCH_PHRASE(MATCHPHRASE.specifications()), - MATCHQUERY( - func(STRING, STRING).to(BOOLEAN), - func(STRING).to(STRING) - ), - MATCH_QUERY(MATCHQUERY.specifications()), - MINUTE_OF_DAY(func(DATE).to(INTEGER)), // or long? - MINUTE_OF_HOUR(func(DATE).to(INTEGER)), - MONTH_OF_YEAR(func(DATE).to(INTEGER)), - MULTIMATCH(), // kw arguments - MULTI_MATCH(MULTIMATCH.specifications()), - NESTED(), // overloaded - PERCENTILES(), //? - REGEXP_QUERY(), //? - REVERSE_NESTED(), // need overloaded - QUERY(func(STRING).to(BOOLEAN)), - RANGE(), // aggregate function - SCORE(), // semantic problem? - SECOND_OF_MINUTE(func(DATE).to(INTEGER)), - STATS(), - TERM(), // semantic problem - TERMS(), // semantic problem - TOPHITS(), // only available in SELECT - WEEK_OF_YEAR(func(DATE).to(INTEGER)), - WILDCARDQUERY( - func(STRING, STRING).to(BOOLEAN), - func(STRING).to(STRING) - ), - WILDCARD_QUERY(WILDCARDQUERY.specifications()); - - - private final TypeExpressionSpec[] specifications; - - OpenSearchScalarFunction(TypeExpressionSpec... specifications) { - this.specifications = specifications; - } + private final TypeExpressionSpec[] specifications; - @Override - public String getName() { - return name(); - } + OpenSearchScalarFunction(TypeExpressionSpec... 
specifications) { + this.specifications = specifications; + } - @Override - public TypeExpressionSpec[] specifications() { - return specifications; - } + @Override + public String getName() { + return name(); + } - private static TypeExpressionSpec func(Type... argTypes) { - return new TypeExpressionSpec().map(argTypes); - } + @Override + public TypeExpressionSpec[] specifications() { + return specifications; + } - @Override - public String toString() { - return "Function [" + name() + "]"; - } + private static TypeExpressionSpec func(Type... argTypes) { + return new TypeExpressionSpec().map(argTypes); + } + @Override + public String toString() { + return "Function [" + name() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java index e993562df8..5dfada7ca8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.function; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; @@ -18,123 +17,98 @@ import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; -/** - * Scalar SQL function - */ +/** Scalar SQL function */ public enum ScalarFunction implements TypeExpression { + ABS(func(T(NUMBER)).to(T)), // translate to Java: T ABS(T) + ACOS(func(T(NUMBER)).to(DOUBLE)), + ADD(func(T(NUMBER), NUMBER).to(T)), + ASCII(func(T(STRING)).to(INTEGER)), + ASIN(func(T(NUMBER)).to(DOUBLE)), + ATAN(func(T(NUMBER)).to(DOUBLE)), + ATAN2(func(T(NUMBER), NUMBER).to(DOUBLE)), + CAST(), + CBRT(func(T(NUMBER)).to(T)), + 
CEIL(func(T(NUMBER)).to(T)), + CONCAT(), // TODO: varargs support required + CONCAT_WS(), + COS(func(T(NUMBER)).to(DOUBLE)), + COSH(func(T(NUMBER)).to(DOUBLE)), + COT(func(T(NUMBER)).to(DOUBLE)), + CURDATE(func().to(OpenSearchDataType.DATE)), + DATE(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), + DATE_FORMAT( + func(OpenSearchDataType.DATE, STRING).to(STRING), + func(OpenSearchDataType.DATE, STRING, STRING).to(STRING)), + DAYOFMONTH(func(OpenSearchDataType.DATE).to(INTEGER)), + DEGREES(func(T(NUMBER)).to(DOUBLE)), + DIVIDE(func(T(NUMBER), NUMBER).to(T)), + E(func().to(DOUBLE)), + EXP(func(T(NUMBER)).to(T)), + EXPM1(func(T(NUMBER)).to(T)), + FLOOR(func(T(NUMBER)).to(T)), + IF(func(BOOLEAN, OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), + IFNULL(func(OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), + ISNULL(func(OPENSEARCH_TYPE).to(INTEGER)), + LEFT(func(T(STRING), INTEGER).to(T)), + LENGTH(func(STRING).to(INTEGER)), + LN(func(T(NUMBER)).to(DOUBLE)), + LOCATE(func(STRING, STRING, INTEGER).to(INTEGER), func(STRING, STRING).to(INTEGER)), + LOG(func(T(NUMBER)).to(DOUBLE), func(T(NUMBER), NUMBER).to(DOUBLE)), + LOG2(func(T(NUMBER)).to(DOUBLE)), + LOG10(func(T(NUMBER)).to(DOUBLE)), + LOWER(func(T(STRING)).to(T), func(T(STRING), STRING).to(T)), + LTRIM(func(T(STRING)).to(T)), + MAKETIME(func(INTEGER, INTEGER, INTEGER).to(OpenSearchDataType.DATE)), + MODULUS(func(T(NUMBER), NUMBER).to(T)), + MONTH(func(OpenSearchDataType.DATE).to(INTEGER)), + MONTHNAME(func(OpenSearchDataType.DATE).to(STRING)), + MULTIPLY(func(T(NUMBER), NUMBER).to(NUMBER)), + NOW(func().to(OpenSearchDataType.DATE)), + PI(func().to(DOUBLE)), + POW(func(T(NUMBER)).to(T), func(T(NUMBER), NUMBER).to(T)), + POWER(func(T(NUMBER)).to(T), func(T(NUMBER), NUMBER).to(T)), + RADIANS(func(T(NUMBER)).to(DOUBLE)), + RAND(func().to(NUMBER), func(T(NUMBER)).to(T)), + REPLACE(func(T(STRING), STRING, STRING).to(T)), + RIGHT(func(T(STRING), INTEGER).to(T)), + RINT(func(T(NUMBER)).to(T)), + 
ROUND(func(T(NUMBER)).to(T)), + RTRIM(func(T(STRING)).to(T)), + SIGN(func(T(NUMBER)).to(T)), + SIGNUM(func(T(NUMBER)).to(T)), + SIN(func(T(NUMBER)).to(DOUBLE)), + SINH(func(T(NUMBER)).to(DOUBLE)), + SQRT(func(T(NUMBER)).to(T)), + SUBSTRING(func(T(STRING), INTEGER, INTEGER).to(T)), + SUBTRACT(func(T(NUMBER), NUMBER).to(T)), + TAN(func(T(NUMBER)).to(DOUBLE)), + TIMESTAMP(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), + TRIM(func(T(STRING)).to(T)), + UPPER(func(T(STRING)).to(T), func(T(STRING), STRING).to(T)), + YEAR(func(OpenSearchDataType.DATE).to(INTEGER)); - ABS(func(T(NUMBER)).to(T)), // translate to Java: T ABS(T) - ACOS(func(T(NUMBER)).to(DOUBLE)), - ADD(func(T(NUMBER), NUMBER).to(T)), - ASCII(func(T(STRING)).to(INTEGER)), - ASIN(func(T(NUMBER)).to(DOUBLE)), - ATAN(func(T(NUMBER)).to(DOUBLE)), - ATAN2(func(T(NUMBER), NUMBER).to(DOUBLE)), - CAST(), - CBRT(func(T(NUMBER)).to(T)), - CEIL(func(T(NUMBER)).to(T)), - CONCAT(), // TODO: varargs support required - CONCAT_WS(), - COS(func(T(NUMBER)).to(DOUBLE)), - COSH(func(T(NUMBER)).to(DOUBLE)), - COT(func(T(NUMBER)).to(DOUBLE)), - CURDATE(func().to(OpenSearchDataType.DATE)), - DATE(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), - DATE_FORMAT( - func(OpenSearchDataType.DATE, STRING).to(STRING), - func(OpenSearchDataType.DATE, STRING, STRING).to(STRING) - ), - DAYOFMONTH(func(OpenSearchDataType.DATE).to(INTEGER)), - DEGREES(func(T(NUMBER)).to(DOUBLE)), - DIVIDE(func(T(NUMBER), NUMBER).to(T)), - E(func().to(DOUBLE)), - EXP(func(T(NUMBER)).to(T)), - EXPM1(func(T(NUMBER)).to(T)), - FLOOR(func(T(NUMBER)).to(T)), - IF(func(BOOLEAN, OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), - IFNULL(func(OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), - ISNULL(func(OPENSEARCH_TYPE).to(INTEGER)), - LEFT(func(T(STRING), INTEGER).to(T)), - LENGTH(func(STRING).to(INTEGER)), - LN(func(T(NUMBER)).to(DOUBLE)), - LOCATE( - func(STRING, STRING, INTEGER).to(INTEGER), - func(STRING, STRING).to(INTEGER) - 
), - LOG( - func(T(NUMBER)).to(DOUBLE), - func(T(NUMBER), NUMBER).to(DOUBLE) - ), - LOG2(func(T(NUMBER)).to(DOUBLE)), - LOG10(func(T(NUMBER)).to(DOUBLE)), - LOWER( - func(T(STRING)).to(T), - func(T(STRING), STRING).to(T) - ), - LTRIM(func(T(STRING)).to(T)), - MAKETIME(func(INTEGER, INTEGER, INTEGER).to(OpenSearchDataType.DATE)), - MODULUS(func(T(NUMBER), NUMBER).to(T)), - MONTH(func(OpenSearchDataType.DATE).to(INTEGER)), - MONTHNAME(func(OpenSearchDataType.DATE).to(STRING)), - MULTIPLY(func(T(NUMBER), NUMBER).to(NUMBER)), - NOW(func().to(OpenSearchDataType.DATE)), - PI(func().to(DOUBLE)), - POW( - func(T(NUMBER)).to(T), - func(T(NUMBER), NUMBER).to(T) - ), - POWER( - func(T(NUMBER)).to(T), - func(T(NUMBER), NUMBER).to(T) - ), - RADIANS(func(T(NUMBER)).to(DOUBLE)), - RAND( - func().to(NUMBER), - func(T(NUMBER)).to(T) - ), - REPLACE(func(T(STRING), STRING, STRING).to(T)), - RIGHT(func(T(STRING), INTEGER).to(T)), - RINT(func(T(NUMBER)).to(T)), - ROUND(func(T(NUMBER)).to(T)), - RTRIM(func(T(STRING)).to(T)), - SIGN(func(T(NUMBER)).to(T)), - SIGNUM(func(T(NUMBER)).to(T)), - SIN(func(T(NUMBER)).to(DOUBLE)), - SINH(func(T(NUMBER)).to(DOUBLE)), - SQRT(func(T(NUMBER)).to(T)), - SUBSTRING(func(T(STRING), INTEGER, INTEGER).to(T)), - SUBTRACT(func(T(NUMBER), NUMBER).to(T)), - TAN(func(T(NUMBER)).to(DOUBLE)), - TIMESTAMP(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), - TRIM(func(T(STRING)).to(T)), - UPPER( - func(T(STRING)).to(T), - func(T(STRING), STRING).to(T) - ), - YEAR(func(OpenSearchDataType.DATE).to(INTEGER)); - - private final TypeExpressionSpec[] specifications; + private final TypeExpressionSpec[] specifications; - ScalarFunction(TypeExpressionSpec... specifications) { - this.specifications = specifications; - } + ScalarFunction(TypeExpressionSpec... 
specifications) { + this.specifications = specifications; + } - @Override - public String getName() { - return name(); - } + @Override + public String getName() { + return name(); + } - @Override - public TypeExpressionSpec[] specifications() { - return specifications; - } + @Override + public TypeExpressionSpec[] specifications() { + return specifications; + } - private static TypeExpressionSpec func(Type... argTypes) { - return new TypeExpressionSpec().map(argTypes); - } + private static TypeExpressionSpec func(Type... argTypes) { + return new TypeExpressionSpec().map(argTypes); + } - @Override - public String toString() { - return "Function [" + name() + "]"; - } + @Override + public String toString() { + return "Function [" + name() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java index ad4d86895b..98f04dc629 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.special; import java.util.Collections; @@ -12,62 +11,56 @@ import lombok.Getter; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Combination of multiple types, ex. function arguments - */ +/** Combination of multiple types, ex. 
function arguments */ public class Product implements Type { - @Getter - private final List types; + @Getter private final List types; - public Product(List itemTypes) { - types = Collections.unmodifiableList(itemTypes); - } + public Product(List itemTypes) { + types = Collections.unmodifiableList(itemTypes); + } - @Override - public String getName() { - return "Product of types " + types; - } + @Override + public String getName() { + return "Product of types " + types; + } - @Override - public boolean isCompatible(Type other) { - if (!(other instanceof Product)) { - return false; - } - - Product otherProd = (Product) other; - if (types.size() != otherProd.types.size()) { - return false; - } - - for (int i = 0; i < types.size(); i++) { - Type type = types.get(i); - Type otherType = otherProd.types.get(i); - if (!isCompatibleEitherWay(type, otherType)) { - return false; - } - } - return true; + @Override + public boolean isCompatible(Type other) { + if (!(other instanceof Product)) { + return false; } - @Override - public Type construct(List others) { - return this; + Product otherProd = (Product) other; + if (types.size() != otherProd.types.size()) { + return false; } - @Override - public String usage() { - if (types.isEmpty()) { - return "(*)"; - } - return types.stream(). - map(Type::usage). 
- collect(Collectors.joining(", ", "(", ")")); + for (int i = 0; i < types.size(); i++) { + Type type = types.get(i); + Type otherType = otherProd.types.get(i); + if (!isCompatibleEitherWay(type, otherType)) { + return false; + } } + return true; + } + + @Override + public Type construct(List others) { + return this; + } - /** Perform two-way compatibility check here which is different from normal type expression */ - private boolean isCompatibleEitherWay(Type type1, Type type2) { - return type1.isCompatible(type2) || type2.isCompatible(type1); + @Override + public String usage() { + if (types.isEmpty()) { + return "(*)"; } + return types.stream().map(Type::usage).collect(Collectors.joining(", ", "(", ")")); + } + /** Perform two-way compatibility check here which is different from normal type expression */ + private boolean isCompatibleEitherWay(Type type1, Type type2) { + return type1.isCompatible(type2) || type2.isCompatible(type1); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java index dca201f25b..4d009dc438 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.visitor; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex.IndexType.INDEX; @@ -26,178 +25,181 @@ import org.opensearch.sql.legacy.esdomain.mapping.IndexMappings; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Load index and nested field mapping into semantic context - */ +/** Load index and nested field mapping into semantic context */ public class OpenSearchMappingLoader implements GenericSqlParseTreeVisitor { - /** Semantic 
context shared in the semantic analysis process */ - private final SemanticContext context; - - /** Local cluster state for mapping query */ - private final LocalClusterState clusterState; - - /** Threshold to decide if continue the analysis */ - private final int threshold; - - public OpenSearchMappingLoader(SemanticContext context, LocalClusterState clusterState, int threshold) { - this.context = context; - this.clusterState = clusterState; - this.threshold = threshold; - } - - /* - * Suppose index 'accounts' includes 'name', 'age' and nested field 'projects' - * which includes 'name' and 'active'. - * - * 1. Define itself: - * ----- new definitions ----- - * accounts -> INDEX - * - * 2. Define without alias no matter if alias given: - * 'accounts' -> INDEX - * ----- new definitions ----- - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - */ - @Override - public Type visitIndexName(String indexName) { - if (isNotNested(indexName)) { - defineIndexType(indexName); - loadAllFieldsWithType(indexName); - } - return defaultValue(); - } - - @Override - public void visitAs(String alias, Type type) { - if (!(type instanceof OpenSearchIndex)) { - return; - } - - OpenSearchIndex index = (OpenSearchIndex) type; - String indexName = type.getName(); - - if (index.type() == INDEX) { - String aliasName = alias.isEmpty() ? 
indexName : alias; - defineAllFieldNamesByAppendingAliasPrefix(indexName, aliasName); - } else if (index.type() == NESTED_FIELD) { - if (!alias.isEmpty()) { - defineNestedFieldNamesByReplacingWithAlias(indexName, alias); - } - } // else Do nothing for index pattern - } - - private void defineIndexType(String indexName) { - environment().define(new Symbol(Namespace.FIELD_NAME, indexName), new OpenSearchIndex(indexName, INDEX)); - } - - private void loadAllFieldsWithType(String indexName) { - Set mappings = getFieldMappings(indexName); - mappings.forEach(mapping -> mapping.flat(this::defineFieldName)); - } - - /* - * 3.1 Define with alias if given: ex."SELECT * FROM accounts a". - * 'accounts' -> INDEX - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - * ----- new definitions ----- - * ['a' -> INDEX] -- this is done in semantic analyzer - * 'a.name' -> TEXT - * 'a.age' -> INTEGER - * 'a.projects' -> NESTED - * 'a.projects.name' -> KEYWORD - * 'a.projects.active' -> BOOLEAN - * - * 3.2 Otherwise define by index full name: ex."SELECT * FROM account" - * 'accounts' -> INDEX - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - * ----- new definitions ----- - * 'accounts.name' -> TEXT - * 'accounts.age' -> INTEGER - * 'accounts.projects' -> NESTED - * 'accounts.projects.name' -> KEYWORD - * 'accounts.projects.active' -> BOOLEAN - */ - private void defineAllFieldNamesByAppendingAliasPrefix(String indexName, String alias) { - Set mappings = getFieldMappings(indexName); - mappings.stream().forEach(mapping -> mapping.flat((fieldName, type) -> - defineFieldName(alias + "." 
+ fieldName, type))); + /** Semantic context shared in the semantic analysis process */ + private final SemanticContext context; + + /** Local cluster state for mapping query */ + private final LocalClusterState clusterState; + + /** Threshold to decide if continue the analysis */ + private final int threshold; + + public OpenSearchMappingLoader( + SemanticContext context, LocalClusterState clusterState, int threshold) { + this.context = context; + this.clusterState = clusterState; + this.threshold = threshold; + } + + /* + * Suppose index 'accounts' includes 'name', 'age' and nested field 'projects' + * which includes 'name' and 'active'. + * + * 1. Define itself: + * ----- new definitions ----- + * accounts -> INDEX + * + * 2. Define without alias no matter if alias given: + * 'accounts' -> INDEX + * ----- new definitions ----- + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + */ + @Override + public Type visitIndexName(String indexName) { + if (isNotNested(indexName)) { + defineIndexType(indexName); + loadAllFieldsWithType(indexName); } + return defaultValue(); + } - /* - * 3.3 Define with alias if given: ex."SELECT * FROM accounts a, a.project p" - * 'accounts' -> INDEX - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - * 'a.name' -> TEXT - * 'a.age' -> INTEGER - * 'a.projects' -> NESTED - * 'a.projects.name' -> KEYWORD - * 'a.projects.active' -> BOOLEAN - * ----- new definitions ----- - * ['p' -> NESTED] -- this is done in semantic analyzer - * 'p.name' -> KEYWORD - * 'p.active' -> BOOLEAN - */ - private void defineNestedFieldNamesByReplacingWithAlias(String nestedFieldName, String alias) { - Map typeByFullName = environment().resolveByPrefix( - new Symbol(Namespace.FIELD_NAME, nestedFieldName)); - typeByFullName.forEach( - (fieldName, fieldType) -> defineFieldName(fieldName.replace(nestedFieldName, 
alias), fieldType) - ); + @Override + public void visitAs(String alias, Type type) { + if (!(type instanceof OpenSearchIndex)) { + return; } - /** - * Check if index name is NOT nested, for example. return true for index 'accounts' or '.opensearch_dashboards' - * but return false for nested field name 'a.projects'. - */ - private boolean isNotNested(String indexName) { - return indexName.indexOf('.', 1) == -1; // taking care of .opensearch_dashboards + OpenSearchIndex index = (OpenSearchIndex) type; + String indexName = type.getName(); + + if (index.type() == INDEX) { + String aliasName = alias.isEmpty() ? indexName : alias; + defineAllFieldNamesByAppendingAliasPrefix(indexName, aliasName); + } else if (index.type() == NESTED_FIELD) { + if (!alias.isEmpty()) { + defineNestedFieldNamesByReplacingWithAlias(indexName, alias); + } + } // else Do nothing for index pattern + } + + private void defineIndexType(String indexName) { + environment() + .define(new Symbol(Namespace.FIELD_NAME, indexName), new OpenSearchIndex(indexName, INDEX)); + } + + private void loadAllFieldsWithType(String indexName) { + Set mappings = getFieldMappings(indexName); + mappings.forEach(mapping -> mapping.flat(this::defineFieldName)); + } + + /* + * 3.1 Define with alias if given: ex."SELECT * FROM accounts a". 
+ * 'accounts' -> INDEX + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + * ----- new definitions ----- + * ['a' -> INDEX] -- this is done in semantic analyzer + * 'a.name' -> TEXT + * 'a.age' -> INTEGER + * 'a.projects' -> NESTED + * 'a.projects.name' -> KEYWORD + * 'a.projects.active' -> BOOLEAN + * + * 3.2 Otherwise define by index full name: ex."SELECT * FROM account" + * 'accounts' -> INDEX + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + * ----- new definitions ----- + * 'accounts.name' -> TEXT + * 'accounts.age' -> INTEGER + * 'accounts.projects' -> NESTED + * 'accounts.projects.name' -> KEYWORD + * 'accounts.projects.active' -> BOOLEAN + */ + private void defineAllFieldNamesByAppendingAliasPrefix(String indexName, String alias) { + Set mappings = getFieldMappings(indexName); + mappings.stream() + .forEach( + mapping -> + mapping.flat((fieldName, type) -> defineFieldName(alias + "." 
+ fieldName, type))); + } + + /* + * 3.3 Define with alias if given: ex."SELECT * FROM accounts a, a.project p" + * 'accounts' -> INDEX + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + * 'a.name' -> TEXT + * 'a.age' -> INTEGER + * 'a.projects' -> NESTED + * 'a.projects.name' -> KEYWORD + * 'a.projects.active' -> BOOLEAN + * ----- new definitions ----- + * ['p' -> NESTED] -- this is done in semantic analyzer + * 'p.name' -> KEYWORD + * 'p.active' -> BOOLEAN + */ + private void defineNestedFieldNamesByReplacingWithAlias(String nestedFieldName, String alias) { + Map typeByFullName = + environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, nestedFieldName)); + typeByFullName.forEach( + (fieldName, fieldType) -> + defineFieldName(fieldName.replace(nestedFieldName, alias), fieldType)); + } + + /** + * Check if index name is NOT nested, for example. return true for index 'accounts' or + * '.opensearch_dashboards' but return false for nested field name 'a.projects'. 
+ */ + private boolean isNotNested(String indexName) { + return indexName.indexOf('.', 1) == -1; // taking care of .opensearch_dashboards + } + + private Set getFieldMappings(String indexName) { + IndexMappings indexMappings = clusterState.getFieldMappings(new String[] {indexName}); + Set fieldMappingsSet = new HashSet<>(indexMappings.allMappings()); + + for (FieldMappings fieldMappings : fieldMappingsSet) { + int size = fieldMappings.data().size(); + if (size > threshold) { + throw new EarlyExitAnalysisException( + StringUtils.format( + "Index [%s] has [%d] fields more than threshold [%d]", indexName, size, threshold)); + } } - - private Set getFieldMappings(String indexName) { - IndexMappings indexMappings = clusterState.getFieldMappings(new String[]{indexName}); - Set fieldMappingsSet = new HashSet<>(indexMappings.allMappings()); - - for (FieldMappings fieldMappings : fieldMappingsSet) { - int size = fieldMappings.data().size(); - if (size > threshold) { - throw new EarlyExitAnalysisException(StringUtils.format( - "Index [%s] has [%d] fields more than threshold [%d]", indexName, size, threshold)); - } - } - return fieldMappingsSet; + return fieldMappingsSet; + } + + private void defineFieldName(String fieldName, String type) { + if ("NESTED".equalsIgnoreCase(type)) { + defineFieldName(fieldName, new OpenSearchIndex(fieldName, NESTED_FIELD)); + } else { + defineFieldName(fieldName, OpenSearchDataType.typeOf(type)); } + } - private void defineFieldName(String fieldName, String type) { - if ("NESTED".equalsIgnoreCase(type)) { - defineFieldName(fieldName, new OpenSearchIndex(fieldName, NESTED_FIELD)); - } else { - defineFieldName(fieldName, OpenSearchDataType.typeOf(type)); - } - } + private void defineFieldName(String fieldName, Type type) { + Symbol symbol = new Symbol(Namespace.FIELD_NAME, fieldName); + environment().define(symbol, type); + } - private void defineFieldName(String fieldName, Type type) { - Symbol symbol = new Symbol(Namespace.FIELD_NAME, 
fieldName); - environment().define(symbol, type); - } - - private Environment environment() { - return context.peek(); - } + private Environment environment() { + return context.peek(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java index 510a76659e..edb4136d49 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java @@ -3,21 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import java.util.List; -/** - * Abstraction for anything that can be reduced and used by {@link AntlrSqlParseTreeVisitor}. - */ +/** Abstraction for anything that can be reduced and used by {@link AntlrSqlParseTreeVisitor}. */ public interface Reducible { - /** - * Reduce current and others to generate a new one - * @param others others - * @return reduction - */ - T reduce(List others); - + /** + * Reduce current and others to generate a new one + * + * @param others others + * @return reduction + */ + T reduce(List others); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java index fb6beca96d..5b99f49515 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java @@ -3,27 +3,24 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; -/** - * A placeholder Cursor implementation to work with non-paginated queries. - */ +/** A placeholder Cursor implementation to work with non-paginated queries. 
*/ public class NullCursor implements Cursor { - private final CursorType type = CursorType.NULL; + private final CursorType type = CursorType.NULL; - @Override - public String generateCursorId() { - return null; - } + @Override + public String generateCursorId() { + return null; + } - @Override - public CursorType getType() { - return type; - } + @Override + public CursorType getType() { + return type; + } - public NullCursor from(String cursorId) { - return NULL_CURSOR; - } + public NullCursor from(String cursorId) { + return NULL_CURSOR; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java index 2a9be3ce91..f593d6c428 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java @@ -3,56 +3,53 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; /** - * - * * @author ansj */ public class Order { - private String nestedPath; - private String name; - private String type; - private Field sortField; - - public boolean isScript() { - return sortField != null && sortField.isScriptField(); - } - - public Order(String nestedPath, String name, String type, Field sortField) { - this.nestedPath = nestedPath; - this.name = name; - this.type = type; - this.sortField = sortField; - } - - public String getNestedPath() { - return nestedPath; - } - - public void setNestedPath(String nestedPath) { - this.nestedPath = nestedPath; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public Field getSortField() { - return sortField; - } + private String nestedPath; + private String name; + private String type; + private Field sortField; + + public boolean isScript() { + return sortField != 
null && sortField.isScriptField(); + } + + public Order(String nestedPath, String name, String type, Field sortField) { + this.nestedPath = nestedPath; + this.name = name; + this.type = type; + this.sortField = sortField; + } + + public String getNestedPath() { + return nestedPath; + } + + public void setNestedPath(String nestedPath) { + this.nestedPath = nestedPath; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Field getSortField() { + return sortField; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java index 6cdf0148a8..38ca556199 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -25,163 +24,164 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; - public class Paramer { - public String analysis; - public Float boost; - public String value; - public Integer slop; - - public Map fieldsBoosts = new HashMap<>(); - public String type; - public Float tieBreaker; - public Operator operator; - - public String default_field; - - public static Paramer parseParamer(SQLMethodInvokeExpr method) throws SqlParseException { - Paramer instance = new Paramer(); - List parameters = method.getParameters(); - for (SQLExpr expr : parameters) { - if (expr instanceof SQLCharExpr) { - if (instance.value == null) { - instance.value = ((SQLCharExpr) expr).getText(); - } else { - instance.analysis = ((SQLCharExpr) expr).getText(); - } - } else if (expr instanceof SQLNumericLiteralExpr) { - 
instance.boost = ((SQLNumericLiteralExpr) expr).getNumber().floatValue(); - } else if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr sqlExpr = (SQLBinaryOpExpr) expr; - switch (Util.expr2Object(sqlExpr.getLeft()).toString()) { - case "query": - instance.value = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - case "analyzer": - instance.analysis = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - case "boost": - instance.boost = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - case "slop": - instance.slop = Integer.parseInt(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - - case "fields": - int index; - for (String f : Strings.splitStringByCommaToArray( - Util.expr2Object(sqlExpr.getRight()).toString())) { - index = f.lastIndexOf('^'); - if (-1 < index) { - instance.fieldsBoosts.put(f.substring(0, index), - Float.parseFloat(f.substring(index + 1))); - } else { - instance.fieldsBoosts.put(f, 1.0F); - } - } - break; - case "type": - instance.type = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - case "tie_breaker": - instance.tieBreaker = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - case "operator": - instance.operator = Operator.fromString(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - - case "default_field": - instance.default_field = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - - default: - break; - } + public String analysis; + public Float boost; + public String value; + public Integer slop; + + public Map fieldsBoosts = new HashMap<>(); + public String type; + public Float tieBreaker; + public Operator operator; + + public String default_field; + + public static Paramer parseParamer(SQLMethodInvokeExpr method) throws SqlParseException { + Paramer instance = new Paramer(); + List parameters = method.getParameters(); + for (SQLExpr expr : parameters) { + if (expr instanceof SQLCharExpr) { + if (instance.value == null) { + 
instance.value = ((SQLCharExpr) expr).getText(); + } else { + instance.analysis = ((SQLCharExpr) expr).getText(); + } + } else if (expr instanceof SQLNumericLiteralExpr) { + instance.boost = ((SQLNumericLiteralExpr) expr).getNumber().floatValue(); + } else if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr sqlExpr = (SQLBinaryOpExpr) expr; + switch (Util.expr2Object(sqlExpr.getLeft()).toString()) { + case "query": + instance.value = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + case "analyzer": + instance.analysis = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + case "boost": + instance.boost = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + case "slop": + instance.slop = Integer.parseInt(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + + case "fields": + int index; + for (String f : + Strings.splitStringByCommaToArray( + Util.expr2Object(sqlExpr.getRight()).toString())) { + index = f.lastIndexOf('^'); + if (-1 < index) { + instance.fieldsBoosts.put( + f.substring(0, index), Float.parseFloat(f.substring(index + 1))); + } else { + instance.fieldsBoosts.put(f, 1.0F); + } } - } - - return instance; + break; + case "type": + instance.type = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + case "tie_breaker": + instance.tieBreaker = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + case "operator": + instance.operator = + Operator.fromString(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + + case "default_field": + instance.default_field = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + + default: + break; + } + } } - public static ToXContent fullParamer(MatchPhraseQueryBuilder query, Paramer paramer) { - if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + return instance; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } + public static ToXContent fullParamer(MatchPhraseQueryBuilder query, 
Paramer paramer) { + if (paramer.analysis != null) { + query.analyzer(paramer.analysis); + } - if (paramer.slop != null) { - query.slop(paramer.slop); - } + if (paramer.boost != null) { + query.boost(paramer.boost); + } - return query; + if (paramer.slop != null) { + query.slop(paramer.slop); } - public static ToXContent fullParamer(MatchQueryBuilder query, Paramer paramer) { - if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + return query; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } - return query; + public static ToXContent fullParamer(MatchQueryBuilder query, Paramer paramer) { + if (paramer.analysis != null) { + query.analyzer(paramer.analysis); } - public static ToXContent fullParamer(WildcardQueryBuilder query, Paramer paramer) { - if (paramer.boost != null) { - query.boost(paramer.boost); - } - return query; + if (paramer.boost != null) { + query.boost(paramer.boost); } + return query; + } - public static ToXContent fullParamer(QueryStringQueryBuilder query, Paramer paramer) { - if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + public static ToXContent fullParamer(WildcardQueryBuilder query, Paramer paramer) { + if (paramer.boost != null) { + query.boost(paramer.boost); + } + return query; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } + public static ToXContent fullParamer(QueryStringQueryBuilder query, Paramer paramer) { + if (paramer.analysis != null) { + query.analyzer(paramer.analysis); + } - if (paramer.slop != null) { - query.phraseSlop(paramer.slop); - } + if (paramer.boost != null) { + query.boost(paramer.boost); + } - if (paramer.default_field != null) { - query.defaultField(paramer.default_field); - } + if (paramer.slop != null) { + query.phraseSlop(paramer.slop); + } - return query; + if (paramer.default_field != null) { + query.defaultField(paramer.default_field); } - public static ToXContent fullParamer(MultiMatchQueryBuilder query, Paramer paramer) { 
- if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + return query; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } + public static ToXContent fullParamer(MultiMatchQueryBuilder query, Paramer paramer) { + if (paramer.analysis != null) { + query.analyzer(paramer.analysis); + } - if (paramer.slop != null) { - query.slop(paramer.slop); - } + if (paramer.boost != null) { + query.boost(paramer.boost); + } - if (paramer.type != null) { - query.type(paramer.type); - } + if (paramer.slop != null) { + query.slop(paramer.slop); + } - if (paramer.tieBreaker != null) { - query.tieBreaker(paramer.tieBreaker); - } + if (paramer.type != null) { + query.type(paramer.type); + } - if (paramer.operator != null) { - query.operator(paramer.operator); - } + if (paramer.tieBreaker != null) { + query.tieBreaker(paramer.tieBreaker); + } - return query; + if (paramer.operator != null) { + query.operator(paramer.operator); } + + return query; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java index b0538591b8..6f891e7fc5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java @@ -3,45 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.ArrayList; import java.util.List; -/** - * Represents abstract query. every query - * has indexes, types, and where clause. - */ +/** Represents abstract query. every query has indexes, types, and where clause. */ public abstract class Query implements QueryStatement { - private Where where = null; - private List from = new ArrayList<>(); - - - public Where getWhere() { - return this.where; - } - - public void setWhere(Where where) { - this.where = where; - } - - public List getFrom() { - return from; - } - - - /** - * Get the indexes the query refer to. 
- * - * @return list of strings, the indexes names - */ - public String[] getIndexArr() { - String[] indexArr = new String[this.from.size()]; - for (int i = 0; i < indexArr.length; i++) { - indexArr[i] = this.from.get(i).getIndex(); - } - return indexArr; + private Where where = null; + private List from = new ArrayList<>(); + + public Where getWhere() { + return this.where; + } + + public void setWhere(Where where) { + this.where = where; + } + + public List getFrom() { + return from; + } + + /** + * Get the indexes the query refer to. + * + * @return list of strings, the indexes names + */ + public String[] getIndexArr() { + String[] indexArr = new String[this.from.size()]; + for (int i = 0; i < indexArr.length; i++) { + indexArr[i] = this.from.get(i).getIndex(); } + return indexArr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java index f13e053d92..f536e3ad6f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java @@ -3,20 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.legacy.executor.Format; -/** - * The definition of QueryActionRequest. - */ +/** The definition of QueryActionRequest. 
*/ @Getter @RequiredArgsConstructor public class QueryActionRequest { - private final String sql; - private final ColumnTypeProvider typeProvider; - private final Format format; + private final String sql; + private final ColumnTypeProvider typeProvider; + private final Format format; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java index 26c0b07517..71fe64906a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java @@ -3,11 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * Identifier interface used to encompass Query and IndexStatements - */ -public interface QueryStatement { -} +/** Identifier interface used to encompass Query and IndexStatements */ +public interface QueryStatement {} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java index bdc42b4ff3..bb4d17d897 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java @@ -3,29 +3,27 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; import java.util.List; -/** - * Stores information about function name for script fields - */ +/** Stores information about function name for script fields */ public class ScriptMethodField extends MethodField { - private final String functionName; + private final String functionName; - public ScriptMethodField(String functionName, List params, SQLAggregateOption option, String alias) { - super("script", params, option, alias); - this.functionName = functionName; - } + public 
ScriptMethodField( + String functionName, List params, SQLAggregateOption option, String alias) { + super("script", params, option, alias); + this.functionName = functionName; + } - public String getFunctionName() { - return functionName; - } + public String getFunctionName() { + return functionName; + } - @Override - public boolean isScriptField() { - return true; - } + @Override + public boolean isScriptField() { + return true; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java index 5b7b73a910..e951c84961 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.ArrayList; @@ -29,128 +28,120 @@ import org.opensearch.sql.legacy.exception.SqlParseException; public class SearchResult { - /** - * - */ - private List> results; - - private long total; - - double maxScore = 0; - - public SearchResult(SearchResponse resp) { - SearchHits hits = resp.getHits(); - this.total = Optional.ofNullable(hits.getTotalHits()).map(totalHits -> totalHits.value).orElse(0L); - results = new ArrayList<>(hits.getHits().length); - for (SearchHit searchHit : hits.getHits()) { - if (searchHit.getSourceAsMap() != null) { - results.add(searchHit.getSourceAsMap()); - } else if (searchHit.getFields() != null) { - Map fields = searchHit.getFields(); - results.add(toFieldsMap(fields)); - } - - } + /** */ + private List> results; + + private long total; + + double maxScore = 0; + + public SearchResult(SearchResponse resp) { + SearchHits hits = resp.getHits(); + this.total = + Optional.ofNullable(hits.getTotalHits()).map(totalHits -> totalHits.value).orElse(0L); + results = new ArrayList<>(hits.getHits().length); + for (SearchHit searchHit : hits.getHits()) { + if 
(searchHit.getSourceAsMap() != null) { + results.add(searchHit.getSourceAsMap()); + } else if (searchHit.getFields() != null) { + Map fields = searchHit.getFields(); + results.add(toFieldsMap(fields)); + } } + } - public SearchResult(SearchResponse resp, Select select) throws SqlParseException { - Aggregations aggs = resp.getAggregations(); - if (aggs.get("filter") != null) { - InternalFilter inf = aggs.get("filter"); - aggs = inf.getAggregations(); - } - if (aggs.get("group by") != null) { - InternalTerms terms = aggs.get("group by"); - Collection buckets = terms.getBuckets(); - this.total = buckets.size(); - results = new ArrayList<>(buckets.size()); - for (Bucket bucket : buckets) { - Map aggsMap = toAggsMap(bucket.getAggregations().getAsMap()); - aggsMap.put("docCount", bucket.getDocCount()); - results.add(aggsMap); - } - } else { - results = new ArrayList<>(1); - this.total = 1; - Map map = new HashMap<>(); - for (Aggregation aggregation : aggs) { - map.put(aggregation.getName(), covenValue(aggregation)); - } - results.add(map); - } - + public SearchResult(SearchResponse resp, Select select) throws SqlParseException { + Aggregations aggs = resp.getAggregations(); + if (aggs.get("filter") != null) { + InternalFilter inf = aggs.get("filter"); + aggs = inf.getAggregations(); } - - /** - * - * - * @param fields - * @return - */ - private Map toFieldsMap(Map fields) { - Map result = new HashMap<>(); - for (Entry entry : fields.entrySet()) { - if (entry.getValue().getValues().size() > 1) { - result.put(entry.getKey(), entry.getValue().getValues()); - } else { - result.put(entry.getKey(), entry.getValue().getValue()); - } - - } - return result; + if (aggs.get("group by") != null) { + InternalTerms terms = aggs.get("group by"); + Collection buckets = terms.getBuckets(); + this.total = buckets.size(); + results = new ArrayList<>(buckets.size()); + for (Bucket bucket : buckets) { + Map aggsMap = toAggsMap(bucket.getAggregations().getAsMap()); + aggsMap.put("docCount", 
bucket.getDocCount()); + results.add(aggsMap); + } + } else { + results = new ArrayList<>(1); + this.total = 1; + Map map = new HashMap<>(); + for (Aggregation aggregation : aggs) { + map.put(aggregation.getName(), covenValue(aggregation)); + } + results.add(map); } - - /** - * - * - * @param fields - * @return - * @throws SqlParseException - */ - private Map toAggsMap(Map fields) throws SqlParseException { - Map result = new HashMap<>(); - for (Entry entry : fields.entrySet()) { - result.put(entry.getKey(), covenValue(entry.getValue())); - } - return result; + } + + /** + * @param fields + * @return + */ + private Map toFieldsMap(Map fields) { + Map result = new HashMap<>(); + for (Entry entry : fields.entrySet()) { + if (entry.getValue().getValues().size() > 1) { + result.put(entry.getKey(), entry.getValue().getValues()); + } else { + result.put(entry.getKey(), entry.getValue().getValue()); + } } - - private Object covenValue(Aggregation value) throws SqlParseException { - if (value instanceof InternalNumericMetricsAggregation.SingleValue) { - return ((InternalNumericMetricsAggregation.SingleValue) value).value(); - } else if (value instanceof InternalValueCount) { - return ((InternalValueCount) value).getValue(); - } else if (value instanceof InternalTopHits) { - return (value); - } else if (value instanceof LongTerms) { - return value; - } else { - throw new SqlParseException("Unknown aggregation value type: " + value.getClass().getSimpleName()); - } + return result; + } + + /** + * @param fields + * @return + * @throws SqlParseException + */ + private Map toAggsMap(Map fields) throws SqlParseException { + Map result = new HashMap<>(); + for (Entry entry : fields.entrySet()) { + result.put(entry.getKey(), covenValue(entry.getValue())); } - - public List> getResults() { - return results; + return result; + } + + private Object covenValue(Aggregation value) throws SqlParseException { + if (value instanceof InternalNumericMetricsAggregation.SingleValue) { + return 
((InternalNumericMetricsAggregation.SingleValue) value).value(); + } else if (value instanceof InternalValueCount) { + return ((InternalValueCount) value).getValue(); + } else if (value instanceof InternalTopHits) { + return (value); + } else if (value instanceof LongTerms) { + return value; + } else { + throw new SqlParseException( + "Unknown aggregation value type: " + value.getClass().getSimpleName()); } + } - public void setResults(List> results) { - this.results = results; - } + public List> getResults() { + return results; + } - public long getTotal() { - return total; - } + public void setResults(List> results) { + this.results = results; + } - public void setTotal(long total) { - this.total = total; - } + public long getTotal() { + return total; + } - public double getMaxScore() { - return maxScore; - } + public void setTotal(long total) { + this.total = total; + } - public void setMaxScore(double maxScore) { - this.maxScore = maxScore; - } + public double getMaxScore() { + return maxScore; + } + public void setMaxScore(double maxScore) { + this.maxScore = maxScore; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java index cd600d856e..2faa8cc6e5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -16,7 +15,6 @@ import org.opensearch.sql.legacy.domain.hints.Hint; import org.opensearch.sql.legacy.parser.SubQueryExpression; - /** * sql select * @@ -24,167 +22,169 @@ */ public class Select extends Query { - /** - * Using this functions will cause query to execute as aggregation. 
- */ - private static final Set AGGREGATE_FUNCTIONS = - ImmutableSet.of( - "SUM", "MAX", "MIN", "AVG", - "TOPHITS", "COUNT", "STATS", "EXTENDED_STATS", - "PERCENTILES", "SCRIPTED_METRIC" - ); - - private List hints = new ArrayList<>(); - private List fields = new ArrayList<>(); - private List> groupBys = new ArrayList<>(); - private Having having; - private List orderBys = new ArrayList<>(); - private int offset; - private Integer rowCount; - private boolean containsSubQueries; - private List subQueries; - private boolean selectAll = false; - private JoinType nestedJoinType = JoinType.COMMA; - - public boolean isQuery = false; - public boolean isAggregate = false; - - public static final int DEFAULT_LIMIT = 200; - - public Select() { - } - - public List getFields() { - return fields; - } - - public void setOffset(int offset) { - this.offset = offset; - } - - public void setRowCount(Integer rowCount) { - this.rowCount = rowCount; - } - - public void addGroupBy(Field field) { - List wrapper = new ArrayList<>(); - wrapper.add(field); - addGroupBy(wrapper); - } - - public void addGroupBy(List fields) { - isAggregate = true; - selectAll = false; - this.groupBys.add(fields); - } + /** Using this functions will cause query to execute as aggregation. 
*/ + private static final Set AGGREGATE_FUNCTIONS = + ImmutableSet.of( + "SUM", + "MAX", + "MIN", + "AVG", + "TOPHITS", + "COUNT", + "STATS", + "EXTENDED_STATS", + "PERCENTILES", + "SCRIPTED_METRIC"); + + private List hints = new ArrayList<>(); + private List fields = new ArrayList<>(); + private List> groupBys = new ArrayList<>(); + private Having having; + private List orderBys = new ArrayList<>(); + private int offset; + private Integer rowCount; + private boolean containsSubQueries; + private List subQueries; + private boolean selectAll = false; + private JoinType nestedJoinType = JoinType.COMMA; + + public boolean isQuery = false; + public boolean isAggregate = false; + + public static final int DEFAULT_LIMIT = 200; + + public Select() {} + + public List getFields() { + return fields; + } + + public void setOffset(int offset) { + this.offset = offset; + } + + public void setRowCount(Integer rowCount) { + this.rowCount = rowCount; + } + + public void addGroupBy(Field field) { + List wrapper = new ArrayList<>(); + wrapper.add(field); + addGroupBy(wrapper); + } + + public void addGroupBy(List fields) { + isAggregate = true; + selectAll = false; + this.groupBys.add(fields); + } + + public List> getGroupBys() { + return groupBys; + } + + public Having getHaving() { + return having; + } + + public void setHaving(Having having) { + this.having = having; + } + + public List getOrderBys() { + return orderBys; + } + + public int getOffset() { + return offset; + } + + public Integer getRowCount() { + return rowCount; + } + + public void addOrderBy(String nestedPath, String name, String type, Field field) { + if ("_score".equals(name)) { + isQuery = true; + } + this.orderBys.add(new Order(nestedPath, name, type, field)); + } + + public void addField(Field field) { + if (field == null) { + return; + } + if (field == STAR && !isAggregate) { + // Ignore GROUP BY since columns present in result are decided by column list in GROUP BY + this.selectAll = true; + return; + } + + 
if (field instanceof MethodField + && AGGREGATE_FUNCTIONS.contains(field.getName().toUpperCase())) { + isAggregate = true; + } + + fields.add(field); + } + + public List getHints() { + return hints; + } + + public JoinType getNestedJoinType() { + return nestedJoinType; + } + + public void setNestedJoinType(JoinType nestedJoinType) { + this.nestedJoinType = nestedJoinType; + } + + public void fillSubQueries() { + subQueries = new ArrayList<>(); + Where where = this.getWhere(); + fillSubQueriesFromWhereRecursive(where); + } - public List> getGroupBys() { - return groupBys; + private void fillSubQueriesFromWhereRecursive(Where where) { + if (where == null) { + return; } - - public Having getHaving() { - return having; - } - - public void setHaving(Having having) { - this.having = having; - } - - public List getOrderBys() { - return orderBys; - } - - public int getOffset() { - return offset; - } - - public Integer getRowCount() { - return rowCount; - } - - public void addOrderBy(String nestedPath, String name, String type, Field field) { - if ("_score".equals(name)) { - isQuery = true; + if (where instanceof Condition) { + Condition condition = (Condition) where; + if (condition.getValue() instanceof SubQueryExpression) { + this.subQueries.add((SubQueryExpression) condition.getValue()); + this.containsSubQueries = true; + } + if (condition.getValue() instanceof Object[]) { + + for (Object o : (Object[]) condition.getValue()) { + if (o instanceof SubQueryExpression) { + this.subQueries.add((SubQueryExpression) o); + this.containsSubQueries = true; + } } - this.orderBys.add(new Order(nestedPath, name, type, field)); + } + } else { + for (Where innerWhere : where.getWheres()) { + fillSubQueriesFromWhereRecursive(innerWhere); + } } + } - public void addField(Field field) { - if (field == null) { - return; - } - if (field == STAR && !isAggregate) { - // Ignore GROUP BY since columns present in result are decided by column list in GROUP BY - this.selectAll = true; - return; 
- } - - if (field instanceof MethodField && AGGREGATE_FUNCTIONS.contains(field.getName().toUpperCase())) { - isAggregate = true; - } - - fields.add(field); - } - - public List getHints() { - return hints; - } - - - public JoinType getNestedJoinType() { - return nestedJoinType; - } - - public void setNestedJoinType(JoinType nestedJoinType) { - this.nestedJoinType = nestedJoinType; - } + public boolean containsSubQueries() { + return containsSubQueries; + } + public List getSubQueries() { + return subQueries; + } - public void fillSubQueries() { - subQueries = new ArrayList<>(); - Where where = this.getWhere(); - fillSubQueriesFromWhereRecursive(where); - } - - private void fillSubQueriesFromWhereRecursive(Where where) { - if (where == null) { - return; - } - if (where instanceof Condition) { - Condition condition = (Condition) where; - if (condition.getValue() instanceof SubQueryExpression) { - this.subQueries.add((SubQueryExpression) condition.getValue()); - this.containsSubQueries = true; - } - if (condition.getValue() instanceof Object[]) { - - for (Object o : (Object[]) condition.getValue()) { - if (o instanceof SubQueryExpression) { - this.subQueries.add((SubQueryExpression) o); - this.containsSubQueries = true; - } - } - } - } else { - for (Where innerWhere : where.getWheres()) { - fillSubQueriesFromWhereRecursive(innerWhere); - } - } - } - - public boolean containsSubQueries() { - return containsSubQueries; - } - - public List getSubQueries() { - return subQueries; - } + public boolean isOrderdSelect() { + return this.getOrderBys() != null && this.getOrderBys().size() > 0; + } - public boolean isOrderdSelect() { - return this.getOrderBys() != null && this.getOrderBys().size() > 0; - } - - public boolean isSelectAll() { - return selectAll; - } + public boolean isSelectAll() { + return selectAll; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java index d5c897cf90..4827e0e61c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java @@ -3,49 +3,49 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.bucketpath; public class Path { - private final String path; - private final String separator; - private final PathType type; - - private Path(String path, String separator, PathType type) { - this.path = path; - this.separator = separator; - this.type = type; - } - - public String getPath() { - return path; - } - - public String getSeparator() { - return separator; - } - - public PathType getType() { - return type; - } - - public boolean isMetricPath() { - return type == PathType.METRIC; - } - - public boolean isAggPath() { - return type == PathType.AGG; - } - - public static Path getAggPath(String path) { - return new Path(path, ">", PathType.AGG); - } - - public static Path getMetricPath(String path) { - return new Path(path, ".", PathType.METRIC); - } - - public enum PathType { - AGG, METRIC - } + private final String path; + private final String separator; + private final PathType type; + + private Path(String path, String separator, PathType type) { + this.path = path; + this.separator = separator; + this.type = type; + } + + public String getPath() { + return path; + } + + public String getSeparator() { + return separator; + } + + public PathType getType() { + return type; + } + + public boolean isMetricPath() { + return type == PathType.METRIC; + } + + public boolean isAggPath() { + return type == PathType.AGG; + } + + public static Path getAggPath(String path) { + return new Path(path, ">", PathType.AGG); + } + + public static Path getMetricPath(String path) { + return new Path(path, ".", PathType.METRIC); + } + + public enum PathType { + AGG, + METRIC + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java index a823947466..fd02486fae 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain; import java.util.ArrayList; @@ -19,47 +18,57 @@ public class OpenSearchClient { - private static final Logger LOG = LogManager.getLogger(); - private static final int[] retryIntervals = new int[]{4, 12, 20, 20}; - private final Client client; + private static final Logger LOG = LogManager.getLogger(); + private static final int[] retryIntervals = new int[] {4, 12, 20, 20}; + private final Client client; - public OpenSearchClient(Client client) { - this.client = client; - } + public OpenSearchClient(Client client) { + this.client = client; + } - public MultiSearchResponse.Item[] multiSearch(MultiSearchRequest multiSearchRequest) { - MultiSearchResponse.Item[] responses = new MultiSearchResponse.Item[multiSearchRequest.requests().size()]; - multiSearchRetry(responses, multiSearchRequest, - IntStream.range(0, multiSearchRequest.requests().size()).boxed().collect(Collectors.toList()), 0); + public MultiSearchResponse.Item[] multiSearch(MultiSearchRequest multiSearchRequest) { + MultiSearchResponse.Item[] responses = + new MultiSearchResponse.Item[multiSearchRequest.requests().size()]; + multiSearchRetry( + responses, + multiSearchRequest, + IntStream.range(0, multiSearchRequest.requests().size()) + .boxed() + .collect(Collectors.toList()), + 0); - return responses; - } + return responses; + } - private void multiSearchRetry(MultiSearchResponse.Item[] responses, MultiSearchRequest multiSearchRequest, - List indices, int retry) { - MultiSearchRequest multiSearchRequestRetry = new MultiSearchRequest(); - for (int 
i : indices) { - multiSearchRequestRetry.add(multiSearchRequest.requests().get(i)); - } - MultiSearchResponse.Item[] res = client.multiSearch(multiSearchRequestRetry).actionGet().getResponses(); - List indicesFailure = new ArrayList<>(); - //Could get EsRejectedExecutionException and OpenSearchException as getCause - for (int i = 0; i < res.length; i++) { - if (res[i].isFailure()) { - indicesFailure.add(indices.get(i)); - if (retry == 3) { - responses[indices.get(i)] = res[i]; - } - } else { - responses[indices.get(i)] = res[i]; - } - } - if (!indicesFailure.isEmpty()) { - LOG.info("OpenSearch multisearch has failures on retry {}", retry); - if (retry < 3) { - BackOffRetryStrategy.backOffSleep(retryIntervals[retry]); - multiSearchRetry(responses, multiSearchRequest, indicesFailure, retry + 1); - } + private void multiSearchRetry( + MultiSearchResponse.Item[] responses, + MultiSearchRequest multiSearchRequest, + List indices, + int retry) { + MultiSearchRequest multiSearchRequestRetry = new MultiSearchRequest(); + for (int i : indices) { + multiSearchRequestRetry.add(multiSearchRequest.requests().get(i)); + } + MultiSearchResponse.Item[] res = + client.multiSearch(multiSearchRequestRetry).actionGet().getResponses(); + List indicesFailure = new ArrayList<>(); + // Could get EsRejectedExecutionException and OpenSearchException as getCause + for (int i = 0; i < res.length; i++) { + if (res[i].isFailure()) { + indicesFailure.add(indices.get(i)); + if (retry == 3) { + responses[indices.get(i)] = res[i]; } + } else { + responses[indices.get(i)] = res[i]; + } + } + if (!indicesFailure.isEmpty()) { + LOG.info("OpenSearch multisearch has failures on retry {}", retry); + if (retry < 3) { + BackOffRetryStrategy.backOffSleep(retryIntervals[retry]); + multiSearchRetry(responses, multiSearchRequest, indicesFailure, retry + 1); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java index bcb25fd39a..2e45fb45b7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; @@ -31,81 +30,85 @@ import org.opensearch.sql.legacy.query.multi.MultiQueryAction; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; -/** - * Created by Eliran on 3/10/2015. - */ +/** Created by Eliran on 3/10/2015. */ public class QueryActionElasticExecutor { - public static SearchHits executeSearchAction(DefaultQueryAction searchQueryAction) throws SqlParseException { - SqlOpenSearchRequestBuilder builder = searchQueryAction.explain(); - return ((SearchResponse) builder.get()).getHits(); - } + public static SearchHits executeSearchAction(DefaultQueryAction searchQueryAction) + throws SqlParseException { + SqlOpenSearchRequestBuilder builder = searchQueryAction.explain(); + return ((SearchResponse) builder.get()).getHits(); + } - public static SearchHits executeJoinSearchAction(Client client, OpenSearchJoinQueryAction joinQueryAction) - throws IOException, SqlParseException { - SqlElasticRequestBuilder joinRequestBuilder = joinQueryAction.explain(); - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, joinRequestBuilder); - executor.run(); - return executor.getHits(); - } + public static SearchHits executeJoinSearchAction( + Client client, OpenSearchJoinQueryAction joinQueryAction) + throws IOException, SqlParseException { + SqlElasticRequestBuilder joinRequestBuilder = joinQueryAction.explain(); + ElasticJoinExecutor executor = + ElasticJoinExecutor.createJoinExecutor(client, joinRequestBuilder); + executor.run(); + return executor.getHits(); + } - public static Aggregations 
executeAggregationAction(AggregationQueryAction aggregationQueryAction) - throws SqlParseException { - SqlOpenSearchRequestBuilder select = aggregationQueryAction.explain(); - return ((SearchResponse) select.get()).getAggregations(); - } + public static Aggregations executeAggregationAction(AggregationQueryAction aggregationQueryAction) + throws SqlParseException { + SqlOpenSearchRequestBuilder select = aggregationQueryAction.explain(); + return ((SearchResponse) select.get()).getAggregations(); + } - public static List executeQueryPlanQueryAction(QueryPlanQueryAction queryPlanQueryAction) { - QueryPlanRequestBuilder select = (QueryPlanRequestBuilder) queryPlanQueryAction.explain(); - return select.execute(); - } + public static List executeQueryPlanQueryAction( + QueryPlanQueryAction queryPlanQueryAction) { + QueryPlanRequestBuilder select = (QueryPlanRequestBuilder) queryPlanQueryAction.explain(); + return select.execute(); + } - public static ActionResponse executeShowQueryAction(ShowQueryAction showQueryAction) { - return showQueryAction.explain().get(); - } + public static ActionResponse executeShowQueryAction(ShowQueryAction showQueryAction) { + return showQueryAction.explain().get(); + } - public static ActionResponse executeDescribeQueryAction(DescribeQueryAction describeQueryAction) { - return describeQueryAction.explain().get(); - } + public static ActionResponse executeDescribeQueryAction(DescribeQueryAction describeQueryAction) { + return describeQueryAction.explain().get(); + } - public static ActionResponse executeDeleteAction(DeleteQueryAction deleteQueryAction) throws SqlParseException { - return deleteQueryAction.explain().get(); - } + public static ActionResponse executeDeleteAction(DeleteQueryAction deleteQueryAction) + throws SqlParseException { + return deleteQueryAction.explain().get(); + } - public static SearchHits executeMultiQueryAction(Client client, MultiQueryAction queryAction) - throws SqlParseException, IOException { - 
SqlElasticRequestBuilder multiRequestBuilder = queryAction.explain(); - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) multiRequestBuilder); - executor.run(); - return executor.getHits(); - } + public static SearchHits executeMultiQueryAction(Client client, MultiQueryAction queryAction) + throws SqlParseException, IOException { + SqlElasticRequestBuilder multiRequestBuilder = queryAction.explain(); + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) multiRequestBuilder); + executor.run(); + return executor.getHits(); + } - public static Object executeAnyAction(Client client, QueryAction queryAction) - throws SqlParseException, IOException { - if (queryAction instanceof DefaultQueryAction) { - return executeSearchAction((DefaultQueryAction) queryAction); - } - if (queryAction instanceof AggregationQueryAction) { - return executeAggregationAction((AggregationQueryAction) queryAction); - } - if (queryAction instanceof QueryPlanQueryAction) { - return executeQueryPlanQueryAction((QueryPlanQueryAction) queryAction); - } - if (queryAction instanceof ShowQueryAction) { - return executeShowQueryAction((ShowQueryAction) queryAction); - } - if (queryAction instanceof DescribeQueryAction) { - return executeDescribeQueryAction((DescribeQueryAction) queryAction); - } - if (queryAction instanceof OpenSearchJoinQueryAction) { - return executeJoinSearchAction(client, (OpenSearchJoinQueryAction) queryAction); - } - if (queryAction instanceof MultiQueryAction) { - return executeMultiQueryAction(client, (MultiQueryAction) queryAction); - } - if (queryAction instanceof DeleteQueryAction) { - return executeDeleteAction((DeleteQueryAction) queryAction); - } - return null; + public static Object executeAnyAction(Client client, QueryAction queryAction) + throws SqlParseException, IOException { + if (queryAction instanceof DefaultQueryAction) { + return 
executeSearchAction((DefaultQueryAction) queryAction); + } + if (queryAction instanceof AggregationQueryAction) { + return executeAggregationAction((AggregationQueryAction) queryAction); + } + if (queryAction instanceof QueryPlanQueryAction) { + return executeQueryPlanQueryAction((QueryPlanQueryAction) queryAction); + } + if (queryAction instanceof ShowQueryAction) { + return executeShowQueryAction((ShowQueryAction) queryAction); + } + if (queryAction instanceof DescribeQueryAction) { + return executeDescribeQueryAction((DescribeQueryAction) queryAction); + } + if (queryAction instanceof OpenSearchJoinQueryAction) { + return executeJoinSearchAction(client, (OpenSearchJoinQueryAction) queryAction); + } + if (queryAction instanceof MultiQueryAction) { + return executeMultiQueryAction(client, (MultiQueryAction) queryAction); + } + if (queryAction instanceof DeleteQueryAction) { + return executeDeleteAction((DeleteQueryAction) queryAction); } + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java index e0124fb8be..8a0ab65970 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.util.Map; @@ -11,12 +10,12 @@ import org.opensearch.rest.RestChannel; import org.opensearch.sql.legacy.query.QueryAction; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. 
*/ public interface RestExecutor { - void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception; + void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception; - String execute(Client client, Map params, QueryAction queryAction) throws Exception; + String execute(Client client, Map params, QueryAction queryAction) + throws Exception; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java index 091abca554..b0179d3d8d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.adapter; import com.google.common.base.Strings; @@ -14,27 +13,31 @@ import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; /** - * The definition of QueryPlan of QueryAction which works as the adapter to the current QueryAction framework. + * The definition of QueryPlan of QueryAction which works as the adapter to the current QueryAction + * framework. 
*/ public class QueryPlanQueryAction extends QueryAction { - private final QueryPlanRequestBuilder requestBuilder; + private final QueryPlanRequestBuilder requestBuilder; - public QueryPlanQueryAction(QueryPlanRequestBuilder requestBuilder) { - super(null, null); - this.requestBuilder = requestBuilder; - } + public QueryPlanQueryAction(QueryPlanRequestBuilder requestBuilder) { + super(null, null); + this.requestBuilder = requestBuilder; + } - @Override - public SqlElasticRequestBuilder explain() { - return requestBuilder; - } + @Override + public SqlElasticRequestBuilder explain() { + return requestBuilder; + } - @Override - public Optional> getFieldNames() { - List fieldNames = ((QueryPlanRequestBuilder) requestBuilder).outputColumns() - .stream() - .map(node -> Strings.isNullOrEmpty(node.getAlias()) ? node.getName() : node.getAlias()) + @Override + public Optional> getFieldNames() { + List fieldNames = + ((QueryPlanRequestBuilder) requestBuilder) + .outputColumns().stream() + .map( + node -> + Strings.isNullOrEmpty(node.getAlias()) ? node.getName() : node.getAlias()) .collect(Collectors.toList()); - return Optional.of(fieldNames); - } + return Optional.of(fieldNames); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java index ef0bc85bc1..3933df9bbb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.adapter; import java.util.List; @@ -16,38 +15,36 @@ import org.opensearch.sql.legacy.query.planner.core.BindingTupleQueryPlanner; import org.opensearch.sql.legacy.query.planner.core.ColumnNode; -/** - * The definition of QueryPlan SqlElasticRequestBuilder. 
- */ +/** The definition of QueryPlan SqlElasticRequestBuilder. */ @RequiredArgsConstructor public class QueryPlanRequestBuilder implements SqlElasticRequestBuilder { - private final BindingTupleQueryPlanner queryPlanner; - - public List execute() { - return queryPlanner.execute(); - } - - public List outputColumns() { - return queryPlanner.getColumnNodes(); - } - - @Override - public String explain() { - return queryPlanner.explain(); - } - - @Override - public ActionRequest request() { - throw new RuntimeException("unsupported operation"); - } - - @Override - public ActionResponse get() { - throw new RuntimeException("unsupported operation"); - } - - @Override - public ActionRequestBuilder getBuilder() { - throw new RuntimeException("unsupported operation"); - } + private final BindingTupleQueryPlanner queryPlanner; + + public List execute() { + return queryPlanner.execute(); + } + + public List outputColumns() { + return queryPlanner.getColumnNodes(); + } + + @Override + public String explain() { + return queryPlanner.explain(); + } + + @Override + public ActionRequest request() { + throw new RuntimeException("unsupported operation"); + } + + @Override + public ActionResponse get() { + throw new RuntimeException("unsupported operation"); + } + + @Override + public ActionRequestBuilder getBuilder() { + throw new RuntimeException("unsupported operation"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java index a48ab003dc..8117d241b1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import org.opensearch.OpenSearchException; @@ -13,46 +12,53 @@ public class 
OpenSearchErrorMessage extends ErrorMessage { - OpenSearchErrorMessage(OpenSearchException exception, int status) { - super(exception, status); - } + OpenSearchErrorMessage(OpenSearchException exception, int status) { + super(exception, status); + } - @Override - protected String fetchReason() { - return "Error occurred in OpenSearch engine: " + exception.getMessage(); - } + @Override + protected String fetchReason() { + return "Error occurred in OpenSearch engine: " + exception.getMessage(); + } - /** Currently Sql-Jdbc plugin only supports string type as reason and details in the error messages */ - @Override - protected String fetchDetails() { - StringBuilder details = new StringBuilder(); - if (exception instanceof SearchPhaseExecutionException) { - details.append(fetchSearchPhaseExecutionExceptionDetails((SearchPhaseExecutionException) exception)); - } else { - details.append(defaultDetails(exception)); - } - details.append("\nFor more details, please send request for Json format to see the raw response from " - + "OpenSearch engine."); - return details.toString(); + /** + * Currently Sql-Jdbc plugin only supports string type as reason and details in the error messages + */ + @Override + protected String fetchDetails() { + StringBuilder details = new StringBuilder(); + if (exception instanceof SearchPhaseExecutionException) { + details.append( + fetchSearchPhaseExecutionExceptionDetails((SearchPhaseExecutionException) exception)); + } else { + details.append(defaultDetails(exception)); } + details.append( + "\nFor more details, please send request for Json format to see the raw response from " + + "OpenSearch engine."); + return details.toString(); + } - private String defaultDetails(OpenSearchException exception) { - return exception.getDetailedMessage(); - } + private String defaultDetails(OpenSearchException exception) { + return exception.getDetailedMessage(); + } - /** - * Could not deliver the exactly same error messages due to the limit of JDBC types. 
- * Currently our cases occurred only SearchPhaseExecutionException instances among all types of OpenSearch exceptions - * according to the survey, see all types: OpenSearchException.OpenSearchExceptionHandle. - * Either add methods of fetching details for different types, or re-make a consistent message by not giving - * detailed messages/root causes but only a suggestion message. - */ - private String fetchSearchPhaseExecutionExceptionDetails(SearchPhaseExecutionException exception) { - StringBuilder details = new StringBuilder(); - ShardSearchFailure[] shardFailures = exception.shardFailures(); - for (ShardSearchFailure failure : shardFailures) { - details.append(StringUtils.format("Shard[%d]: %s\n", failure.shardId(), failure.getCause().toString())); - } - return details.toString(); + /** + * Could not deliver the exactly same error messages due to the limit of JDBC types. Currently our + * cases occurred only SearchPhaseExecutionException instances among all types of OpenSearch + * exceptions according to the survey, see all types: + * OpenSearchException.OpenSearchExceptionHandle. Either add methods of fetching details for + * different types, or re-make a consistent message by not giving detailed messages/root causes + * but only a suggestion message. 
+ */ + private String fetchSearchPhaseExecutionExceptionDetails( + SearchPhaseExecutionException exception) { + StringBuilder details = new StringBuilder(); + ShardSearchFailure[] shardFailures = exception.shardFailures(); + for (ShardSearchFailure failure : shardFailures) { + details.append( + StringUtils.format("Shard[%d]: %s\n", failure.shardId(), failure.getCause().toString())); } + return details.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java index 411fb90a24..00feabf5d8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Map; @@ -27,82 +26,84 @@ public class PrettyFormatRestExecutor implements RestExecutor { - private static final Logger LOG = LogManager.getLogger(); - - private final String format; - - public PrettyFormatRestExecutor(String format) { - this.format = format.toLowerCase(); + private static final Logger LOG = LogManager.getLogger(); + + private final String format; + + public PrettyFormatRestExecutor(String format) { + this.format = format.toLowerCase(); + } + + /** Execute the QueryAction and return the REST response using the channel. 
*/ + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) { + String formattedResponse = execute(client, params, queryAction); + BytesRestResponse bytesRestResponse; + if (format.equals("jdbc")) { + bytesRestResponse = + new BytesRestResponse( + RestStatus.OK, "application/json; charset=UTF-8", formattedResponse); + } else { + bytesRestResponse = new BytesRestResponse(RestStatus.OK, formattedResponse); } - /** - * Execute the QueryAction and return the REST response using the channel. - */ - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) { - String formattedResponse = execute(client, params, queryAction); - BytesRestResponse bytesRestResponse; - if (format.equals("jdbc")) { - bytesRestResponse = new BytesRestResponse(RestStatus.OK, - "application/json; charset=UTF-8", - formattedResponse); - } else { - bytesRestResponse = new BytesRestResponse(RestStatus.OK, formattedResponse); - } - - if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { - throw new IllegalStateException( - "[PrettyFormatRestExecutor] Memory could be insufficient when sendResponse()."); - } - - channel.sendResponse(bytesRestResponse); + if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { + throw new IllegalStateException( + "[PrettyFormatRestExecutor] Memory could be insufficient when sendResponse()."); } - @Override - public String execute(Client client, Map params, QueryAction queryAction) { - Protocol protocol; - - try { - if (queryAction instanceof DefaultQueryAction) { - protocol = buildProtocolForDefaultQuery(client, (DefaultQueryAction) queryAction); - } else { - Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); - protocol = new Protocol(client, queryAction, queryResult, format, Cursor.NULL_CURSOR); - } - } catch (Exception e) { - if (e instanceof OpenSearchException) { - 
LOG.warn("An error occurred in OpenSearch engine: " - + ((OpenSearchException) e).getDetailedMessage(), e); - } else { - LOG.warn("Error happened in pretty formatter", e); - } - protocol = new Protocol(e); - } - - return protocol.format(); + channel.sendResponse(bytesRestResponse); + } + + @Override + public String execute(Client client, Map params, QueryAction queryAction) { + Protocol protocol; + + try { + if (queryAction instanceof DefaultQueryAction) { + protocol = buildProtocolForDefaultQuery(client, (DefaultQueryAction) queryAction); + } else { + Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); + protocol = new Protocol(client, queryAction, queryResult, format, Cursor.NULL_CURSOR); + } + } catch (Exception e) { + if (e instanceof OpenSearchException) { + LOG.warn( + "An error occurred in OpenSearch engine: " + + ((OpenSearchException) e).getDetailedMessage(), + e); + } else { + LOG.warn("Error happened in pretty formatter", e); + } + protocol = new Protocol(e); } - /** - * QueryActionElasticExecutor.executeAnyAction() returns SearchHits inside SearchResponse. - * In order to get scroll ID if any, we need to execute DefaultQueryAction ourselves for SearchResponse. 
- */ - private Protocol buildProtocolForDefaultQuery(Client client, DefaultQueryAction queryAction) - throws SqlParseException { - - SearchResponse response = (SearchResponse) queryAction.explain().get(); - String scrollId = response.getScrollId(); - - Protocol protocol; - if (!Strings.isNullOrEmpty(scrollId)) { - DefaultCursor defaultCursor = new DefaultCursor(); - defaultCursor.setScrollId(scrollId); - defaultCursor.setLimit(queryAction.getSelect().getRowCount()); - defaultCursor.setFetchSize(queryAction.getSqlRequest().fetchSize()); - protocol = new Protocol(client, queryAction, response.getHits(), format, defaultCursor); - } else { - protocol = new Protocol(client, queryAction, response.getHits(), format, Cursor.NULL_CURSOR); - } - - return protocol; + return protocol.format(); + } + + /** + * QueryActionElasticExecutor.executeAnyAction() returns SearchHits inside SearchResponse. In + * order to get scroll ID if any, we need to execute DefaultQueryAction ourselves for + * SearchResponse. 
+ */ + private Protocol buildProtocolForDefaultQuery(Client client, DefaultQueryAction queryAction) + throws SqlParseException { + + SearchResponse response = (SearchResponse) queryAction.explain().get(); + String scrollId = response.getScrollId(); + + Protocol protocol; + if (!Strings.isNullOrEmpty(scrollId)) { + DefaultCursor defaultCursor = new DefaultCursor(); + defaultCursor.setScrollId(scrollId); + defaultCursor.setLimit(queryAction.getSelect().getRowCount()); + defaultCursor.setFetchSize(queryAction.getSqlRequest().fetchSize()); + protocol = new Protocol(client, queryAction, response.getHits(), format, defaultCursor); + } else { + protocol = new Protocol(client, queryAction, response.getHits(), format, Cursor.NULL_CURSOR); } + + return protocol; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java index aba0a3c599..e6ea767e17 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static org.opensearch.sql.legacy.domain.IndexStatement.StatementType; @@ -33,215 +32,223 @@ public class Protocol { - static final int OK_STATUS = 200; - static final int ERROR_STATUS = 500; - - private final String formatType; - private int status; - private long size; - private long total; - private ResultSet resultSet; - private ErrorMessage error; - private List columnNodeList; - private Cursor cursor = new NullCursor(); - private ColumnTypeProvider scriptColumnType = new ColumnTypeProvider(); - - public Protocol(Client client, QueryAction queryAction, Object queryResult, String formatType, Cursor cursor) { - this.cursor = cursor; - - if (queryAction instanceof QueryPlanQueryAction) { - this.columnNodeList = - 
((QueryPlanRequestBuilder) (((QueryPlanQueryAction) queryAction).explain())).outputColumns(); - } else if (queryAction instanceof DefaultQueryAction) { - scriptColumnType = queryAction.getScriptColumnType(); - } - - this.formatType = formatType; - QueryStatement query = queryAction.getQueryStatement(); - this.status = OK_STATUS; - this.resultSet = loadResultSet(client, query, queryResult); - this.size = resultSet.getDataRows().getSize(); - this.total = resultSet.getDataRows().getTotalHits(); - } - - - public Protocol(Client client, Object queryResult, String formatType, Cursor cursor) { - this.cursor = cursor; - this.status = OK_STATUS; - this.formatType = formatType; - this.resultSet = loadResultSetForCursor(client, queryResult); - } - - public Protocol(Exception e) { - this.formatType = null; - this.status = ERROR_STATUS; - this.error = ErrorMessageFactory.createErrorMessage(e, status); - } - - private ResultSet loadResultSetForCursor(Client client, Object queryResult) { - return new SelectResultSet(client, queryResult, formatType, cursor); - } - - private ResultSet loadResultSet(Client client, QueryStatement queryStatement, Object queryResult) { - if (queryResult instanceof List) { - return new BindingTupleResultSet(columnNodeList, (List) queryResult); - } - if (queryStatement instanceof Delete) { - return new DeleteResultSet(client, (Delete) queryStatement, queryResult); - } else if (queryStatement instanceof Query) { - return new SelectResultSet(client, (Query) queryStatement, queryResult, - scriptColumnType, formatType, cursor); - } else if (queryStatement instanceof IndexStatement) { - IndexStatement statement = (IndexStatement) queryStatement; - StatementType statementType = statement.getStatementType(); - - if (statementType == StatementType.SHOW) { - return new ShowResultSet(client, statement, queryResult); - } else if (statementType == StatementType.DESCRIBE) { - return new DescribeResultSet(client, statement, queryResult); - } - } - - throw new 
UnsupportedOperationException( - String.format("The following instance of QueryStatement is not supported: %s", - queryStatement.getClass().toString()) - ); - } - - public int getStatus() { - return status; - } - - public ResultSet getResultSet() { - return resultSet; - } - - public String format() { - if (status == OK_STATUS) { - switch (formatType) { - case "jdbc": - return outputInJdbcFormat(); - case "table": - return outputInTableFormat(); - case "raw": - return outputInRawFormat(); - default: - throw new UnsupportedOperationException( - String.format("The following format is not supported: %s", formatType)); - } - } - - return error.toString(); + static final int OK_STATUS = 200; + static final int ERROR_STATUS = 500; + + private final String formatType; + private int status; + private long size; + private long total; + private ResultSet resultSet; + private ErrorMessage error; + private List columnNodeList; + private Cursor cursor = new NullCursor(); + private ColumnTypeProvider scriptColumnType = new ColumnTypeProvider(); + + public Protocol( + Client client, + QueryAction queryAction, + Object queryResult, + String formatType, + Cursor cursor) { + this.cursor = cursor; + + if (queryAction instanceof QueryPlanQueryAction) { + this.columnNodeList = + ((QueryPlanRequestBuilder) (((QueryPlanQueryAction) queryAction).explain())) + .outputColumns(); + } else if (queryAction instanceof DefaultQueryAction) { + scriptColumnType = queryAction.getScriptColumnType(); + } + + this.formatType = formatType; + QueryStatement query = queryAction.getQueryStatement(); + this.status = OK_STATUS; + this.resultSet = loadResultSet(client, query, queryResult); + this.size = resultSet.getDataRows().getSize(); + this.total = resultSet.getDataRows().getTotalHits(); + } + + public Protocol(Client client, Object queryResult, String formatType, Cursor cursor) { + this.cursor = cursor; + this.status = OK_STATUS; + this.formatType = formatType; + this.resultSet = 
loadResultSetForCursor(client, queryResult); + } + + public Protocol(Exception e) { + this.formatType = null; + this.status = ERROR_STATUS; + this.error = ErrorMessageFactory.createErrorMessage(e, status); + } + + private ResultSet loadResultSetForCursor(Client client, Object queryResult) { + return new SelectResultSet(client, queryResult, formatType, cursor); + } + + private ResultSet loadResultSet( + Client client, QueryStatement queryStatement, Object queryResult) { + if (queryResult instanceof List) { + return new BindingTupleResultSet(columnNodeList, (List) queryResult); + } + if (queryStatement instanceof Delete) { + return new DeleteResultSet(client, (Delete) queryStatement, queryResult); + } else if (queryStatement instanceof Query) { + return new SelectResultSet( + client, (Query) queryStatement, queryResult, scriptColumnType, formatType, cursor); + } else if (queryStatement instanceof IndexStatement) { + IndexStatement statement = (IndexStatement) queryStatement; + StatementType statementType = statement.getStatementType(); + + if (statementType == StatementType.SHOW) { + return new ShowResultSet(client, statement, queryResult); + } else if (statementType == StatementType.DESCRIBE) { + return new DescribeResultSet(client, statement, queryResult); + } + } + + throw new UnsupportedOperationException( + String.format( + "The following instance of QueryStatement is not supported: %s", + queryStatement.getClass().toString())); + } + + public int getStatus() { + return status; + } + + public ResultSet getResultSet() { + return resultSet; + } + + public String format() { + if (status == OK_STATUS) { + switch (formatType) { + case "jdbc": + return outputInJdbcFormat(); + case "table": + return outputInTableFormat(); + case "raw": + return outputInRawFormat(); + default: + throw new UnsupportedOperationException( + String.format("The following format is not supported: %s", formatType)); + } + } + + return error.toString(); + } + + private String 
outputInJdbcFormat() { + JSONObject formattedOutput = new JSONObject(); + + formattedOutput.put("status", status); + formattedOutput.put("size", size); + formattedOutput.put("total", total); + + JSONArray schema = getSchemaAsJson(); + + formattedOutput.put("schema", schema); + formattedOutput.put("datarows", getDataRowsAsJson()); + + String cursorId = cursor.generateCursorId(); + if (!Strings.isNullOrEmpty(cursorId)) { + formattedOutput.put("cursor", cursorId); + } + + return formattedOutput.toString(2); + } + + private String outputInRawFormat() { + Schema schema = resultSet.getSchema(); + DataRows dataRows = resultSet.getDataRows(); + + StringBuilder formattedOutput = new StringBuilder(); + for (Row row : dataRows) { + formattedOutput.append(rawEntry(row, schema)).append("\n"); + } + + return formattedOutput.toString(); + } + + private String outputInTableFormat() { + return null; + } + + public String cursorFormat() { + if (status == OK_STATUS) { + switch (formatType) { + case "jdbc": + return cursorOutputInJDBCFormat(); + default: + throw new UnsupportedOperationException( + String.format( + "The following response format is not supported for cursor: [%s]", formatType)); + } } + return error.toString(); + } - private String outputInJdbcFormat() { - JSONObject formattedOutput = new JSONObject(); + private String cursorOutputInJDBCFormat() { + JSONObject formattedOutput = new JSONObject(); + formattedOutput.put("datarows", getDataRowsAsJson()); - formattedOutput.put("status", status); - formattedOutput.put("size", size); - formattedOutput.put("total", total); - - JSONArray schema = getSchemaAsJson(); - - formattedOutput.put("schema", schema); - formattedOutput.put("datarows", getDataRowsAsJson()); - - String cursorId = cursor.generateCursorId(); - if (!Strings.isNullOrEmpty(cursorId)) { - formattedOutput.put("cursor", cursorId); - } - - return formattedOutput.toString(2); + String cursorId = cursor.generateCursorId(); + if (!Strings.isNullOrEmpty(cursorId)) { + 
formattedOutput.put("cursor", cursorId); } + return formattedOutput.toString(2); + } - private String outputInRawFormat() { - Schema schema = resultSet.getSchema(); - DataRows dataRows = resultSet.getDataRows(); + private String rawEntry(Row row, Schema schema) { + // TODO String separator is being kept to "|" for the time being as using "\t" will require + // formatting since + // TODO tabs are occurring in multiple of 4 (one option is Guava's Strings.padEnd() method) + return StreamSupport.stream(schema.spliterator(), false) + .map(column -> row.getDataOrDefault(column.getName(), "NULL").toString()) + .collect(Collectors.joining("|")); + } - StringBuilder formattedOutput = new StringBuilder(); - for (Row row : dataRows) { - formattedOutput.append(rawEntry(row, schema)).append("\n"); - } - - return formattedOutput.toString(); - } - - private String outputInTableFormat() { - return null; - } + private JSONArray getSchemaAsJson() { + Schema schema = resultSet.getSchema(); + JSONArray schemaJson = new JSONArray(); - public String cursorFormat() { - if (status == OK_STATUS) { - switch (formatType) { - case "jdbc": - return cursorOutputInJDBCFormat(); - default: - throw new UnsupportedOperationException(String.format( - "The following response format is not supported for cursor: [%s]", formatType)); - } - } - return error.toString(); + for (Column column : schema) { + schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); } - private String cursorOutputInJDBCFormat() { - JSONObject formattedOutput = new JSONObject(); - formattedOutput.put("datarows", getDataRowsAsJson()); + return schemaJson; + } - String cursorId = cursor.generateCursorId(); - if (!Strings.isNullOrEmpty(cursorId)) { - formattedOutput.put("cursor", cursorId); - } - return formattedOutput.toString(2); + private JSONObject schemaEntry(String name, String alias, String type) { + JSONObject entry = new JSONObject(); + entry.put("name", name); + if (alias != null) { + 
entry.put("alias", alias); } + entry.put("type", type); - private String rawEntry(Row row, Schema schema) { - // TODO String separator is being kept to "|" for the time being as using "\t" will require formatting since - // TODO tabs are occurring in multiple of 4 (one option is Guava's Strings.padEnd() method) - return StreamSupport.stream(schema.spliterator(), false) - .map(column -> row.getDataOrDefault(column.getName(), "NULL").toString()) - .collect(Collectors.joining("|")); - } - - private JSONArray getSchemaAsJson() { - Schema schema = resultSet.getSchema(); - JSONArray schemaJson = new JSONArray(); - - for (Column column : schema) { - schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); - } - - return schemaJson; - } + return entry; + } - private JSONObject schemaEntry(String name, String alias, String type) { - JSONObject entry = new JSONObject(); - entry.put("name", name); - if (alias != null) { - entry.put("alias", alias); - } - entry.put("type", type); + private JSONArray getDataRowsAsJson() { + Schema schema = resultSet.getSchema(); + DataRows dataRows = resultSet.getDataRows(); + JSONArray dataRowsJson = new JSONArray(); - return entry; + for (Row row : dataRows) { + dataRowsJson.put(dataEntry(row, schema)); } - private JSONArray getDataRowsAsJson() { - Schema schema = resultSet.getSchema(); - DataRows dataRows = resultSet.getDataRows(); - JSONArray dataRowsJson = new JSONArray(); - - for (Row row : dataRows) { - dataRowsJson.put(dataEntry(row, schema)); - } - - return dataRowsJson; - } + return dataRowsJson; + } - private JSONArray dataEntry(Row dataRow, Schema schema) { - JSONArray entry = new JSONArray(); - for (Column column : schema) { - String columnName = column.getIdentifier(); - entry.put(dataRow.getDataOrDefault(columnName, JSONObject.NULL)); - } - return entry; + private JSONArray dataEntry(Row dataRow, Schema schema) { + JSONArray entry = new JSONArray(); + for (Column column : schema) { + String columnName = 
column.getIdentifier(); + entry.put(dataRow.getDataOrDefault(columnName, JSONObject.NULL)); } + return entry; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java index 9864f1ffdc..079a738eb3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.regex.Matcher; @@ -12,47 +11,44 @@ public abstract class ResultSet { - protected Schema schema; - protected DataRows dataRows; - - protected Client client; - protected String clusterName; - - public Schema getSchema() { - return schema; - } - - public DataRows getDataRows() { - return dataRows; - } - - protected String getClusterName() { - return client.admin().cluster() - .prepareHealth() - .get() - .getClusterName(); - } - - /** - * Check if given string matches the pattern. Do this check only if the pattern is a regex. - * Otherwise skip the matching process and consider it's a match. - * This is a quick fix to support SHOW/DESCRIBE alias by skip mismatch between actual index name - * and pattern (alias). - * @param string string to match - * @param pattern pattern - * @return true if match or pattern is not regular expression. otherwise false. 
- */ - protected boolean matchesPatternIfRegex(String string, String pattern) { - return isNotRegexPattern(pattern) || matchesPattern(string, pattern); - } - - protected boolean matchesPattern(String string, String pattern) { - Pattern p = Pattern.compile(pattern); - Matcher matcher = p.matcher(string); - return matcher.find(); - } - - private boolean isNotRegexPattern(String pattern) { - return !pattern.contains(".") && !pattern.contains("*"); - } + protected Schema schema; + protected DataRows dataRows; + + protected Client client; + protected String clusterName; + + public Schema getSchema() { + return schema; + } + + public DataRows getDataRows() { + return dataRows; + } + + protected String getClusterName() { + return client.admin().cluster().prepareHealth().get().getClusterName(); + } + + /** + * Check if given string matches the pattern. Do this check only if the pattern is a regex. + * Otherwise skip the matching process and consider it's a match. This is a quick fix to support + * SHOW/DESCRIBE alias by skip mismatch between actual index name and pattern (alias). + * + * @param string string to match + * @param pattern pattern + * @return true if match or pattern is not regular expression. otherwise false. 
+ */ + protected boolean matchesPatternIfRegex(String string, String pattern) { + return isNotRegexPattern(pattern) || matchesPattern(string, pattern); + } + + protected boolean matchesPattern(String string, String pattern) { + Pattern p = Pattern.compile(pattern); + Matcher matcher = p.matcher(string); + return matcher.find(); + } + + private boolean isNotRegexPattern(String pattern) { + return !pattern.contains(".") && !pattern.contains("*"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java index e02841fcd6..b29369f713 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static java.util.Collections.unmodifiableList; @@ -17,144 +16,155 @@ public class Schema implements Iterable { - private String indexName; - private List columns; + private String indexName; + private List columns; - private static Set types; + private static Set types; - static { - types = getTypes(); - } + static { + types = getTypes(); + } - public Schema(String indexName, List columns) { - this.indexName = indexName; - this.columns = columns; - } + public Schema(String indexName, List columns) { + this.indexName = indexName; + this.columns = columns; + } - public Schema(IndexStatement statement, List columns) { - this.indexName = statement.getIndexPattern(); - this.columns = columns; - } + public Schema(IndexStatement statement, List columns) { + this.indexName = statement.getIndexPattern(); + this.columns = columns; + } - public Schema(List columns){ - this.columns = columns; - } + public Schema(List columns) { + this.columns = columns; + } + + public String getIndexName() { + return indexName; + } - public String getIndexName() { - return 
indexName; + public List getHeaders() { + return columns.stream().map(column -> column.getName()).collect(Collectors.toList()); + } + + public List getColumns() { + return unmodifiableList(columns); + } + + private static Set getTypes() { + HashSet types = new HashSet<>(); + for (Type type : Type.values()) { + types.add(type.name()); } - public List getHeaders() { - return columns.stream() - .map(column -> column.getName()) - .collect(Collectors.toList()); + return types; + } + + // A method for efficiently checking if a Type exists + public static boolean hasType(String type) { + return types.contains(type); + } + + // Iterator method for Schema + @Override + public Iterator iterator() { + return new Iterator() { + private final Iterator iter = columns.iterator(); + + @Override + public boolean hasNext() { + return iter.hasNext(); + } + + @Override + public Column next() { + return iter.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("No changes allowed to Schema columns"); + } + }; + } + + // Only core OpenSearch datatypes currently supported + public enum Type { + TEXT, + KEYWORD, + IP, // String types + LONG, + INTEGER, + SHORT, + BYTE, + DOUBLE, + FLOAT, + HALF_FLOAT, + SCALED_FLOAT, // Numeric types + DATE, // Date types + BOOLEAN, // Boolean types + BINARY, // Binary types + OBJECT, + NESTED, + INTEGER_RANGE, + FLOAT_RANGE, + LONG_RANGE, + DOUBLE_RANGE, + DATE_RANGE; // Range types + + public String nameLowerCase() { + return name().toLowerCase(); } + } + + // Inner class for Column object + public static class Column { - public List getColumns() { - return unmodifiableList(columns); + private final String name; + private String alias; + private final Type type; + + private boolean identifiedByAlias; + + public Column(String name, String alias, Type type, boolean identifiedByAlias) { + this.name = name; + this.alias = alias; + this.type = type; + this.identifiedByAlias = identifiedByAlias; } - private static Set 
getTypes() { - HashSet types = new HashSet<>(); - for (Type type : Type.values()) { - types.add(type.name()); - } + public Column(String name, String alias, Type type) { + this(name, alias, type, false); + } - return types; + public String getName() { + return name; } - // A method for efficiently checking if a Type exists - public static boolean hasType(String type) { - return types.contains(type); + public String getAlias() { + return alias; } - // Iterator method for Schema - @Override - public Iterator iterator() { - return new Iterator() { - private final Iterator iter = columns.iterator(); - - @Override - public boolean hasNext() { - return iter.hasNext(); - } - - @Override - public Column next() { - return iter.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("No changes allowed to Schema columns"); - } - }; + public String getType() { + return type.nameLowerCase(); } - // Only core OpenSearch datatypes currently supported - public enum Type { - TEXT, KEYWORD, IP, // String types - LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, SCALED_FLOAT, // Numeric types - DATE, // Date types - BOOLEAN, // Boolean types - BINARY, // Binary types - OBJECT, - NESTED, - INTEGER_RANGE, FLOAT_RANGE, LONG_RANGE, DOUBLE_RANGE, DATE_RANGE; // Range types - - public String nameLowerCase() { - return name().toLowerCase(); - } + /* + * Some query types (like JOIN) label the data in SearchHit using alias instead of field name if it's given. + * + * This method returns the alias as the identifier if the identifiedByAlias flag is set for such cases so that + * the correct identifier is used to access related data in DataRows. 
+ */ + public String getIdentifier() { + if (identifiedByAlias && alias != null) { + return alias; + } else { + return name; + } } - // Inner class for Column object - public static class Column { - - private final String name; - private String alias; - private final Type type; - - private boolean identifiedByAlias; - - public Column(String name, String alias, Type type, boolean identifiedByAlias) { - this.name = name; - this.alias = alias; - this.type = type; - this.identifiedByAlias = identifiedByAlias; - } - - public Column(String name, String alias, Type type) { - this(name, alias, type, false); - } - - public String getName() { - return name; - } - - public String getAlias() { - return alias; - } - - public String getType() { - return type.nameLowerCase(); - } - - /* - * Some query types (like JOIN) label the data in SearchHit using alias instead of field name if it's given. - * - * This method returns the alias as the identifier if the identifiedByAlias flag is set for such cases so that - * the correct identifier is used to access related data in DataRows. 
- */ - public String getIdentifier() { - if (identifiedByAlias && alias != null) { - return alias; - } else { - return name; - } - } - - public Type getEnumType() { - return type; - } + public Type getEnumType() { + return type; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java index a6f4cf815a..445bdd45a0 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static java.util.Collections.unmodifiableMap; @@ -59,676 +58,671 @@ public class SelectResultSet extends ResultSet { - private static final Logger LOG = LogManager.getLogger(SelectResultSet.class); - - public static final String SCORE = "_score"; - private final String formatType; - - private Query query; - private Object queryResult; - - private boolean selectAll; - private String indexName; - private List columns = new ArrayList<>(); - private ColumnTypeProvider outputColumnType; - - private List head; - private long size; - private long totalHits; - private long internalTotalHits; - private List rows; - private Cursor cursor; - - private DateFieldFormatter dateFieldFormatter; - // alias -> base field name - private Map fieldAliasMap = new HashMap<>(); - - public SelectResultSet(Client client, - Query query, - Object queryResult, - ColumnTypeProvider outputColumnType, - String formatType, - Cursor cursor) { - this.client = client; - this.query = query; - this.queryResult = queryResult; - this.selectAll = false; - this.formatType = formatType; - this.outputColumnType = outputColumnType; - this.cursor = cursor; - - if (isJoinQuery()) { - JoinSelect joinQuery = (JoinSelect) query; - loadFromEsState(joinQuery.getFirstTable()); - 
loadFromEsState(joinQuery.getSecondTable()); - } else { - loadFromEsState(query); - } - this.schema = new Schema(indexName, columns); - this.head = schema.getHeaders(); - this.dateFieldFormatter = new DateFieldFormatter(indexName, columns, fieldAliasMap); - - extractData(); - populateCursor(); - this.dataRows = new DataRows(size, totalHits, rows); - } - - public SelectResultSet(Client client, Object queryResult, String formatType, Cursor cursor) { - this.cursor = cursor; - this.client = client; - this.queryResult = queryResult; - this.selectAll = false; - this.formatType = formatType; - populateResultSetFromCursor(cursor); - } - - public String indexName(){ - return this.indexName; - } - - public Map fieldAliasMap() { - return unmodifiableMap(this.fieldAliasMap); - } - - public void populateResultSetFromCursor(Cursor cursor) { - switch (cursor.getType()) { - case DEFAULT: - populateResultSetFromDefaultCursor((DefaultCursor) cursor); - default: - return; - } - } - - private void populateResultSetFromDefaultCursor(DefaultCursor cursor) { - this.columns = cursor.getColumns(); - this.schema = new Schema(columns); - this.head = schema.getHeaders(); - this.dateFieldFormatter = new DateFieldFormatter( - cursor.getIndexPattern(), - columns, - cursor.getFieldAliasMap() - ); - extractData(); - this.dataRows = new DataRows(size, totalHits, rows); - } - - //*********************************************************** - // Logic for loading Columns to be stored in Schema - //*********************************************************** - - /** - * Makes a request to local node to receive meta data information and maps each field specified in SELECT to its - * type in the index mapping - */ - private void loadFromEsState(Query query) { - String indexName = fetchIndexName(query); - String[] fieldNames = fetchFieldsAsArray(query); - - // Reset boolean in the case of JOIN query where multiple calls to loadFromEsState() are made - selectAll = isSimpleQuerySelectAll(query) || 
isJoinQuerySelectAll(query, fieldNames); - - GetFieldMappingsRequest request = new GetFieldMappingsRequest() - .indices(indexName) - .fields(selectAllFieldsIfEmpty(fieldNames)) - .local(true); - GetFieldMappingsResponse response = client.admin().indices() - .getFieldMappings(request) - .actionGet(); - - Map> mappings = response.mappings(); - if (mappings.isEmpty() || !mappings.containsKey(indexName)) { - throw new IllegalArgumentException(String.format("Index type %s does not exist", query.getFrom())); - } - Map typeMappings = mappings.get(indexName); - - - this.indexName = this.indexName == null ? indexName : (this.indexName + "|" + indexName); - this.columns.addAll(renameColumnWithTableAlias(query, populateColumns(query, fieldNames, typeMappings))); - } - - /** - * Rename column name with table alias as prefix for join query - */ - private List renameColumnWithTableAlias(Query query, List columns) { - List renamedCols; - if ((query instanceof TableOnJoinSelect) - && !Strings.isNullOrEmpty(((TableOnJoinSelect) query).getAlias())) { - - TableOnJoinSelect joinQuery = (TableOnJoinSelect) query; - renamedCols = new ArrayList<>(); - - for (Schema.Column column : columns) { - renamedCols.add(new Schema.Column( - joinQuery.getAlias() + "." 
+ column.getName(), - column.getAlias(), - Schema.Type.valueOf(column.getType().toUpperCase()), - true - )); - } - } else { - renamedCols = columns; - } - return renamedCols; - } - - private boolean isSelectAll() { - return selectAll; - } - - /** - * Is a simple (non-join/non-group-by) query with SELECT * explicitly - */ - private boolean isSimpleQuerySelectAll(Query query) { - return (query instanceof Select) && ((Select) query).isSelectAll(); - } - - /** - * Is a join query with SELECT * on either one of the tables some fields specified - */ - private boolean isJoinQuerySelectAll(Query query, String[] fieldNames) { - return fieldNames.length == 0 && !fieldsSelectedOnAnotherTable(query); - } - - /** - * In the case of a JOIN query, if no fields are SELECTed on for a particular table, the other table's fields are - * checked in SELECT to ensure a table is not incorrectly marked as a isSelectAll() case. - */ - private boolean fieldsSelectedOnAnotherTable(Query query) { - if (isJoinQuery()) { - TableOnJoinSelect otherTable = getOtherTable(query); - return otherTable.getSelectedFields().size() > 0; - } - - return false; - } - - private TableOnJoinSelect getOtherTable(Query currJoinSelect) { - JoinSelect joinQuery = (JoinSelect) query; - if (joinQuery.getFirstTable() == currJoinSelect) { - return joinQuery.getSecondTable(); - } else { - return joinQuery.getFirstTable(); - } - } - - private boolean containsWildcard(Query query) { - for (Field field : fetchFields(query)) { - if (!(field instanceof MethodField) && field.getName().contains("*")) { - return true; - } - } - - return false; - } - - private String fetchIndexName(Query query) { - return query.getFrom().get(0).getIndex(); - } - - /** - * queryResult is checked to see if it's of type Aggregation in which case the aggregation fields in GROUP BY - * are returned as well. 
This prevents returning a Schema of all fields when SELECT * is called with - * GROUP BY (since all fields will be retrieved from the typeMappings request when no fields are returned from - * fetchFields()). - *

- * After getting all of the fields from GROUP BY, the fields from SELECT are iterated and only the fields of type - * MethodField are added (to prevent duplicate field in Schema for queries like - * "SELECT age, COUNT(*) FROM bank GROUP BY age" where 'age' is mentioned in both SELECT and GROUP BY). + private static final Logger LOG = LogManager.getLogger(SelectResultSet.class); + + public static final String SCORE = "_score"; + private final String formatType; + + private Query query; + private Object queryResult; + + private boolean selectAll; + private String indexName; + private List columns = new ArrayList<>(); + private ColumnTypeProvider outputColumnType; + + private List head; + private long size; + private long totalHits; + private long internalTotalHits; + private List rows; + private Cursor cursor; + + private DateFieldFormatter dateFieldFormatter; + // alias -> base field name + private Map fieldAliasMap = new HashMap<>(); + + public SelectResultSet( + Client client, + Query query, + Object queryResult, + ColumnTypeProvider outputColumnType, + String formatType, + Cursor cursor) { + this.client = client; + this.query = query; + this.queryResult = queryResult; + this.selectAll = false; + this.formatType = formatType; + this.outputColumnType = outputColumnType; + this.cursor = cursor; + + if (isJoinQuery()) { + JoinSelect joinQuery = (JoinSelect) query; + loadFromEsState(joinQuery.getFirstTable()); + loadFromEsState(joinQuery.getSecondTable()); + } else { + loadFromEsState(query); + } + this.schema = new Schema(indexName, columns); + this.head = schema.getHeaders(); + this.dateFieldFormatter = new DateFieldFormatter(indexName, columns, fieldAliasMap); + + extractData(); + populateCursor(); + this.dataRows = new DataRows(size, totalHits, rows); + } + + public SelectResultSet(Client client, Object queryResult, String formatType, Cursor cursor) { + this.cursor = cursor; + this.client = client; + this.queryResult = queryResult; + this.selectAll = false; + 
this.formatType = formatType; + populateResultSetFromCursor(cursor); + } + + public String indexName() { + return this.indexName; + } + + public Map fieldAliasMap() { + return unmodifiableMap(this.fieldAliasMap); + } + + public void populateResultSetFromCursor(Cursor cursor) { + switch (cursor.getType()) { + case DEFAULT: + populateResultSetFromDefaultCursor((DefaultCursor) cursor); + default: + return; + } + } + + private void populateResultSetFromDefaultCursor(DefaultCursor cursor) { + this.columns = cursor.getColumns(); + this.schema = new Schema(columns); + this.head = schema.getHeaders(); + this.dateFieldFormatter = + new DateFieldFormatter(cursor.getIndexPattern(), columns, cursor.getFieldAliasMap()); + extractData(); + this.dataRows = new DataRows(size, totalHits, rows); + } + + // *********************************************************** + // Logic for loading Columns to be stored in Schema + // *********************************************************** + + /** + * Makes a request to local node to receive meta data information and maps each field specified in + * SELECT to its type in the index mapping + */ + private void loadFromEsState(Query query) { + String indexName = fetchIndexName(query); + String[] fieldNames = fetchFieldsAsArray(query); + + // Reset boolean in the case of JOIN query where multiple calls to loadFromEsState() are made + selectAll = isSimpleQuerySelectAll(query) || isJoinQuerySelectAll(query, fieldNames); + + GetFieldMappingsRequest request = + new GetFieldMappingsRequest() + .indices(indexName) + .fields(selectAllFieldsIfEmpty(fieldNames)) + .local(true); + GetFieldMappingsResponse response = + client.admin().indices().getFieldMappings(request).actionGet(); + + Map> mappings = response.mappings(); + if (mappings.isEmpty() || !mappings.containsKey(indexName)) { + throw new IllegalArgumentException( + String.format("Index type %s does not exist", query.getFrom())); + } + Map typeMappings = mappings.get(indexName); + + this.indexName 
= this.indexName == null ? indexName : (this.indexName + "|" + indexName); + this.columns.addAll( + renameColumnWithTableAlias(query, populateColumns(query, fieldNames, typeMappings))); + } + + /** Rename column name with table alias as prefix for join query */ + private List renameColumnWithTableAlias(Query query, List columns) { + List renamedCols; + if ((query instanceof TableOnJoinSelect) + && !Strings.isNullOrEmpty(((TableOnJoinSelect) query).getAlias())) { + + TableOnJoinSelect joinQuery = (TableOnJoinSelect) query; + renamedCols = new ArrayList<>(); + + for (Schema.Column column : columns) { + renamedCols.add( + new Schema.Column( + joinQuery.getAlias() + "." + column.getName(), + column.getAlias(), + Schema.Type.valueOf(column.getType().toUpperCase()), + true)); + } + } else { + renamedCols = columns; + } + return renamedCols; + } + + private boolean isSelectAll() { + return selectAll; + } + + /** Is a simple (non-join/non-group-by) query with SELECT * explicitly */ + private boolean isSimpleQuerySelectAll(Query query) { + return (query instanceof Select) && ((Select) query).isSelectAll(); + } + + /** Is a join query with SELECT * on either one of the tables some fields specified */ + private boolean isJoinQuerySelectAll(Query query, String[] fieldNames) { + return fieldNames.length == 0 && !fieldsSelectedOnAnotherTable(query); + } + + /** + * In the case of a JOIN query, if no fields are SELECTed on for a particular table, the other + * table's fields are checked in SELECT to ensure a table is not incorrectly marked as a + * isSelectAll() case. 
+ */ + private boolean fieldsSelectedOnAnotherTable(Query query) { + if (isJoinQuery()) { + TableOnJoinSelect otherTable = getOtherTable(query); + return otherTable.getSelectedFields().size() > 0; + } + + return false; + } + + private TableOnJoinSelect getOtherTable(Query currJoinSelect) { + JoinSelect joinQuery = (JoinSelect) query; + if (joinQuery.getFirstTable() == currJoinSelect) { + return joinQuery.getSecondTable(); + } else { + return joinQuery.getFirstTable(); + } + } + + private boolean containsWildcard(Query query) { + for (Field field : fetchFields(query)) { + if (!(field instanceof MethodField) && field.getName().contains("*")) { + return true; + } + } + + return false; + } + + private String fetchIndexName(Query query) { + return query.getFrom().get(0).getIndex(); + } + + /** + * queryResult is checked to see if it's of type Aggregation in which case the aggregation fields + * in GROUP BY are returned as well. This prevents returning a Schema of all fields when SELECT * + * is called with GROUP BY (since all fields will be retrieved from the typeMappings request when + * no fields are returned from fetchFields()). + * + *

After getting all of the fields from GROUP BY, the fields from SELECT are iterated and only + * the fields of type MethodField are added (to prevent duplicate field in Schema for queries like + * "SELECT age, COUNT(*) FROM bank GROUP BY age" where 'age' is mentioned in both SELECT and GROUP + * BY). + */ + private List fetchFields(Query query) { + Select select = (Select) query; + + if (queryResult instanceof Aggregations) { + List groupByFields = + select.getGroupBys().isEmpty() ? new ArrayList<>() : select.getGroupBys().get(0); + + for (Field selectField : select.getFields()) { + if (selectField instanceof MethodField && !selectField.isScriptField()) { + groupByFields.add(selectField); + } else if (selectField.isScriptField() + && selectField.getAlias().equals(groupByFields.get(0).getName())) { + return select.getFields(); + } + } + return groupByFields; + } + + if (query instanceof TableOnJoinSelect) { + return ((TableOnJoinSelect) query).getSelectedFields(); + } + + return select.getFields(); + } + + private String[] fetchFieldsAsArray(Query query) { + List fields = fetchFields(query); + return fields.stream().map(this::getFieldName).toArray(String[]::new); + } + + private String getFieldName(Field field) { + if (field instanceof MethodField) { + return field.getAlias(); + } + + return field.getName(); + } + + private Map fetchFieldMap(Query query) { + Map fieldMap = new HashMap<>(); + + for (Field field : fetchFields(query)) { + fieldMap.put(getFieldName(field), field); + } + + return fieldMap; + } + + private String[] selectAllFieldsIfEmpty(String[] fields) { + if (isSelectAll()) { + return new String[] {"*"}; + } + + return fields; + } + + private String[] emptyArrayIfNull(String typeName) { + if (typeName != null) { + return new String[] {typeName}; + } else { + return Strings.EMPTY_ARRAY; + } + } + + private Schema.Type fetchMethodReturnType(int fieldIndex, MethodField field) { + switch (field.getName().toLowerCase()) { + case "count": + return 
Schema.Type.LONG; + case "sum": + case "avg": + case "min": + case "max": + case "percentiles": + return Schema.Type.DOUBLE; + case "script": + { + // TODO: return type information is disconnected from the function definitions in + // SQLFunctions. + // Refactor SQLFunctions to have functions self-explanatory (types, scripts) and pluggable + // (similar to Strategy pattern) + if (field.getExpression() instanceof SQLCaseExpr) { + return Schema.Type.TEXT; + } + Schema.Type resolvedType = outputColumnType.get(fieldIndex); + return SQLFunctions.getScriptFunctionReturnType(field, resolvedType); + } + default: + throw new UnsupportedOperationException( + String.format("The following method is not supported in Schema: %s", field.getName())); + } + } + + /** + * Returns a list of Column objects which contain names identifying the field as well as its type. + * + *

If all fields are being selected (SELECT *) then the order of fields returned will be + * random, otherwise the output will be in the same order as how they were selected. + * + *

If an alias was given for a field, that will be used to identify the field in Column, + * otherwise the field name will be used. + */ + private List populateColumns( + Query query, String[] fieldNames, Map typeMappings) { + List fieldNameList; + + if (isSelectAll() || containsWildcard(query)) { + fieldNameList = new ArrayList<>(typeMappings.keySet()); + } else { + fieldNameList = Arrays.asList(fieldNames); + } + + /* + * The reason the 'fieldMap' mapping is needed on top of 'fieldNameList' is because the map would be + * empty in cases like 'SELECT *' but List fieldNameList will always be set in either case. + * That way, 'fieldNameList' is used to access field names in order that they were selected, if given, + * and then 'fieldMap' is used to access the respective Field object to check for aliases. */ - private List fetchFields(Query query) { - Select select = (Select) query; - - if (queryResult instanceof Aggregations) { - List groupByFields = select.getGroupBys().isEmpty() ? new ArrayList<>() : - select.getGroupBys().get(0); - - - for (Field selectField : select.getFields()) { - if (selectField instanceof MethodField && !selectField.isScriptField()) { - groupByFields.add(selectField); - } else if (selectField.isScriptField() - && selectField.getAlias().equals(groupByFields.get(0).getName())) { - return select.getFields(); - } - } - return groupByFields; - } - - if (query instanceof TableOnJoinSelect) { - return ((TableOnJoinSelect) query).getSelectedFields(); - } - - return select.getFields(); - } - - private String[] fetchFieldsAsArray(Query query) { - List fields = fetchFields(query); - return fields.stream() - .map(this::getFieldName) - .toArray(String[]::new); - } - - private String getFieldName(Field field) { - if (field instanceof MethodField) { - return field.getAlias(); - } - - return field.getName(); - } - - private Map fetchFieldMap(Query query) { - Map fieldMap = new HashMap<>(); - - for (Field field : fetchFields(query)) { - 
fieldMap.put(getFieldName(field), field); - } - - return fieldMap; - } - - private String[] selectAllFieldsIfEmpty(String[] fields) { - if (isSelectAll()) { - return new String[]{"*"}; - } - - return fields; - } - - private String[] emptyArrayIfNull(String typeName) { - if (typeName != null) { - return new String[]{typeName}; - } else { - return Strings.EMPTY_ARRAY; - } - } - - private Schema.Type fetchMethodReturnType(int fieldIndex, MethodField field) { - switch (field.getName().toLowerCase()) { - case "count": - return Schema.Type.LONG; - case "sum": - case "avg": - case "min": - case "max": - case "percentiles": - return Schema.Type.DOUBLE; - case "script": { - // TODO: return type information is disconnected from the function definitions in SQLFunctions. - // Refactor SQLFunctions to have functions self-explanatory (types, scripts) and pluggable - // (similar to Strategy pattern) - if (field.getExpression() instanceof SQLCaseExpr) { - return Schema.Type.TEXT; - } - Schema.Type resolvedType = outputColumnType.get(fieldIndex); - return SQLFunctions.getScriptFunctionReturnType(field, resolvedType); - } - default: - throw new UnsupportedOperationException( - String.format("The following method is not supported in Schema: %s", field.getName())); - } - } - - /** - * Returns a list of Column objects which contain names identifying the field as well as its type. - *

- * If all fields are being selected (SELECT *) then the order of fields returned will be random, otherwise - * the output will be in the same order as how they were selected. - *

- * If an alias was given for a field, that will be used to identify the field in Column, otherwise the field name - * will be used. - */ - private List populateColumns(Query query, String[] fieldNames, Map typeMappings) { - List fieldNameList; - - if (isSelectAll() || containsWildcard(query)) { - fieldNameList = new ArrayList<>(typeMappings.keySet()); - } else { - fieldNameList = Arrays.asList(fieldNames); + Map fieldMap = fetchFieldMap(query); + List columns = new ArrayList<>(); + for (String fieldName : fieldNameList) { + // _score is a special case since it is not included in typeMappings, so it is checked for + // here + if (fieldName.equals(SCORE)) { + columns.add( + new Schema.Column(fieldName, fetchAlias(fieldName, fieldMap), Schema.Type.FLOAT)); + continue; + } + /* + * Methods are also a special case as their type cannot be determined from typeMappings, so it is checked + * for here. + * + * Note: When adding the Column for Method, alias is used in place of getName() because the default name + * is set as alias (ex. COUNT(*)) and overwritten if an alias is given. So alias is used as the + * name instead. + */ + if (fieldMap.get(fieldName) instanceof MethodField) { + MethodField methodField = (MethodField) fieldMap.get(fieldName); + int fieldIndex = fieldNameList.indexOf(fieldName); + + SQLExpr expr = methodField.getExpression(); + if (expr instanceof SQLCastExpr) { + // Since CAST expressions create an alias for a field, we need to save the original field + // name + // for this alias for formatting data later. + SQLIdentifierExpr castFieldIdentifier = + (SQLIdentifierExpr) ((SQLCastExpr) expr).getExpr(); + fieldAliasMap.put(methodField.getAlias(), castFieldIdentifier.getName()); + } + + columns.add( + new Schema.Column( + methodField.getAlias(), null, fetchMethodReturnType(fieldIndex, methodField))); + continue; + } + + /* + * Unnecessary fields (ex. _index, _parent) are ignored. 
+ * Fields like field.keyword will be ignored when isSelectAll is true but will be returned if + * explicitly selected. + */ + FieldMapping field = new FieldMapping(fieldName, typeMappings, fieldMap); + if (!field.isMetaField()) { + + if (field.isMultiField() && !field.isSpecified()) { + continue; + } + if (field.isPropertyField() && !field.isSpecified() && !field.isWildcardSpecified()) { + continue; } /* - * The reason the 'fieldMap' mapping is needed on top of 'fieldNameList' is because the map would be - * empty in cases like 'SELECT *' but List fieldNameList will always be set in either case. - * That way, 'fieldNameList' is used to access field names in order that they were selected, if given, - * and then 'fieldMap' is used to access the respective Field object to check for aliases. + * Three cases regarding Type: + * 1. If Type exists, create Column + * 2. If Type doesn't exist and isSelectAll() is false, throw exception + * 3. If Type doesn't exist and isSelectAll() is true, Column creation for fieldName is skipped */ - Map fieldMap = fetchFieldMap(query); - List columns = new ArrayList<>(); - for (String fieldName : fieldNameList) { - // _score is a special case since it is not included in typeMappings, so it is checked for here - if (fieldName.equals(SCORE)) { - columns.add(new Schema.Column(fieldName, fetchAlias(fieldName, fieldMap), Schema.Type.FLOAT)); - continue; - } - /* - * Methods are also a special case as their type cannot be determined from typeMappings, so it is checked - * for here. - * - * Note: When adding the Column for Method, alias is used in place of getName() because the default name - * is set as alias (ex. COUNT(*)) and overwritten if an alias is given. So alias is used as the - * name instead. 
- */ - if (fieldMap.get(fieldName) instanceof MethodField) { - MethodField methodField = (MethodField) fieldMap.get(fieldName); - int fieldIndex = fieldNameList.indexOf(fieldName); - - SQLExpr expr = methodField.getExpression(); - if (expr instanceof SQLCastExpr) { - // Since CAST expressions create an alias for a field, we need to save the original field name - // for this alias for formatting data later. - SQLIdentifierExpr castFieldIdentifier = (SQLIdentifierExpr) ((SQLCastExpr) expr).getExpr(); - fieldAliasMap.put(methodField.getAlias(), castFieldIdentifier.getName()); - } - - columns.add( - new Schema.Column( - methodField.getAlias(), - null, - fetchMethodReturnType(fieldIndex, methodField) - ) - ); - continue; - } - - /* - * Unnecessary fields (ex. _index, _parent) are ignored. - * Fields like field.keyword will be ignored when isSelectAll is true but will be returned if - * explicitly selected. - */ - FieldMapping field = new FieldMapping(fieldName, typeMappings, fieldMap); - if (!field.isMetaField()) { - - if (field.isMultiField() && !field.isSpecified()) { - continue; - } - if (field.isPropertyField() && !field.isSpecified() && !field.isWildcardSpecified()) { - continue; - } - - /* - * Three cases regarding Type: - * 1. If Type exists, create Column - * 2. If Type doesn't exist and isSelectAll() is false, throw exception - * 3. 
If Type doesn't exist and isSelectAll() is true, Column creation for fieldName is skipped - */ - String type = field.type().toUpperCase(); - if (Schema.hasType(type)) { - - // If the current field is a group key, we should use alias as the identifier - boolean isGroupKey = false; - Select select = (Select) query; - if (null != select.getGroupBys() - && !select.getGroupBys().isEmpty() - && select.getGroupBys().get(0).contains(fieldMap.get(fieldName))) { - isGroupKey = true; - } - - columns.add( - new Schema.Column( - fieldName, - fetchAlias(fieldName, fieldMap), - Schema.Type.valueOf(type), - isGroupKey - ) - ); - } else if (!isSelectAll()) { - throw new IllegalArgumentException( - String.format("%s fieldName types are currently not supported.", type)); - } - } - } - - if (isSelectAllOnly(query)) { - populateAllNestedFields(columns, fieldNameList); - } - return columns; - } - - /** - * SELECT * only without other columns or wildcard pattern specified. - */ - private boolean isSelectAllOnly(Query query) { - return isSelectAll() && fetchFields(query).isEmpty(); - } - - /** - * Special case which trades off consistency of SELECT * meaning for more intuition from customer perspective. - * In other cases, * means all regular fields on the level. - * The only exception here is * picks all non-regular (nested) fields as JSON without flatten. - */ - private void populateAllNestedFields(List columns, List fields) { - Set nestedFieldPaths = fields.stream(). - map(FieldMapping::new). - filter(FieldMapping::isPropertyField). - filter(f -> !f.isMultiField()). - map(FieldMapping::path). 
- collect(toSet()); - - for (String nestedFieldPath : nestedFieldPaths) { - columns.add( - new Schema.Column(nestedFieldPath, "", Schema.Type.TEXT) - ); - } - } - + String type = field.type().toUpperCase(); + if (Schema.hasType(type)) { + + // If the current field is a group key, we should use alias as the identifier + boolean isGroupKey = false; + Select select = (Select) query; + if (null != select.getGroupBys() + && !select.getGroupBys().isEmpty() + && select.getGroupBys().get(0).contains(fieldMap.get(fieldName))) { + isGroupKey = true; + } + + columns.add( + new Schema.Column( + fieldName, + fetchAlias(fieldName, fieldMap), + Schema.Type.valueOf(type), + isGroupKey)); + } else if (!isSelectAll()) { + throw new IllegalArgumentException( + String.format("%s fieldName types are currently not supported.", type)); + } + } + } + + if (isSelectAllOnly(query)) { + populateAllNestedFields(columns, fieldNameList); + } + return columns; + } + + /** SELECT * only without other columns or wildcard pattern specified. */ + private boolean isSelectAllOnly(Query query) { + return isSelectAll() && fetchFields(query).isEmpty(); + } + + /** + * Special case which trades off consistency of SELECT * meaning for more intuition from customer + * perspective. In other cases, * means all regular fields on the level. The only exception here + * is * picks all non-regular (nested) fields as JSON without flatten. + */ + private void populateAllNestedFields(List columns, List fields) { + Set nestedFieldPaths = + fields.stream() + .map(FieldMapping::new) + .filter(FieldMapping::isPropertyField) + .filter(f -> !f.isMultiField()) + .map(FieldMapping::path) + .collect(toSet()); + + for (String nestedFieldPath : nestedFieldPaths) { + columns.add(new Schema.Column(nestedFieldPath, "", Schema.Type.TEXT)); + } + } + + /** + * Since this helper method is called within a check to see if the field exists in type mapping, + * it's already confirmed that the fieldName is valid. 
The check for fieldName in fieldMap has to + * be done in the case that 'SELECT *' was called since the map will be empty. + */ + private String fetchAlias(String fieldName, Map fieldMap) { + if (fieldMap.containsKey(fieldName)) { + return fieldMap.get(fieldName).getAlias(); + } + + return null; + } + + // *********************************************************** + // Logic for loading Rows to be stored in DataRows + // *********************************************************** + + /** + * Extract data from query results into Row objects Need to cover two cases: 1. queryResult is a + * SearchHits object 2. queryResult is an Aggregations object + * + *

Ignoring queryResult being ActionResponse (from executeDeleteAction), there should be no + * data in this case + */ + private void extractData() { + if (queryResult instanceof SearchHits) { + SearchHits searchHits = (SearchHits) queryResult; + + this.rows = populateRows(searchHits); + this.size = rows.size(); + this.internalTotalHits = + Optional.ofNullable(searchHits.getTotalHits()).map(th -> th.value).orElse(0L); + // size may be greater than totalHits after nested rows be flatten + this.totalHits = Math.max(size, internalTotalHits); + } else if (queryResult instanceof Aggregations) { + Aggregations aggregations = (Aggregations) queryResult; + + this.rows = populateRows(aggregations); + this.size = rows.size(); + this.internalTotalHits = size; + // Total hits is not available from Aggregations so 'size' is used + this.totalHits = size; + } + } + + private void populateCursor() { + switch (cursor.getType()) { + case DEFAULT: + populateDefaultCursor((DefaultCursor) cursor); + default: + return; + } + } + + private void populateDefaultCursor(DefaultCursor cursor) { /** - * Since this helper method is called within a check to see if the field exists in type mapping, it's - * already confirmed that the fieldName is valid. The check for fieldName in fieldMap has to be done in the case - * that 'SELECT *' was called since the map will be empty. + * Assumption: scrollId, fetchSize, limit already being set in + * + * @see PrettyFormatRestExecutor.buildProtocolForDefaultQuery() */ - private String fetchAlias(String fieldName, Map fieldMap) { - if (fieldMap.containsKey(fieldName)) { - return fieldMap.get(fieldName).getAlias(); - } - - return null; - } - - //*********************************************************** - // Logic for loading Rows to be stored in DataRows - //*********************************************************** - - /** - * Extract data from query results into Row objects - * Need to cover two cases: - * 1. queryResult is a SearchHits object - * 2. 
queryResult is an Aggregations object - *

- * Ignoring queryResult being ActionResponse (from executeDeleteAction), there should be no data in this case - */ - private void extractData() { - if (queryResult instanceof SearchHits) { - SearchHits searchHits = (SearchHits) queryResult; - - this.rows = populateRows(searchHits); - this.size = rows.size(); - this.internalTotalHits = Optional.ofNullable(searchHits.getTotalHits()).map(th -> th.value).orElse(0L); - // size may be greater than totalHits after nested rows be flatten - this.totalHits = Math.max(size, internalTotalHits); - } else if (queryResult instanceof Aggregations) { - Aggregations aggregations = (Aggregations) queryResult; - - this.rows = populateRows(aggregations); - this.size = rows.size(); - this.internalTotalHits = size; - // Total hits is not available from Aggregations so 'size' is used - this.totalHits = size; - } - } - - private void populateCursor() { - switch(cursor.getType()) { - case DEFAULT: - populateDefaultCursor((DefaultCursor) cursor); - default: - return; - } - } - - private void populateDefaultCursor(DefaultCursor cursor) { - /** - * Assumption: scrollId, fetchSize, limit already being set in - * @see PrettyFormatRestExecutor.buildProtocolForDefaultQuery() - */ - - Integer limit = cursor.getLimit(); - long rowsLeft = rowsLeft(cursor.getFetchSize(), cursor.getLimit()); - if (rowsLeft <= 0) { - // close the cursor - String scrollId = cursor.getScrollId(); - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(scrollId).get(); - if (!clearScrollResponse.isSucceeded()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.error("Error closing the cursor context {} ", scrollId); - } - return; - } - - cursor.setRowsLeft(rowsLeft); - cursor.setIndexPattern(indexName); - cursor.setFieldAliasMap(fieldAliasMap()); - cursor.setColumns(columns); - this.totalHits = limit != null && limit < internalTotalHits ? 
limit : internalTotalHits; - } - - private long rowsLeft(Integer fetchSize, Integer limit) { - long rowsLeft = 0; - long totalHits = internalTotalHits; - if (limit != null && limit < totalHits) { - rowsLeft = limit - fetchSize; - } else { - rowsLeft = totalHits - fetchSize; - } - return rowsLeft; - } - - private List populateRows(SearchHits searchHits) { - List rows = new ArrayList<>(); - Set newKeys = new HashSet<>(head); - for (SearchHit hit : searchHits) { - Map rowSource = hit.getSourceAsMap(); - List result; - - if (!isJoinQuery()) { - // Row already flatten in source in join. And join doesn't support nested fields for now. - rowSource = flatRow(head, rowSource); - rowSource.put(SCORE, hit.getScore()); - - for (Map.Entry field : hit.getFields().entrySet()) { - rowSource.put(field.getKey(), field.getValue().getValue()); - } - if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { - dateFieldFormatter.applyJDBCDateFormat(rowSource); - } - result = flatNestedField(newKeys, rowSource, hit.getInnerHits()); - } else { - if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { - dateFieldFormatter.applyJDBCDateFormat(rowSource); - } - result = new ArrayList<>(); - result.add(new DataRows.Row(rowSource)); - } - - rows.addAll(result); - } - - return rows; - } - - private List populateRows(Aggregations aggregations) { - List rows = new ArrayList<>(); - List aggs = aggregations.asList(); - if (hasTermAggregations(aggs)) { - Terms terms = (Terms) aggs.get(0); - String field = terms.getName(); - - for (Terms.Bucket bucket : terms.getBuckets()) { - List aggRows = new ArrayList<>(); - getAggsData(bucket, aggRows, addMap(field, bucket.getKey())); - - rows.addAll(aggRows); - } - } else { - // This occurs for cases like "SELECT AVG(age) FROM bank" where we aggregate in SELECT with no GROUP BY - rows.add( - new DataRows.Row( - addNumericAggregation(aggs, new HashMap<>()) - ) - ); - } - return rows; - } - - /** - * This recursive method goes through the buckets 
iterated through populateRows() and flattens any inner - * aggregations and puts that data as a Map into a Row (this nested aggregation happens when we GROUP BY - * multiple fields) - */ - private void getAggsData(Terms.Bucket bucket, List aggRows, Map data) { - List aggs = bucket.getAggregations().asList(); - if (hasTermAggregations(aggs)) { - Terms terms = (Terms) aggs.get(0); - String field = terms.getName(); - - for (Terms.Bucket innerBucket : terms.getBuckets()) { - data.put(field, innerBucket.getKey()); - getAggsData(innerBucket, aggRows, data); - data.remove(field); - } - } else { - data = addNumericAggregation(aggs, data); - aggRows.add(new DataRows.Row(new HashMap<>(data))); - } - } - - /** - * hasTermAggregations() checks for specific type of aggregation, one that contains Terms. This is the case when the - * aggregations contains the contents of a GROUP BY field. - *

- * If the aggregation contains the data for an aggregation function (ex. COUNT(*)), the items in the list will - * be of instance InternalValueCount, InternalSum, etc. (depending on the aggregation function) and will be - * considered a base case of getAggsData() which will add that data to the Row (if it exists). - */ - private boolean hasTermAggregations(List aggs) { - return !aggs.isEmpty() && aggs.get(0) instanceof Terms; - } - - /** - * Adds the contents of Aggregation (specifically the NumericMetricsAggregation.SingleValue instance) from - * bucket.aggregations into the data map - */ - private Map addNumericAggregation(List aggs, Map data) { - for (Aggregation aggregation : aggs) { - if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - NumericMetricsAggregation.SingleValue singleValueAggregation = - (NumericMetricsAggregation.SingleValue) aggregation; - data.put(singleValueAggregation.getName(), !Double.isInfinite(singleValueAggregation.value()) - ? singleValueAggregation.getValueAsString() : "null"); - } else if (aggregation instanceof Percentiles) { - Percentiles percentiles = (Percentiles) aggregation; - - data.put(percentiles.getName(), StreamSupport - .stream(percentiles.spliterator(), false) - .collect( - Collectors.toMap( - Percentile::getPercent, - Percentile::getValue, - (v1, v2) -> { - throw new IllegalArgumentException( - String.format("Duplicate key for values %s and %s", v1, v2)); - }, - TreeMap::new))); - } else { - throw new SqlFeatureNotImplementedException("Aggregation type " + aggregation.getType() - + " is not yet implemented"); - } - } - - return data; - } + Integer limit = cursor.getLimit(); + long rowsLeft = rowsLeft(cursor.getFetchSize(), cursor.getLimit()); + if (rowsLeft <= 0) { + // close the cursor + String scrollId = cursor.getScrollId(); + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(scrollId).get(); + if (!clearScrollResponse.isSucceeded()) { + 
Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.error("Error closing the cursor context {} ", scrollId); + } + return; + } + + cursor.setRowsLeft(rowsLeft); + cursor.setIndexPattern(indexName); + cursor.setFieldAliasMap(fieldAliasMap()); + cursor.setColumns(columns); + this.totalHits = limit != null && limit < internalTotalHits ? limit : internalTotalHits; + } + + private long rowsLeft(Integer fetchSize, Integer limit) { + long rowsLeft = 0; + long totalHits = internalTotalHits; + if (limit != null && limit < totalHits) { + rowsLeft = limit - fetchSize; + } else { + rowsLeft = totalHits - fetchSize; + } + return rowsLeft; + } + + private List populateRows(SearchHits searchHits) { + List rows = new ArrayList<>(); + Set newKeys = new HashSet<>(head); + for (SearchHit hit : searchHits) { + Map rowSource = hit.getSourceAsMap(); + List result; + + if (!isJoinQuery()) { + // Row already flatten in source in join. And join doesn't support nested fields for now. 
+ rowSource = flatRow(head, rowSource); + rowSource.put(SCORE, hit.getScore()); + + for (Map.Entry field : hit.getFields().entrySet()) { + rowSource.put(field.getKey(), field.getValue().getValue()); + } + if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { + dateFieldFormatter.applyJDBCDateFormat(rowSource); + } + result = flatNestedField(newKeys, rowSource, hit.getInnerHits()); + } else { + if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { + dateFieldFormatter.applyJDBCDateFormat(rowSource); + } + result = new ArrayList<>(); + result.add(new DataRows.Row(rowSource)); + } + + rows.addAll(result); + } + + return rows; + } + + private List populateRows(Aggregations aggregations) { + List rows = new ArrayList<>(); + List aggs = aggregations.asList(); + if (hasTermAggregations(aggs)) { + Terms terms = (Terms) aggs.get(0); + String field = terms.getName(); + + for (Terms.Bucket bucket : terms.getBuckets()) { + List aggRows = new ArrayList<>(); + getAggsData(bucket, aggRows, addMap(field, bucket.getKey())); + + rows.addAll(aggRows); + } + } else { + // This occurs for cases like "SELECT AVG(age) FROM bank" where we aggregate in SELECT with no + // GROUP BY + rows.add(new DataRows.Row(addNumericAggregation(aggs, new HashMap<>()))); + } + return rows; + } + + /** + * This recursive method goes through the buckets iterated through populateRows() and flattens any + * inner aggregations and puts that data as a Map into a Row (this nested aggregation happens when + * we GROUP BY multiple fields) + */ + private void getAggsData( + Terms.Bucket bucket, List aggRows, Map data) { + List aggs = bucket.getAggregations().asList(); + if (hasTermAggregations(aggs)) { + Terms terms = (Terms) aggs.get(0); + String field = terms.getName(); + + for (Terms.Bucket innerBucket : terms.getBuckets()) { + data.put(field, innerBucket.getKey()); + getAggsData(innerBucket, aggRows, data); + data.remove(field); + } + } else { + data = addNumericAggregation(aggs, data); + 
aggRows.add(new DataRows.Row(new HashMap<>(data))); + } + } + + /** + * hasTermAggregations() checks for specific type of aggregation, one that contains Terms. This is + * the case when the aggregations contains the contents of a GROUP BY field. + * + *

If the aggregation contains the data for an aggregation function (ex. COUNT(*)), the items + * in the list will be of instance InternalValueCount, InternalSum, etc. (depending on the + * aggregation function) and will be considered a base case of getAggsData() which will add that + * data to the Row (if it exists). + */ + private boolean hasTermAggregations(List aggs) { + return !aggs.isEmpty() && aggs.get(0) instanceof Terms; + } + + /** + * Adds the contents of Aggregation (specifically the NumericMetricsAggregation.SingleValue + * instance) from bucket.aggregations into the data map + */ + private Map addNumericAggregation( + List aggs, Map data) { + for (Aggregation aggregation : aggs) { + if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + NumericMetricsAggregation.SingleValue singleValueAggregation = + (NumericMetricsAggregation.SingleValue) aggregation; + data.put( + singleValueAggregation.getName(), + !Double.isInfinite(singleValueAggregation.value()) + ? singleValueAggregation.getValueAsString() + : "null"); + } else if (aggregation instanceof Percentiles) { + Percentiles percentiles = (Percentiles) aggregation; + + data.put( + percentiles.getName(), + StreamSupport.stream(percentiles.spliterator(), false) + .collect( + Collectors.toMap( + Percentile::getPercent, + Percentile::getValue, + (v1, v2) -> { + throw new IllegalArgumentException( + String.format("Duplicate key for values %s and %s", v1, v2)); + }, + TreeMap::new))); + } else { + throw new SqlFeatureNotImplementedException( + "Aggregation type " + aggregation.getType() + " is not yet implemented"); + } + } + + return data; + } /** + *

      * Simplifies the structure of row's source Map by flattening it, making the full path of an object the key
      * and the Object it refers to the value. This handles the case of regular object since nested objects will not
      * be in hit.source but rather in hit.innerHits
@@ -741,6 +735,7 @@ private Map addNumericAggregation(List aggs, Map
      * Return:
      * flattenedRow = {comment.likes: 2}
+     * 
*/ @SuppressWarnings("unchecked") private Map flatRow(List keys, Map row) { @@ -750,31 +745,33 @@ private Map flatRow(List keys, Map row) boolean found = true; Object currentObj = row; - for (String splitKey : splitKeys) { - // This check is made to prevent Cast Exception as an ArrayList of objects can be in the sourceMap - if (!(currentObj instanceof Map)) { - found = false; - break; - } - - Map currentMap = (Map) currentObj; - if (!currentMap.containsKey(splitKey)) { - found = false; - break; - } - - currentObj = currentMap.get(splitKey); - } - - if (found) { - flattenedRow.put(key, currentObj); - } + for (String splitKey : splitKeys) { + // This check is made to prevent Cast Exception as an ArrayList of objects can be in the + // sourceMap + if (!(currentObj instanceof Map)) { + found = false; + break; } - return flattenedRow; + Map currentMap = (Map) currentObj; + if (!currentMap.containsKey(splitKey)) { + found = false; + break; + } + + currentObj = currentMap.get(splitKey); + } + + if (found) { + flattenedRow.put(key, currentObj); + } } + return flattenedRow; + } + /** + *
      * If innerHits associated with column name exists, flatten both the inner field name and the inner rows in it.
      * 

* Sample input: @@ -792,36 +789,38 @@ private Map flatRow(List keys, Map row) * } * }] * } + *

*/ private List flatNestedField(Set newKeys, Map row, Map innerHits) { List result = new ArrayList<>(); result.add(new DataRows.Row(row)); - if (innerHits == null) { - return result; - } - - for (String colName : innerHits.keySet()) { - SearchHit[] colValue = innerHits.get(colName).getHits(); - doFlatNestedFieldName(colName, colValue, newKeys); - result = doFlatNestedFieldValue(colName, colValue, result); - } + if (innerHits == null) { + return result; + } - return result; + for (String colName : innerHits.keySet()) { + SearchHit[] colValue = innerHits.get(colName).getHits(); + doFlatNestedFieldName(colName, colValue, newKeys); + result = doFlatNestedFieldValue(colName, colValue, result); } - private void doFlatNestedFieldName(String colName, SearchHit[] colValue, Set keys) { - Map innerRow = colValue[0].getSourceAsMap(); - for (String field : innerRow.keySet()) { - String innerName = colName + "." + field; - keys.add(innerName); - } + return result; + } - keys.remove(colName); + private void doFlatNestedFieldName(String colName, SearchHit[] colValue, Set keys) { + Map innerRow = colValue[0].getSourceAsMap(); + for (String field : innerRow.keySet()) { + String innerName = colName + "." + field; + keys.add(innerName); } + keys.remove(colName); + } + /** + *
      * Do Cartesian Product between current outer row and inner rows by nested loop and remove original outer row.
      * 

* Sample input: @@ -843,6 +842,7 @@ private void doFlatNestedFieldName(String colName, SearchHit[] colValue, Set */ private List doFlatNestedFieldValue(String colName, SearchHit[] colValue, List rows) { List result = new ArrayList<>(); @@ -851,28 +851,28 @@ private List doFlatNestedFieldValue(String colName, SearchHit[] co Map innerRow = hit.getSourceAsMap(); Map copy = new HashMap<>(); - for (String field : row.getContents().keySet()) { - copy.put(field, row.getData(field)); - } - for (String field : innerRow.keySet()) { - copy.put(colName + "." + field, innerRow.get(field)); - } - - copy.remove(colName); - result.add(new DataRows.Row(copy)); - } + for (String field : row.getContents().keySet()) { + copy.put(field, row.getData(field)); + } + for (String field : innerRow.keySet()) { + copy.put(colName + "." + field, innerRow.get(field)); } - return result; + copy.remove(colName); + result.add(new DataRows.Row(copy)); + } } - private Map addMap(String field, Object term) { - Map data = new HashMap<>(); - data.put(field, term); - return data; - } + return result; + } - private boolean isJoinQuery() { - return query instanceof JoinSelect; - } + private Map addMap(String field, Object term) { + Map data = new HashMap<>(); + data.put(field, term); + return data; + } + + private boolean isJoinQuery() { + return query instanceof JoinSelect; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java index 21a9a6054f..56c5f96af5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -34,301 +33,354 @@ import 
org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.maker.Maker; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public class NestedLoopsElasticExecutor extends ElasticJoinExecutor { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); - private final NestedLoopsElasticRequestBuilder nestedLoopsRequest; - private final Client client; + private final NestedLoopsElasticRequestBuilder nestedLoopsRequest; + private final Client client; - public NestedLoopsElasticExecutor(Client client, NestedLoopsElasticRequestBuilder nestedLoops) { - super(nestedLoops); - this.client = client; - this.nestedLoopsRequest = nestedLoops; - } + public NestedLoopsElasticExecutor(Client client, NestedLoopsElasticRequestBuilder nestedLoops) { + super(nestedLoops); + this.client = client; + this.nestedLoopsRequest = nestedLoops; + } - @Override - protected List innerRun() throws SqlParseException { - List combinedResults = new ArrayList<>(); - int totalLimit = nestedLoopsRequest.getTotalLimit(); - int multiSearchMaxSize = nestedLoopsRequest.getMultiSearchMaxSize(); - Select secondTableSelect = nestedLoopsRequest.getSecondTable().getOriginalSelect(); - Where originalSecondTableWhere = secondTableSelect.getWhere(); + @Override + protected List innerRun() throws SqlParseException { + List combinedResults = new ArrayList<>(); + int totalLimit = nestedLoopsRequest.getTotalLimit(); + int multiSearchMaxSize = nestedLoopsRequest.getMultiSearchMaxSize(); + Select secondTableSelect = nestedLoopsRequest.getSecondTable().getOriginalSelect(); + Where originalSecondTableWhere = secondTableSelect.getWhere(); - orderConditions(nestedLoopsRequest.getFirstTable().getAlias(), nestedLoopsRequest.getSecondTable().getAlias()); + orderConditions( + nestedLoopsRequest.getFirstTable().getAlias(), + nestedLoopsRequest.getSecondTable().getAlias()); + if 
(!BackOffRetryStrategy.isHealthy()) { + throw new IllegalStateException("Memory circuit is broken"); + } + FetchWithScrollResponse fetchWithScrollResponse = + firstFetch(this.nestedLoopsRequest.getFirstTable()); + SearchResponse firstTableResponse = fetchWithScrollResponse.getResponse(); + boolean needScrollForFirstTable = fetchWithScrollResponse.isNeedScrollForFirstTable(); + + int currentCombinedResults = 0; + boolean finishedWithFirstTable = false; + + while (totalLimit > currentCombinedResults && !finishedWithFirstTable) { + + SearchHit[] hits = firstTableResponse.getHits().getHits(); + boolean finishedMultiSearches = hits.length == 0; + int currentHitsIndex = 0; + + while (!finishedMultiSearches) { + MultiSearchRequest multiSearchRequest = + createMultiSearchRequest( + multiSearchMaxSize, + nestedLoopsRequest.getConnectedWhere(), + hits, + secondTableSelect, + originalSecondTableWhere, + currentHitsIndex); + int multiSearchSize = multiSearchRequest.requests().size(); if (!BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit is broken"); + throw new IllegalStateException("Memory circuit is broken"); } - FetchWithScrollResponse fetchWithScrollResponse = firstFetch(this.nestedLoopsRequest.getFirstTable()); - SearchResponse firstTableResponse = fetchWithScrollResponse.getResponse(); - boolean needScrollForFirstTable = fetchWithScrollResponse.isNeedScrollForFirstTable(); - - int currentCombinedResults = 0; - boolean finishedWithFirstTable = false; - - while (totalLimit > currentCombinedResults && !finishedWithFirstTable) { - - SearchHit[] hits = firstTableResponse.getHits().getHits(); - boolean finishedMultiSearches = hits.length == 0; - int currentHitsIndex = 0; - - while (!finishedMultiSearches) { - MultiSearchRequest multiSearchRequest = createMultiSearchRequest(multiSearchMaxSize, - nestedLoopsRequest.getConnectedWhere(), hits, secondTableSelect, - originalSecondTableWhere, currentHitsIndex); - int multiSearchSize = 
multiSearchRequest.requests().size(); - if (!BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit is broken"); - } - currentCombinedResults = combineResultsFromMultiResponses(combinedResults, totalLimit, - currentCombinedResults, hits, currentHitsIndex, multiSearchRequest); - currentHitsIndex += multiSearchSize; - finishedMultiSearches = currentHitsIndex >= hits.length - 1 || currentCombinedResults >= totalLimit; - } - - if (hits.length < MAX_RESULTS_ON_ONE_FETCH) { - needScrollForFirstTable = false; - } - - if (!finishedWithFirstTable) { - if (needScrollForFirstTable) { - if (!BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit is broken"); - } - firstTableResponse = client.prepareSearchScroll(firstTableResponse.getScrollId()) - .setScroll(new TimeValue(600000)).get(); - } else { - finishedWithFirstTable = true; - } - } - + currentCombinedResults = + combineResultsFromMultiResponses( + combinedResults, + totalLimit, + currentCombinedResults, + hits, + currentHitsIndex, + multiSearchRequest); + currentHitsIndex += multiSearchSize; + finishedMultiSearches = + currentHitsIndex >= hits.length - 1 || currentCombinedResults >= totalLimit; + } + + if (hits.length < MAX_RESULTS_ON_ONE_FETCH) { + needScrollForFirstTable = false; + } + + if (!finishedWithFirstTable) { + if (needScrollForFirstTable) { + if (!BackOffRetryStrategy.isHealthy()) { + throw new IllegalStateException("Memory circuit is broken"); + } + firstTableResponse = + client + .prepareSearchScroll(firstTableResponse.getScrollId()) + .setScroll(new TimeValue(600000)) + .get(); + } else { + finishedWithFirstTable = true; } - return combinedResults; + } } - - private int combineResultsFromMultiResponses(List combinedResults, int totalLimit, - int currentCombinedResults, SearchHit[] hits, int currentIndex, - MultiSearchRequest multiSearchRequest) { - MultiSearchResponse.Item[] responses = new 
OpenSearchClient(client).multiSearch(multiSearchRequest); - String t1Alias = nestedLoopsRequest.getFirstTable().getAlias(); - String t2Alias = nestedLoopsRequest.getSecondTable().getAlias(); - - for (int j = 0; j < responses.length && currentCombinedResults < totalLimit; j++) { - SearchHit hitFromFirstTable = hits[currentIndex + j]; - onlyReturnedFields(hitFromFirstTable.getSourceAsMap(), - nestedLoopsRequest.getFirstTable().getReturnedFields(), - nestedLoopsRequest.getFirstTable().getOriginalSelect().isSelectAll()); - - SearchResponse multiItemResponse = responses[j].getResponse(); - - if (multiItemResponse == null) { - continue; - } - - updateMetaSearchResults(multiItemResponse); - - //todo: if responseForHit.getHits.length < responseForHit.getTotalHits(). need to fetch more! - SearchHits responseForHit = multiItemResponse.getHits(); - - if (responseForHit.getHits().length == 0 && nestedLoopsRequest.getJoinType() - == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { - SearchHit unmachedResult = createUnmachedResult(nestedLoopsRequest.getSecondTable().getReturnedFields(), - currentCombinedResults, t1Alias, t2Alias, hitFromFirstTable); - combinedResults.add(unmachedResult); - currentCombinedResults++; - continue; - } - - for (SearchHit matchedHit : responseForHit.getHits()) { - SearchHit searchHit = getMergedHit(currentCombinedResults, t1Alias, t2Alias, hitFromFirstTable, - matchedHit); - combinedResults.add(searchHit); - currentCombinedResults++; - if (currentCombinedResults >= totalLimit) { - break; - } - } - if (currentCombinedResults >= totalLimit) { - break; - } - + return combinedResults; + } + + private int combineResultsFromMultiResponses( + List combinedResults, + int totalLimit, + int currentCombinedResults, + SearchHit[] hits, + int currentIndex, + MultiSearchRequest multiSearchRequest) { + MultiSearchResponse.Item[] responses = + new OpenSearchClient(client).multiSearch(multiSearchRequest); + String t1Alias = nestedLoopsRequest.getFirstTable().getAlias(); 
+ String t2Alias = nestedLoopsRequest.getSecondTable().getAlias(); + + for (int j = 0; j < responses.length && currentCombinedResults < totalLimit; j++) { + SearchHit hitFromFirstTable = hits[currentIndex + j]; + onlyReturnedFields( + hitFromFirstTable.getSourceAsMap(), + nestedLoopsRequest.getFirstTable().getReturnedFields(), + nestedLoopsRequest.getFirstTable().getOriginalSelect().isSelectAll()); + + SearchResponse multiItemResponse = responses[j].getResponse(); + + if (multiItemResponse == null) { + continue; + } + + updateMetaSearchResults(multiItemResponse); + + // todo: if responseForHit.getHits.length < responseForHit.getTotalHits(). need to fetch more! + SearchHits responseForHit = multiItemResponse.getHits(); + + if (responseForHit.getHits().length == 0 + && nestedLoopsRequest.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { + SearchHit unmachedResult = + createUnmachedResult( + nestedLoopsRequest.getSecondTable().getReturnedFields(), + currentCombinedResults, + t1Alias, + t2Alias, + hitFromFirstTable); + combinedResults.add(unmachedResult); + currentCombinedResults++; + continue; + } + + for (SearchHit matchedHit : responseForHit.getHits()) { + SearchHit searchHit = + getMergedHit(currentCombinedResults, t1Alias, t2Alias, hitFromFirstTable, matchedHit); + combinedResults.add(searchHit); + currentCombinedResults++; + if (currentCombinedResults >= totalLimit) { + break; } - return currentCombinedResults; - } - - private SearchHit getMergedHit(int currentCombinedResults, String t1Alias, String t2Alias, - SearchHit hitFromFirstTable, SearchHit matchedHit) { - onlyReturnedFields(matchedHit.getSourceAsMap(), nestedLoopsRequest.getSecondTable().getReturnedFields(), - nestedLoopsRequest.getSecondTable().getOriginalSelect().isSelectAll()); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - matchedHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(currentCombinedResults, hitFromFirstTable.getId() + "|" - + matchedHit.getId(), documentFields, metaFields); - searchHit.sourceRef(hitFromFirstTable.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(hitFromFirstTable.getSourceAsMap()); - - mergeSourceAndAddAliases(matchedHit.getSourceAsMap(), searchHit, t1Alias, t2Alias); - return searchHit; + } + if (currentCombinedResults >= totalLimit) { + break; + } } - - private MultiSearchRequest createMultiSearchRequest(int multiSearchMaxSize, Where connectedWhere, SearchHit[] hits, - Select secondTableSelect, Where originalWhere, int currentIndex) - throws SqlParseException { - MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); - for (int i = currentIndex; i < currentIndex + multiSearchMaxSize && i < hits.length; i++) { - Map hitFromFirstTableAsMap = hits[i].getSourceAsMap(); - Where newWhere = Where.newInstance(); - if (originalWhere != null) { - newWhere.addWhere(originalWhere); - } - if (connectedWhere != null) { - Where connectedWhereCloned = null; - try { - connectedWhereCloned = (Where) connectedWhere.clone(); - } catch (CloneNotSupportedException e) { - e.printStackTrace(); - } - updateValuesOnWhereConditions(hitFromFirstTableAsMap, connectedWhereCloned); - newWhere.addWhere(connectedWhereCloned); - } - - -// for(Condition c : conditions){ -// Object value = deepSearchInMap(hitFromFirstTableAsMap,c.getValue().toString()); -// Condition conditionWithValue = new Condition(Where.CONN.AND,c.getName(),c.getOpear(),value); -// newWhere.addWhere(conditionWithValue); -// } - //using the 2nd table select and DefaultAction because we can't just change query on request - // (need to create lot of requests) - if (newWhere.getWheres().size() != 0) { - secondTableSelect.setWhere(newWhere); - } - DefaultQueryAction action = new DefaultQueryAction(this.client, secondTableSelect); - 
action.explain(); - SearchRequestBuilder secondTableRequest = action.getRequestBuilder(); - Integer secondTableHintLimit = this.nestedLoopsRequest.getSecondTable().getHintLimit(); - if (secondTableHintLimit != null && secondTableHintLimit <= MAX_RESULTS_ON_ONE_FETCH) { - secondTableRequest.setSize(secondTableHintLimit); - } - multiSearchRequest.add(secondTableRequest); + return currentCombinedResults; + } + + private SearchHit getMergedHit( + int currentCombinedResults, + String t1Alias, + String t2Alias, + SearchHit hitFromFirstTable, + SearchHit matchedHit) { + onlyReturnedFields( + matchedHit.getSourceAsMap(), + nestedLoopsRequest.getSecondTable().getReturnedFields(), + nestedLoopsRequest.getSecondTable().getOriginalSelect().isSelectAll()); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + matchedHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit( + currentCombinedResults, + hitFromFirstTable.getId() + "|" + matchedHit.getId(), + documentFields, + metaFields); + searchHit.sourceRef(hitFromFirstTable.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(hitFromFirstTable.getSourceAsMap()); + + mergeSourceAndAddAliases(matchedHit.getSourceAsMap(), searchHit, t1Alias, t2Alias); + return searchHit; + } + + private MultiSearchRequest createMultiSearchRequest( + int multiSearchMaxSize, + Where connectedWhere, + SearchHit[] hits, + Select secondTableSelect, + Where originalWhere, + int currentIndex) + throws SqlParseException { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + for (int i = currentIndex; i < currentIndex + multiSearchMaxSize && i < hits.length; i++) { + Map hitFromFirstTableAsMap = hits[i].getSourceAsMap(); + Where newWhere = Where.newInstance(); + if (originalWhere != null) { + 
newWhere.addWhere(originalWhere); + } + if (connectedWhere != null) { + Where connectedWhereCloned = null; + try { + connectedWhereCloned = (Where) connectedWhere.clone(); + } catch (CloneNotSupportedException e) { + e.printStackTrace(); } - return multiSearchRequest; + updateValuesOnWhereConditions(hitFromFirstTableAsMap, connectedWhereCloned); + newWhere.addWhere(connectedWhereCloned); + } + + // for(Condition c : conditions){ + // Object value = + // deepSearchInMap(hitFromFirstTableAsMap,c.getValue().toString()); + // Condition conditionWithValue = new + // Condition(Where.CONN.AND,c.getName(),c.getOpear(),value); + // newWhere.addWhere(conditionWithValue); + // } + // using the 2nd table select and DefaultAction because we can't just change query on request + // (need to create lot of requests) + if (newWhere.getWheres().size() != 0) { + secondTableSelect.setWhere(newWhere); + } + DefaultQueryAction action = new DefaultQueryAction(this.client, secondTableSelect); + action.explain(); + SearchRequestBuilder secondTableRequest = action.getRequestBuilder(); + Integer secondTableHintLimit = this.nestedLoopsRequest.getSecondTable().getHintLimit(); + if (secondTableHintLimit != null && secondTableHintLimit <= MAX_RESULTS_ON_ONE_FETCH) { + secondTableRequest.setSize(secondTableHintLimit); + } + multiSearchRequest.add(secondTableRequest); } - - private void updateValuesOnWhereConditions(Map hit, Where where) { - if (where instanceof Condition) { - Condition c = (Condition) where; - Object value = deepSearchInMap(hit, c.getValue().toString()); - if (value == null) { - value = Maker.NONE; - } - c.setValue(value); - } - for (Where innerWhere : where.getWheres()) { - updateValuesOnWhereConditions(hit, innerWhere); - } + return multiSearchRequest; + } + + private void updateValuesOnWhereConditions(Map hit, Where where) { + if (where instanceof Condition) { + Condition c = (Condition) where; + Object value = deepSearchInMap(hit, c.getValue().toString()); + if (value == null) 
{ + value = Maker.NONE; + } + c.setValue(value); } - - private FetchWithScrollResponse firstFetch(TableInJoinRequestBuilder tableRequest) { - Integer hintLimit = tableRequest.getHintLimit(); - boolean needScrollForFirstTable = false; - SearchResponse responseWithHits; - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - - responseWithHits = tableRequest.getRequestBuilder().setSize(hintLimit).get(); - needScrollForFirstTable = false; - } else { - //scroll request with max. - responseWithHits = scrollOneTimeWithMax(client, tableRequest); - if (responseWithHits.getHits().getTotalHits() != null - && responseWithHits.getHits().getTotalHits().value < MAX_RESULTS_ON_ONE_FETCH) { - needScrollForFirstTable = true; - } - } - - updateMetaSearchResults(responseWithHits); - return new FetchWithScrollResponse(responseWithHits, needScrollForFirstTable); + for (Where innerWhere : where.getWheres()) { + updateValuesOnWhereConditions(hit, innerWhere); } - - - private void orderConditions(String t1Alias, String t2Alias) { - orderConditionRecursive(t1Alias, t2Alias, nestedLoopsRequest.getConnectedWhere()); -// Collection conditions = nestedLoopsRequest.getT1FieldToCondition().values(); -// for(Condition c : conditions){ -// //TODO: support all orders and for each OPEAR find his related OPEAR (< is > , EQ is EQ ,etc..) 
-// if(!c.getName().startsWith(t2Alias+".") || !c.getValue().toString().startsWith(t1Alias +".")) -// throw new RuntimeException("On NestedLoops currently only supported Ordered conditions -// t2.field2 OPEAR t1.field1) , badCondition was:" + c); -// c.setName(c.getName().replaceFirst(t2Alias+".","")); -// c.setValue(c.getValue().toString().replaceFirst(t1Alias+ ".", "")); -// } + } + + private FetchWithScrollResponse firstFetch(TableInJoinRequestBuilder tableRequest) { + Integer hintLimit = tableRequest.getHintLimit(); + boolean needScrollForFirstTable = false; + SearchResponse responseWithHits; + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + + responseWithHits = tableRequest.getRequestBuilder().setSize(hintLimit).get(); + needScrollForFirstTable = false; + } else { + // scroll request with max. + responseWithHits = scrollOneTimeWithMax(client, tableRequest); + if (responseWithHits.getHits().getTotalHits() != null + && responseWithHits.getHits().getTotalHits().value < MAX_RESULTS_ON_ONE_FETCH) { + needScrollForFirstTable = true; + } } - private void orderConditionRecursive(String t1Alias, String t2Alias, Where where) { - if (where == null) { - return; - } - if (where instanceof Condition) { - Condition c = (Condition) where; - if (shouldReverse(c, t1Alias, t2Alias)) { - try { - reverseOrderOfCondition(c, t1Alias, t2Alias); - return; - } catch (SqlParseException e) { - //Do nothing here to continue using original logic below. - //The condition is not changed here. 
- } - } - if (!c.getName().startsWith(t2Alias + ".") || !c.getValue().toString().startsWith(t1Alias + ".")) { - throw new RuntimeException("On NestedLoops currently only supported Ordered conditions " - + "(t2.field2 OPEAR t1.field1) , badCondition was:" + c); - } - c.setName(c.getName().replaceFirst(t2Alias + ".", "")); - c.setValue(c.getValue().toString().replaceFirst(t1Alias + ".", "")); - return; - } else { - for (Where innerWhere : where.getWheres()) { - orderConditionRecursive(t1Alias, t2Alias, innerWhere); - } + updateMetaSearchResults(responseWithHits); + return new FetchWithScrollResponse(responseWithHits, needScrollForFirstTable); + } + + private void orderConditions(String t1Alias, String t2Alias) { + orderConditionRecursive(t1Alias, t2Alias, nestedLoopsRequest.getConnectedWhere()); + // Collection conditions = + // nestedLoopsRequest.getT1FieldToCondition().values(); + // for(Condition c : conditions){ + // //TODO: support all orders and for each OPEAR find his related OPEAR (< is > , EQ + // is EQ ,etc..) + // if(!c.getName().startsWith(t2Alias+".") || + // !c.getValue().toString().startsWith(t1Alias +".")) + // throw new RuntimeException("On NestedLoops currently only supported Ordered + // conditions + // t2.field2 OPEAR t1.field1) , badCondition was:" + c); + // c.setName(c.getName().replaceFirst(t2Alias+".","")); + // c.setValue(c.getValue().toString().replaceFirst(t1Alias+ ".", "")); + // } + } + + private void orderConditionRecursive(String t1Alias, String t2Alias, Where where) { + if (where == null) { + return; + } + if (where instanceof Condition) { + Condition c = (Condition) where; + if (shouldReverse(c, t1Alias, t2Alias)) { + try { + reverseOrderOfCondition(c, t1Alias, t2Alias); + return; + } catch (SqlParseException e) { + // Do nothing here to continue using original logic below. + // The condition is not changed here. 
} + } + if (!c.getName().startsWith(t2Alias + ".") + || !c.getValue().toString().startsWith(t1Alias + ".")) { + throw new RuntimeException( + "On NestedLoops currently only supported Ordered conditions " + + "(t2.field2 OPEAR t1.field1) , badCondition was:" + + c); + } + c.setName(c.getName().replaceFirst(t2Alias + ".", "")); + c.setValue(c.getValue().toString().replaceFirst(t1Alias + ".", "")); + return; + } else { + for (Where innerWhere : where.getWheres()) { + orderConditionRecursive(t1Alias, t2Alias, innerWhere); + } } - - private Boolean shouldReverse(Condition cond, String t1Alias, String t2Alias) { - return cond.getName().startsWith(t1Alias + ".") && cond.getValue().toString().startsWith(t2Alias + ".") - && cond.getOPERATOR().isSimpleOperator(); + } + + private Boolean shouldReverse(Condition cond, String t1Alias, String t2Alias) { + return cond.getName().startsWith(t1Alias + ".") + && cond.getValue().toString().startsWith(t2Alias + ".") + && cond.getOPERATOR().isSimpleOperator(); + } + + private void reverseOrderOfCondition(Condition cond, String t1Alias, String t2Alias) + throws SqlParseException { + cond.setOPERATOR(cond.getOPERATOR().simpleReverse()); + String name = cond.getName(); + cond.setName(cond.getValue().toString().replaceFirst(t2Alias + ".", "")); + cond.setValue(name.replaceFirst(t1Alias + ".", "")); + } + + private class FetchWithScrollResponse { + private SearchResponse response; + private boolean needScrollForFirstTable; + + private FetchWithScrollResponse(SearchResponse response, boolean needScrollForFirstTable) { + this.response = response; + this.needScrollForFirstTable = needScrollForFirstTable; } - private void reverseOrderOfCondition(Condition cond, String t1Alias, String t2Alias) throws SqlParseException { - cond.setOPERATOR(cond.getOPERATOR().simpleReverse()); - String name = cond.getName(); - cond.setName(cond.getValue().toString().replaceFirst(t2Alias + ".", "")); - cond.setValue(name.replaceFirst(t1Alias + ".", "")); + public 
SearchResponse getResponse() { + return response; } - - private class FetchWithScrollResponse { - private SearchResponse response; - private boolean needScrollForFirstTable; - - private FetchWithScrollResponse(SearchResponse response, boolean needScrollForFirstTable) { - this.response = response; - this.needScrollForFirstTable = needScrollForFirstTable; - } - - public SearchResponse getResponse() { - return response; - } - - public boolean isNeedScrollForFirstTable() { - return needScrollForFirstTable; - } - + public boolean isNeedScrollForFirstTable() { + return needScrollForFirstTable; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java index 5702d397d5..f4b2f5421d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.util.List; @@ -12,31 +11,30 @@ import org.opensearch.sql.legacy.query.planner.core.QueryPlanner; /** - * Executor for generic QueryPlanner execution. This executor is just acting as adaptor to integrate with - * existing framework. In future, QueryPlanner should be executed by itself and leave the response sent back - * or other post-processing logic to ElasticDefaultRestExecutor. + * Executor for generic QueryPlanner execution. This executor is just acting as adaptor to integrate + * with existing framework. In future, QueryPlanner should be executed by itself and leave the + * response sent back or other post-processing logic to ElasticDefaultRestExecutor. 
*/ class QueryPlanElasticExecutor extends ElasticJoinExecutor { - private final QueryPlanner queryPlanner; - - QueryPlanElasticExecutor(HashJoinQueryPlanRequestBuilder request) { - super(request); - this.queryPlanner = request.plan(); - } - - @Override - protected List innerRun() { - List result = queryPlanner.execute(); - populateMetaResult(); - return result; - } - - private void populateMetaResult() { - metaResults.addTotalNumOfShards(queryPlanner.getMetaResult().getTotalNumOfShards()); - metaResults.addSuccessfulShards(queryPlanner.getMetaResult().getSuccessfulShards()); - metaResults.addFailedShards(queryPlanner.getMetaResult().getFailedShards()); - metaResults.updateTimeOut(queryPlanner.getMetaResult().isTimedOut()); - } - + private final QueryPlanner queryPlanner; + + QueryPlanElasticExecutor(HashJoinQueryPlanRequestBuilder request) { + super(request); + this.queryPlanner = request.plan(); + } + + @Override + protected List innerRun() { + List result = queryPlanner.execute(); + populateMetaResult(); + return result; + } + + private void populateMetaResult() { + metaResults.addTotalNumOfShards(queryPlanner.getMetaResult().getTotalNumOfShards()); + metaResults.addSuccessfulShards(queryPlanner.getMetaResult().getSuccessfulShards()); + metaResults.addFailedShards(queryPlanner.getMetaResult().getFailedShards()); + metaResults.updateTimeOut(queryPlanner.getMetaResult().isTimedOut()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java index 0955de9b88..10a1555874 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java @@ -3,42 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.util.ArrayList; import java.util.List; import 
org.opensearch.search.SearchHit; -/** - * Created by Eliran on 28/8/2015. - */ +/** Created by Eliran on 28/8/2015. */ public class SearchHitsResult { - private List searchHits; - private boolean matchedWithOtherTable; + private List searchHits; + private boolean matchedWithOtherTable; - public SearchHitsResult() { - searchHits = new ArrayList<>(); - } + public SearchHitsResult() { + searchHits = new ArrayList<>(); + } - public SearchHitsResult(List searchHits, boolean matchedWithOtherTable) { - this.searchHits = searchHits; - this.matchedWithOtherTable = matchedWithOtherTable; - } + public SearchHitsResult(List searchHits, boolean matchedWithOtherTable) { + this.searchHits = searchHits; + this.matchedWithOtherTable = matchedWithOtherTable; + } - public List getSearchHits() { - return searchHits; - } + public List getSearchHits() { + return searchHits; + } - public void setSearchHits(List searchHits) { - this.searchHits = searchHits; - } + public void setSearchHits(List searchHits) { + this.searchHits = searchHits; + } - public boolean isMatchedWithOtherTable() { - return matchedWithOtherTable; - } + public boolean isMatchedWithOtherTable() { + return matchedWithOtherTable; + } - public void setMatchedWithOtherTable(boolean matchedWithOtherTable) { - this.matchedWithOtherTable = matchedWithOtherTable; - } + public void setMatchedWithOtherTable(boolean matchedWithOtherTable) { + this.matchedWithOtherTable = matchedWithOtherTable; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java index 0be4dfa786..ea2a698921 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java @@ -3,39 +3,36 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.expression.core.operator; import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * The definition of the Scalar Operation. - */ +/** The definition of the Scalar Operation. */ @Getter @RequiredArgsConstructor public enum ScalarOperation { - ADD("add"), - SUBTRACT("subtract"), - MULTIPLY("multiply"), - DIVIDE("divide"), - MODULES("modules"), - ABS("abs"), - ACOS("acos"), - ASIN("asin"), - ATAN("atan"), - ATAN2("atan2"), - TAN("tan"), - CBRT("cbrt"), - CEIL("ceil"), - COS("cos"), - COSH("cosh"), - EXP("exp"), - FLOOR("floor"), - LN("ln"), - LOG("log"), - LOG2("log2"), - LOG10("log10"); + ADD("add"), + SUBTRACT("subtract"), + MULTIPLY("multiply"), + DIVIDE("divide"), + MODULES("modules"), + ABS("abs"), + ACOS("acos"), + ASIN("asin"), + ATAN("atan"), + ATAN2("atan2"), + TAN("tan"), + CBRT("cbrt"), + CEIL("ceil"), + COS("cos"), + COSH("cosh"), + EXP("exp"), + FLOOR("floor"), + LN("ln"), + LOG("log"), + LOG2("log2"), + LOG10("log10"); - private final String name; + private final String name; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java index bfb3a75afb..c0c3360afc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java @@ -3,26 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import java.util.List; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * Scalar Operator is a function has one or more arguments and return a single value. - */ +/** Scalar Operator is a function has one or more arguments and return a single value. */ public interface ScalarOperator { - /** - * Apply the operator to the input arguments. - * @param valueList argument list. 
- * @return result. - */ - ExprValue apply(List valueList); + /** + * Apply the operator to the input arguments. + * + * @param valueList argument list. + * @return result. + */ + ExprValue apply(List valueList); - /** - * The name of the operator. - * @return name. - */ - String name(); + /** + * The name of the operator. + * + * @return name. + */ + String name(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java index 085034bcd2..ee6d373f8f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java @@ -3,40 +3,38 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public class NumericMetric extends Metric { - private Counter counter; - - public NumericMetric(String name, Counter counter) { - super(name); - this.counter = counter; - } + private Counter counter; - public String getName() { - return super.getName(); - } + public NumericMetric(String name, Counter counter) { + super(name); + this.counter = counter; + } - public Counter getCounter() { - return counter; - } + public String getName() { + return super.getName(); + } - public void increment() { - counter.increment(); - } + public Counter getCounter() { + return counter; + } - public void increment(long n) { - counter.add(n); - } + public void increment() { + counter.increment(); + } - public T getValue() { - return counter.getValue(); - } + public void increment(long n) { + counter.add(n); + } - public void clear() { - counter.reset(); - } + public T getValue() { + return counter.getValue(); + } + public void clear() { + counter.reset(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java index 1c624d7ffe..c7b9ec56ec 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.time.Clock; @@ -13,87 +12,85 @@ import org.opensearch.sql.legacy.esdomain.LocalClusterState; /** - * Rolling counter. The count is refreshed every interval. In every interval the count is cumulative. + * Rolling counter. The count is refreshed every interval. In every interval the count is + * cumulative. */ public class RollingCounter implements Counter { - private final long capacity; - private final long window; - private final long interval; - private final Clock clock; - private final ConcurrentSkipListMap time2CountWin; - private final LongAdder count; - - public RollingCounter() { - this( - LocalClusterState.state().getSettingValue( - Settings.Key.METRICS_ROLLING_WINDOW), - LocalClusterState.state().getSettingValue( - Settings.Key.METRICS_ROLLING_INTERVAL)); - } - - public RollingCounter(long window, long interval, Clock clock) { - this.window = window; - this.interval = interval; - this.clock = clock; - time2CountWin = new ConcurrentSkipListMap<>(); - count = new LongAdder(); - capacity = window / interval * 2; - } - - public RollingCounter(long window, long interval) { - this(window, interval, Clock.systemDefaultZone()); + private final long capacity; + private final long window; + private final long interval; + private final Clock clock; + private final ConcurrentSkipListMap time2CountWin; + private final LongAdder count; + + public RollingCounter() { + this( + LocalClusterState.state().getSettingValue(Settings.Key.METRICS_ROLLING_WINDOW), + LocalClusterState.state().getSettingValue(Settings.Key.METRICS_ROLLING_INTERVAL)); + } + + public RollingCounter(long window, long interval, Clock clock) { + this.window = window; + this.interval = interval; + this.clock = clock; + time2CountWin = new 
ConcurrentSkipListMap<>(); + count = new LongAdder(); + capacity = window / interval * 2; + } + + public RollingCounter(long window, long interval) { + this(window, interval, Clock.systemDefaultZone()); + } + + @Override + public void increment() { + add(1L); + } + + @Override + public void add(long n) { + trim(); + time2CountWin.compute(getKey(clock.millis()), (k, v) -> (v == null) ? n : v + n); + } + + @Override + public Long getValue() { + return getValue(getPreKey(clock.millis())); + } + + public long getValue(long key) { + Long res = time2CountWin.get(key); + if (res == null) { + return 0; } - @Override - public void increment() { - add(1L); - } + return res; + } - @Override - public void add(long n) { - trim(); - time2CountWin.compute(getKey(clock.millis()), (k, v) -> (v == null) ? n : v + n); - } + public long getSum() { + return count.longValue(); + } - @Override - public Long getValue() { - return getValue(getPreKey(clock.millis())); + private void trim() { + if (time2CountWin.size() > capacity) { + time2CountWin.headMap(getKey(clock.millis() - window * 1000)).clear(); } + } - public long getValue(long key) { - Long res = time2CountWin.get(key); - if (res == null) { - return 0; - } + private long getKey(long millis) { + return millis / 1000 / this.interval; + } - return res; - } - - public long getSum() { - return count.longValue(); - } + private long getPreKey(long millis) { + return getKey(millis) - 1; + } - private void trim() { - if (time2CountWin.size() > capacity) { - time2CountWin.headMap(getKey(clock.millis() - window * 1000)).clear(); - } - } - - private long getKey(long millis) { - return millis / 1000 / this.interval; - } - - private long getPreKey(long millis) { - return getKey(millis) - 1; - } - - public int size() { - return time2CountWin.size(); - } - - public void reset() { - time2CountWin.clear(); - } + public int size() { + return time2CountWin.size(); + } + public void reset() { + time2CountWin.clear(); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java index d9b7886310..4deeba1309 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,111 +17,107 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 12/11/2015. - */ +/** Created by Eliran on 12/11/2015. */ public class NestedType { - public String field; - public String path; - public Where where; - private boolean reverse; - private boolean simple; - private final BucketPath bucketPath = new BucketPath(); - - public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { - if (!(expr instanceof SQLMethodInvokeExpr)) { - return false; - } - SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - String methodNameLower = method.getMethodName().toLowerCase(); - if (!(methodNameLower.equals("nested") || methodNameLower.equals("reverse_nested"))) { - return false; - } + public String field; + public String path; + public Where where; + private boolean reverse; + private boolean simple; + private final BucketPath bucketPath = new BucketPath(); + + public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { + if (!(expr instanceof SQLMethodInvokeExpr)) { + return false; + } + SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; + String methodNameLower = method.getMethodName().toLowerCase(); + if (!(methodNameLower.equals("nested") || methodNameLower.equals("reverse_nested"))) { + return false; + } - reverse = methodNameLower.equals("reverse_nested"); + reverse = methodNameLower.equals("reverse_nested"); - List parameters = method.getParameters(); - if 
(parameters.size() != 2 && parameters.size() != 1) { - throw new IllegalArgumentException("on nested object only allowed 2 parameters " - + "(field,path)/(path,conditions..) or 1 parameter (field) "); - } + List parameters = method.getParameters(); + if (parameters.size() != 2 && parameters.size() != 1) { + throw new IllegalArgumentException( + "on nested object only allowed 2 parameters " + + "(field,path)/(path,conditions..) or 1 parameter (field) "); + } - String field = Util.extendedToString(parameters.get(0)); - this.field = field; - if (parameters.size() == 1) { - //calc path myself.. - if (!field.contains(".")) { - if (!reverse) { - throw new IllegalArgumentException("Illegal nested field name: " + field); - } else { - this.path = null; - this.simple = true; - } - } else { - int lastDot = field.lastIndexOf("."); - this.path = field.substring(0, lastDot); - this.simple = true; - - } - - } else if (parameters.size() == 2) { - SQLExpr secondParameter = parameters.get(1); - if (secondParameter instanceof SQLTextLiteralExpr || secondParameter instanceof SQLIdentifierExpr - || secondParameter instanceof SQLPropertyExpr) { - - String pathString = Util.extendedToString(secondParameter); - if (pathString.equals("")) { - this.path = null; - } else { - this.path = pathString; - } - this.simple = true; - } else { - this.path = field; - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(secondParameter, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - this.where = where; - simple = false; - } + String field = Util.extendedToString(parameters.get(0)); + this.field = field; + if (parameters.size() == 1) { + // calc path myself.. 
+ if (!field.contains(".")) { + if (!reverse) { + throw new IllegalArgumentException("Illegal nested field name: " + field); + } else { + this.path = null; + this.simple = true; } - - return true; + } else { + int lastDot = field.lastIndexOf("."); + this.path = field.substring(0, lastDot); + this.simple = true; + } + + } else if (parameters.size() == 2) { + SQLExpr secondParameter = parameters.get(1); + if (secondParameter instanceof SQLTextLiteralExpr + || secondParameter instanceof SQLIdentifierExpr + || secondParameter instanceof SQLPropertyExpr) { + + String pathString = Util.extendedToString(secondParameter); + if (pathString.equals("")) { + this.path = null; + } else { + this.path = pathString; + } + this.simple = true; + } else { + this.path = field; + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(secondParameter, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + this.where = where; + simple = false; + } } - public boolean isSimple() { - return simple; - } + return true; + } - public boolean isReverse() { - return reverse; - } + public boolean isSimple() { + return simple; + } - /** - * Return the name of the Nested Aggregation. - */ - public String getNestedAggName() { - return field + "@NESTED"; - } + public boolean isReverse() { + return reverse; + } - /** - * Return the name of the Filter Aggregation - */ - public String getFilterAggName() { - return field + "@FILTER"; - } + /** Return the name of the Nested Aggregation. 
*/ + public String getNestedAggName() { + return field + "@NESTED"; + } - public void addBucketPath(Path path) { - bucketPath.add(path); - } + /** Return the name of the Filter Aggregation */ + public String getFilterAggName() { + return field + "@FILTER"; + } - public String getBucketPath() { - return bucketPath.getBucketPath(); - } + public void addBucketPath(Path path) { + bucketPath.add(path); + } + + public String getBucketPath() { + return bucketPath.getBucketPath(); + } /** + *

      * Return true if the filed is the nested filed.
      * For example, the mapping
      * {
@@ -138,6 +133,7 @@ public String getBucketPath() {
      * 

* If the filed is projects, return true. * If the filed is projects.name, return false. + *

*/ public boolean isNestedField() { return !field.contains(".") && field.equalsIgnoreCase(path); diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java index 3eb4fecf67..3f9b12ca84 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -16,96 +15,92 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 11/12/2015. - */ +/** Created by Eliran on 11/12/2015. */ public class ScriptFilter { - private String script; - private Map args; - private ScriptType scriptType; + private String script; + private Map args; + private ScriptType scriptType; - public ScriptFilter() { + public ScriptFilter() { - args = null; - scriptType = ScriptType.INLINE; - } + args = null; + scriptType = ScriptType.INLINE; + } - public ScriptFilter(String script, Map args, ScriptType scriptType) { - this.script = script; - this.args = args; - this.scriptType = scriptType; - } + public ScriptFilter(String script, Map args, ScriptType scriptType) { + this.script = script; + this.args = args; + this.scriptType = scriptType; + } - public boolean tryParseFromMethodExpr(SQLMethodInvokeExpr expr) throws SqlParseException { - if (!expr.getMethodName().toLowerCase().equals("script")) { - return false; - } - List methodParameters = expr.getParameters(); - if (methodParameters.size() == 0) { - return false; - } - script = Util.extendedToString(methodParameters.get(0)); - - if (methodParameters.size() == 1) { - return true; - } - - args = new HashMap<>(); - for (int i = 1; i < methodParameters.size(); i++) { - - SQLExpr innerExpr = methodParameters.get(i); - if (!(innerExpr 
instanceof SQLBinaryOpExpr)) { - return false; - } - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) innerExpr; - if (!binaryOpExpr.getOperator().getName().equals("=")) { - return false; - } - - SQLExpr right = binaryOpExpr.getRight(); - Object value = Util.expr2Object(right); - String key = Util.extendedToString(binaryOpExpr.getLeft()); - if (key.equals("script_type")) { - parseAndUpdateScriptType(value.toString()); - } else { - args.put(key, value); - } - - } - return true; + public boolean tryParseFromMethodExpr(SQLMethodInvokeExpr expr) throws SqlParseException { + if (!expr.getMethodName().toLowerCase().equals("script")) { + return false; } - - private void parseAndUpdateScriptType(String scriptType) { - String scriptTypeUpper = scriptType.toUpperCase(); - switch (scriptTypeUpper) { - case "INLINE": - this.scriptType = ScriptType.INLINE; - break; - case "INDEXED": - case "STORED": - this.scriptType = ScriptType.STORED; - break; - } + List methodParameters = expr.getParameters(); + if (methodParameters.size() == 0) { + return false; } + script = Util.extendedToString(methodParameters.get(0)); - public boolean containsParameters() { - return args != null && args.size() > 0; + if (methodParameters.size() == 1) { + return true; } - public String getScript() { - return script; + args = new HashMap<>(); + for (int i = 1; i < methodParameters.size(); i++) { + + SQLExpr innerExpr = methodParameters.get(i); + if (!(innerExpr instanceof SQLBinaryOpExpr)) { + return false; + } + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) innerExpr; + if (!binaryOpExpr.getOperator().getName().equals("=")) { + return false; + } + + SQLExpr right = binaryOpExpr.getRight(); + Object value = Util.expr2Object(right); + String key = Util.extendedToString(binaryOpExpr.getLeft()); + if (key.equals("script_type")) { + parseAndUpdateScriptType(value.toString()); + } else { + args.put(key, value); + } } - - public ScriptType getScriptType() { - return scriptType; + return true; + } + + 
private void parseAndUpdateScriptType(String scriptType) { + String scriptTypeUpper = scriptType.toUpperCase(); + switch (scriptTypeUpper) { + case "INLINE": + this.scriptType = ScriptType.INLINE; + break; + case "INDEXED": + case "STORED": + this.scriptType = ScriptType.STORED; + break; } + } - public Map getArgs() { - return args; - } + public boolean containsParameters() { + return args != null && args.size() > 0; + } - public void setArgs(Map args) { - this.args = args; - } + public String getScript() { + return script; + } + + public ScriptType getScriptType() { + return scriptType; + } + + public Map getArgs() { + return args; + } + public void setArgs(Map args) { + this.args = args; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java index 85becdaa53..62a63b320f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java @@ -3,11 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; -/** - * Created by allwefantasy on 9/2/16. - */ -public class SelectParser { -} +/** Created by allwefantasy on 9/2/16. */ +public class SelectParser {} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java index cd8056aed1..12176d4fa7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.opensearch.core.rest.RestStatus.OK; @@ -42,8 +41,10 @@ /** * New SQL REST action handler. 
This will not be registered to OpenSearch unless: - * 1) we want to test new SQL engine; - * 2) all old functionalities migrated to new query engine and legacy REST handler removed. + *
    + *
  1. we want to test new SQL engine; + *
  2. all old functionalities migrated to new query engine and legacy REST handler removed. + *
*/ public class RestSQLQueryAction extends BaseRestHandler { @@ -53,9 +54,7 @@ public class RestSQLQueryAction extends BaseRestHandler { private final Injector injector; - /** - * Constructor of RestSQLQueryAction. - */ + /** Constructor of RestSQLQueryAction. */ public RestSQLQueryAction(Injector injector) { super(); this.injector = injector; @@ -105,7 +104,7 @@ public RestChannelConsumer prepareRequest( fallbackHandler)); } // If close request, sqlService.closeCursor - else { + else { return channel -> sqlService.execute( request, @@ -123,8 +122,7 @@ private ResponseListener fallBackListener( return new ResponseListener() { @Override public void onResponse(T response) { - LOG.info("[{}] Request is handled by new SQL query engine", - QueryContext.getRequestId()); + LOG.info("[{}] Request is handled by new SQL query engine", QueryContext.getRequestId()); next.onResponse(response); } @@ -144,12 +142,13 @@ private ResponseListener createExplainResponseListener( return new ResponseListener<>() { @Override public void onResponse(ExplainResponse response) { - JsonResponseFormatter formatter = new JsonResponseFormatter<>(PRETTY) { - @Override - protected Object buildJsonObject(ExplainResponse response) { - return response; - } - }; + JsonResponseFormatter formatter = + new JsonResponseFormatter<>(PRETTY) { + @Override + protected Object buildJsonObject(ExplainResponse response) { + return response; + } + }; sendResponse(channel, OK, formatter.format(response), formatter.contentType()); } @@ -179,9 +178,12 @@ private ResponseListener createQueryResponseListener( return new ResponseListener() { @Override public void onResponse(QueryResponse response) { - sendResponse(channel, OK, - formatter.format(new QueryResult(response.getSchema(), response.getResults(), - response.getCursor())), formatter.contentType()); + sendResponse( + channel, + OK, + formatter.format( + new QueryResult(response.getSchema(), response.getResults(), response.getCursor())), + 
formatter.contentType()); } @Override @@ -191,9 +193,9 @@ public void onFailure(Exception e) { }; } - private void sendResponse(RestChannel channel, RestStatus status, String content, String contentType) { - channel.sendResponse(new BytesRestResponse( - status, contentType, content)); + private void sendResponse( + RestChannel channel, RestStatus status, String content, String contentType) { + channel.sendResponse(new BytesRestResponse(status, contentType, content)); } private static void logAndPublishMetrics(Exception e) { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java index 69ed469fed..fc8934dd73 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.opensearch.core.rest.RestStatus.BAD_REQUEST; @@ -67,233 +66,263 @@ public class RestSqlAction extends BaseRestHandler { - private static final Logger LOG = LogManager.getLogger(RestSqlAction.class); - - private final boolean allowExplicitIndex; - - private static final Predicate CONTAINS_SUBQUERY = Pattern.compile("\\(\\s*select ").asPredicate(); - - /** - * API endpoint path - */ - public static final String QUERY_API_ENDPOINT = "/_plugins/_sql"; - public static final String EXPLAIN_API_ENDPOINT = QUERY_API_ENDPOINT + "/_explain"; - public static final String CURSOR_CLOSE_ENDPOINT = QUERY_API_ENDPOINT + "/close"; - public static final String LEGACY_QUERY_API_ENDPOINT = "/_opendistro/_sql"; - public static final String LEGACY_EXPLAIN_API_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/_explain"; - public static final String LEGACY_CURSOR_CLOSE_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/close"; - - /** - * New SQL query request handler. 
- */ - private final RestSQLQueryAction newSqlQueryHandler; - - public RestSqlAction(Settings settings, Injector injector) { - super(); - this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); - this.newSqlQueryHandler = new RestSQLQueryAction(injector); - } - - @Override - public List routes() { - return ImmutableList.of(); - } - - @Override - public List replacedRoutes() { - return ImmutableList.of( - new ReplacedRoute( - RestRequest.Method.POST, QUERY_API_ENDPOINT, - RestRequest.Method.POST, LEGACY_QUERY_API_ENDPOINT), - new ReplacedRoute( - RestRequest.Method.POST, EXPLAIN_API_ENDPOINT, - RestRequest.Method.POST, LEGACY_EXPLAIN_API_ENDPOINT), - new ReplacedRoute( - RestRequest.Method.POST, CURSOR_CLOSE_ENDPOINT, - RestRequest.Method.POST, LEGACY_CURSOR_CLOSE_ENDPOINT)); - } - - @Override - public String getName() { - return "sql_action"; - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - Metrics.getInstance().getNumericalMetric(MetricName.REQ_TOTAL).increment(); - Metrics.getInstance().getNumericalMetric(MetricName.REQ_COUNT_TOTAL).increment(); - - QueryContext.addRequestId(); - - try { - if (!isSQLFeatureEnabled()) { - throw new SQLFeatureDisabledException( - "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is false" - ); - } - - final SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(request); - if (isLegacyCursor(sqlRequest)) { - if (isExplainRequest(request)) { - throw new IllegalArgumentException("Invalid request. 
Cannot explain cursor"); - } else { - LOG.info("[{}] Cursor request {}: {}", QueryContext.getRequestId(), request.uri(), sqlRequest.cursor()); - return channel -> handleCursorRequest(request, sqlRequest.cursor(), client, channel); - } - } - - LOG.info("[{}] Incoming request {}", QueryContext.getRequestId(), request.uri()); - - Format format = SqlRequestParam.getFormat(request.params()); - - // Route request to new query engine if it's supported already - SQLQueryRequest newSqlRequest = new SQLQueryRequest(sqlRequest.getJsonContent(), - sqlRequest.getSql(), request.path(), request.params(), sqlRequest.cursor()); - return newSqlQueryHandler.prepareRequest(newSqlRequest, - (restChannel, exception) -> { - try{ - if (newSqlRequest.isExplainRequest()) { - LOG.info("Request is falling back to old SQL engine due to: " + exception.getMessage()); - } - LOG.info("[{}] Request {} is not supported and falling back to old SQL engine", - QueryContext.getRequestId(), newSqlRequest); - LOG.info("Request Query: {}", QueryDataAnonymizer.anonymizeData(sqlRequest.getSql())); - QueryAction queryAction = explainRequest(client, sqlRequest, format); - executeSqlRequest(request, queryAction, client, restChannel); - } catch (Exception e) { - logAndPublishMetrics(e); - reportError(restChannel, e, isClientError(e) ? BAD_REQUEST : SERVICE_UNAVAILABLE); - } - }, - (restChannel, exception) -> { - logAndPublishMetrics(exception); - reportError(restChannel, exception, isClientError(exception) ? - BAD_REQUEST : SERVICE_UNAVAILABLE); - }); - } catch (Exception e) { - logAndPublishMetrics(e); - return channel -> reportError(channel, e, isClientError(e) ? BAD_REQUEST : SERVICE_UNAVAILABLE); - } - } - - - /** - * @param sqlRequest client request - * @return true if this cursor was generated by the legacy engine, false otherwise. 
- */ - private static boolean isLegacyCursor(SqlRequest sqlRequest) { - String cursor = sqlRequest.cursor(); - return cursor != null - && CursorType.getById(cursor.substring(0, 1)) != CursorType.NULL; - } - - @Override - protected Set responseParams() { - Set responseParams = new HashSet<>(super.responseParams()); - responseParams.addAll(Arrays.asList("sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); - return responseParams; - } - - private void handleCursorRequest(final RestRequest request, final String cursor, final Client client, - final RestChannel channel) throws Exception { - CursorAsyncRestExecutor cursorRestExecutor = CursorActionRequestRestExecutorFactory.createExecutor( - request, cursor, SqlRequestParam.getFormat(request.params())); - cursorRestExecutor.execute(client, request.params(), channel); - } - - private static void logAndPublishMetrics(final Exception e) { - if (isClientError(e)) { - LOG.error(QueryContext.getRequestId() + " Client side error during query execution", e); - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else { - LOG.error(QueryContext.getRequestId() + " Server side error during query execution", e); - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - } - - private static QueryAction explainRequest(final NodeClient client, final SqlRequest sqlRequest, Format format) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - - ColumnTypeProvider typeProvider = performAnalysis(sqlRequest.getSql()); - - final QueryAction queryAction = new SearchDao(client) - .explain(new QueryActionRequest(sqlRequest.getSql(), typeProvider, format)); - queryAction.setSqlRequest(sqlRequest); - queryAction.setFormat(format); - queryAction.setColumnTypeProvider(typeProvider); - return queryAction; - } - - private void executeSqlRequest(final RestRequest request, final QueryAction queryAction, 
final Client client, - final RestChannel channel) throws Exception { - Map params = request.params(); + private static final Logger LOG = LogManager.getLogger(RestSqlAction.class); + + private final boolean allowExplicitIndex; + + private static final Predicate CONTAINS_SUBQUERY = + Pattern.compile("\\(\\s*select ").asPredicate(); + + /** API endpoint path */ + public static final String QUERY_API_ENDPOINT = "/_plugins/_sql"; + + public static final String EXPLAIN_API_ENDPOINT = QUERY_API_ENDPOINT + "/_explain"; + public static final String CURSOR_CLOSE_ENDPOINT = QUERY_API_ENDPOINT + "/close"; + public static final String LEGACY_QUERY_API_ENDPOINT = "/_opendistro/_sql"; + public static final String LEGACY_EXPLAIN_API_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/_explain"; + public static final String LEGACY_CURSOR_CLOSE_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/close"; + + /** New SQL query request handler. */ + private final RestSQLQueryAction newSqlQueryHandler; + + public RestSqlAction(Settings settings, Injector injector) { + super(); + this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); + this.newSqlQueryHandler = new RestSQLQueryAction(injector); + } + + @Override + public List routes() { + return ImmutableList.of(); + } + + @Override + public List replacedRoutes() { + return ImmutableList.of( + new ReplacedRoute( + RestRequest.Method.POST, QUERY_API_ENDPOINT, + RestRequest.Method.POST, LEGACY_QUERY_API_ENDPOINT), + new ReplacedRoute( + RestRequest.Method.POST, EXPLAIN_API_ENDPOINT, + RestRequest.Method.POST, LEGACY_EXPLAIN_API_ENDPOINT), + new ReplacedRoute( + RestRequest.Method.POST, CURSOR_CLOSE_ENDPOINT, + RestRequest.Method.POST, LEGACY_CURSOR_CLOSE_ENDPOINT)); + } + + @Override + public String getName() { + return "sql_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + Metrics.getInstance().getNumericalMetric(MetricName.REQ_TOTAL).increment(); + 
Metrics.getInstance().getNumericalMetric(MetricName.REQ_COUNT_TOTAL).increment(); + + QueryContext.addRequestId(); + + try { + if (!isSQLFeatureEnabled()) { + throw new SQLFeatureDisabledException( + "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is" + + " false"); + } + + final SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(request); + if (isLegacyCursor(sqlRequest)) { if (isExplainRequest(request)) { - final String jsonExplanation = queryAction.explain().explain(); - String result; - if (SqlRequestParam.isPrettyFormat(params)) { - result = JsonPrettyFormatter.format(jsonExplanation); - } else { - result = jsonExplanation; - } - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", result)); + throw new IllegalArgumentException("Invalid request. Cannot explain cursor"); } else { - RestExecutor restExecutor = ActionRequestRestExecutorFactory.createExecutor( - SqlRequestParam.getFormat(params), - queryAction); - //doing this hack because OpenSearch throws exception for un-consumed props - Map additionalParams = new HashMap<>(); - for (String paramName : responseParams()) { - if (request.hasParam(paramName)) { - additionalParams.put(paramName, request.param(paramName)); - } - } - restExecutor.execute(client, additionalParams, queryAction, channel); + LOG.info( + "[{}] Cursor request {}: {}", + QueryContext.getRequestId(), + request.uri(), + sqlRequest.cursor()); + return channel -> handleCursorRequest(request, sqlRequest.cursor(), client, channel); } + } + + LOG.info("[{}] Incoming request {}", QueryContext.getRequestId(), request.uri()); + + Format format = SqlRequestParam.getFormat(request.params()); + + // Route request to new query engine if it's supported already + SQLQueryRequest newSqlRequest = + new SQLQueryRequest( + sqlRequest.getJsonContent(), + sqlRequest.getSql(), + request.path(), + request.params(), + sqlRequest.cursor()); + return newSqlQueryHandler.prepareRequest( + newSqlRequest, + 
(restChannel, exception) -> { + try { + if (newSqlRequest.isExplainRequest()) { + LOG.info( + "Request is falling back to old SQL engine due to: " + exception.getMessage()); + } + LOG.info( + "[{}] Request {} is not supported and falling back to old SQL engine", + QueryContext.getRequestId(), + newSqlRequest); + LOG.info("Request Query: {}", QueryDataAnonymizer.anonymizeData(sqlRequest.getSql())); + QueryAction queryAction = explainRequest(client, sqlRequest, format); + executeSqlRequest(request, queryAction, client, restChannel); + } catch (Exception e) { + logAndPublishMetrics(e); + reportError(restChannel, e, isClientError(e) ? BAD_REQUEST : SERVICE_UNAVAILABLE); + } + }, + (restChannel, exception) -> { + logAndPublishMetrics(exception); + reportError( + restChannel, + exception, + isClientError(exception) ? BAD_REQUEST : SERVICE_UNAVAILABLE); + }); + } catch (Exception e) { + logAndPublishMetrics(e); + return channel -> + reportError(channel, e, isClientError(e) ? BAD_REQUEST : SERVICE_UNAVAILABLE); } - - private static boolean isExplainRequest(final RestRequest request) { - return request.path().endsWith("/_explain"); - } - - private static boolean isClientError(Exception e) { - return e instanceof NullPointerException // NPE is hard to differentiate but more likely caused by bad query - || e instanceof SqlParseException - || e instanceof ParserException - || e instanceof SQLFeatureNotSupportedException - || e instanceof SQLFeatureDisabledException - || e instanceof IllegalArgumentException - || e instanceof IndexNotFoundException - || e instanceof VerificationException - || e instanceof SqlAnalysisException - || e instanceof SyntaxCheckException - || e instanceof SemanticCheckException - || e instanceof ExpressionEvaluationException; - } - - private void sendResponse(final RestChannel channel, final String message, final RestStatus status) { - channel.sendResponse(new BytesRestResponse(status, message)); - } - - private void reportError(final RestChannel 
channel, final Exception e, final RestStatus status) { - sendResponse(channel, ErrorMessageFactory.createErrorMessage(e, status.getStatus()).toString(), status); - } - - private boolean isSQLFeatureEnabled() { - boolean isSqlEnabled = LocalClusterState.state().getSettingValue( - org.opensearch.sql.common.setting.Settings.Key.SQL_ENABLED); - return allowExplicitIndex && isSqlEnabled; + } + + /** + * @param sqlRequest client request + * @return true if this cursor was generated by the legacy engine, false otherwise. + */ + private static boolean isLegacyCursor(SqlRequest sqlRequest) { + String cursor = sqlRequest.cursor(); + return cursor != null && CursorType.getById(cursor.substring(0, 1)) != CursorType.NULL; + } + + @Override + protected Set responseParams() { + Set responseParams = new HashSet<>(super.responseParams()); + responseParams.addAll( + Arrays.asList( + "sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); + return responseParams; + } + + private void handleCursorRequest( + final RestRequest request, + final String cursor, + final Client client, + final RestChannel channel) + throws Exception { + CursorAsyncRestExecutor cursorRestExecutor = + CursorActionRequestRestExecutorFactory.createExecutor( + request, cursor, SqlRequestParam.getFormat(request.params())); + cursorRestExecutor.execute(client, request.params(), channel); + } + + private static void logAndPublishMetrics(final Exception e) { + if (isClientError(e)) { + LOG.error(QueryContext.getRequestId() + " Client side error during query execution", e); + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else { + LOG.error(QueryContext.getRequestId() + " Server side error during query execution", e); + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); } - - private static ColumnTypeProvider performAnalysis(String sql) { - LocalClusterState clusterState = LocalClusterState.state(); - 
SqlAnalysisConfig config = new SqlAnalysisConfig(false, false, 200); - - OpenSearchLegacySqlAnalyzer analyzer = new OpenSearchLegacySqlAnalyzer(config); - Optional outputColumnType = analyzer.analyze(sql, clusterState); - if (outputColumnType.isPresent()) { - return new ColumnTypeProvider(outputColumnType.get()); - } else { - return new ColumnTypeProvider(); + } + + private static QueryAction explainRequest( + final NodeClient client, final SqlRequest sqlRequest, Format format) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + + ColumnTypeProvider typeProvider = performAnalysis(sqlRequest.getSql()); + + final QueryAction queryAction = + new SearchDao(client) + .explain(new QueryActionRequest(sqlRequest.getSql(), typeProvider, format)); + queryAction.setSqlRequest(sqlRequest); + queryAction.setFormat(format); + queryAction.setColumnTypeProvider(typeProvider); + return queryAction; + } + + private void executeSqlRequest( + final RestRequest request, + final QueryAction queryAction, + final Client client, + final RestChannel channel) + throws Exception { + Map params = request.params(); + if (isExplainRequest(request)) { + final String jsonExplanation = queryAction.explain().explain(); + String result; + if (SqlRequestParam.isPrettyFormat(params)) { + result = JsonPrettyFormatter.format(jsonExplanation); + } else { + result = jsonExplanation; + } + channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", result)); + } else { + RestExecutor restExecutor = + ActionRequestRestExecutorFactory.createExecutor( + SqlRequestParam.getFormat(params), queryAction); + // doing this hack because OpenSearch throws exception for un-consumed props + Map additionalParams = new HashMap<>(); + for (String paramName : responseParams()) { + if (request.hasParam(paramName)) { + additionalParams.put(paramName, request.param(paramName)); } + } + restExecutor.execute(client, additionalParams, queryAction, channel); + } + } + 
+ private static boolean isExplainRequest(final RestRequest request) { + return request.path().endsWith("/_explain"); + } + + private static boolean isClientError(Exception e) { + return e + instanceof + NullPointerException // NPE is hard to differentiate but more likely caused by bad query + || e instanceof SqlParseException + || e instanceof ParserException + || e instanceof SQLFeatureNotSupportedException + || e instanceof SQLFeatureDisabledException + || e instanceof IllegalArgumentException + || e instanceof IndexNotFoundException + || e instanceof VerificationException + || e instanceof SqlAnalysisException + || e instanceof SyntaxCheckException + || e instanceof SemanticCheckException + || e instanceof ExpressionEvaluationException; + } + + private void sendResponse( + final RestChannel channel, final String message, final RestStatus status) { + channel.sendResponse(new BytesRestResponse(status, message)); + } + + private void reportError(final RestChannel channel, final Exception e, final RestStatus status) { + sendResponse( + channel, ErrorMessageFactory.createErrorMessage(e, status.getStatus()).toString(), status); + } + + private boolean isSQLFeatureEnabled() { + boolean isSqlEnabled = + LocalClusterState.state() + .getSettingValue(org.opensearch.sql.common.setting.Settings.Key.SQL_ENABLED); + return allowExplicitIndex && isSqlEnabled; + } + + private static ColumnTypeProvider performAnalysis(String sql) { + LocalClusterState clusterState = LocalClusterState.state(); + SqlAnalysisConfig config = new SqlAnalysisConfig(false, false, 200); + + OpenSearchLegacySqlAnalyzer analyzer = new OpenSearchLegacySqlAnalyzer(config); + Optional outputColumnType = analyzer.analyze(sql, clusterState); + if (outputColumnType.isPresent()) { + return new ColumnTypeProvider(outputColumnType.get()); + } else { + return new ColumnTypeProvider(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java index cf3a3e3f96..bc0f3c73b8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.opensearch.core.rest.RestStatus.SERVICE_UNAVAILABLE; @@ -27,64 +26,69 @@ import org.opensearch.sql.legacy.metrics.Metrics; /** - * Currently this interface is for node level. - * Cluster level is coming up soon. https://github.com/opendistro-for-elasticsearch/sql/issues/41 + * Currently this interface is for node level. Cluster level is coming up soon. + * https://github.com/opendistro-for-elasticsearch/sql/issues/41 */ public class RestSqlStatsAction extends BaseRestHandler { - private static final Logger LOG = LogManager.getLogger(RestSqlStatsAction.class); - - /** - * API endpoint path - */ - public static final String STATS_API_ENDPOINT = "/_plugins/_sql/stats"; - public static final String LEGACY_STATS_API_ENDPOINT = "/_opendistro/_sql/stats"; - - public RestSqlStatsAction(Settings settings, RestController restController) { - super(); - } - - @Override - public String getName() { - return "sql_stats_action"; - } - - @Override - public List routes() { - return ImmutableList.of(); - } - - @Override - public List replacedRoutes() { - return ImmutableList.of( - new ReplacedRoute( - RestRequest.Method.POST, STATS_API_ENDPOINT, - RestRequest.Method.POST, LEGACY_STATS_API_ENDPOINT), - new ReplacedRoute( - RestRequest.Method.GET, STATS_API_ENDPOINT, - RestRequest.Method.GET, LEGACY_STATS_API_ENDPOINT)); + private static final Logger LOG = LogManager.getLogger(RestSqlStatsAction.class); + + /** API endpoint path */ + public static final String STATS_API_ENDPOINT = "/_plugins/_sql/stats"; + + public static final String LEGACY_STATS_API_ENDPOINT = "/_opendistro/_sql/stats"; + 
+ public RestSqlStatsAction(Settings settings, RestController restController) { + super(); + } + + @Override + public String getName() { + return "sql_stats_action"; + } + + @Override + public List routes() { + return ImmutableList.of(); + } + + @Override + public List replacedRoutes() { + return ImmutableList.of( + new ReplacedRoute( + RestRequest.Method.POST, STATS_API_ENDPOINT, + RestRequest.Method.POST, LEGACY_STATS_API_ENDPOINT), + new ReplacedRoute( + RestRequest.Method.GET, STATS_API_ENDPOINT, + RestRequest.Method.GET, LEGACY_STATS_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + + QueryContext.addRequestId(); + + try { + return channel -> + channel.sendResponse( + new BytesRestResponse(RestStatus.OK, Metrics.getInstance().collectToJSON())); + } catch (Exception e) { + LOG.error("Failed during Query SQL STATS Action.", e); + + return channel -> + channel.sendResponse( + new BytesRestResponse( + SERVICE_UNAVAILABLE, + ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()) + .toString())); } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - - QueryContext.addRequestId(); - - try { - return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, - Metrics.getInstance().collectToJSON())); - } catch (Exception e) { - LOG.error("Failed during Query SQL STATS Action.", e); - - return channel -> channel.sendResponse(new BytesRestResponse(SERVICE_UNAVAILABLE, - ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()).toString())); - } - } - - @Override - protected Set responseParams() { - Set responseParams = new HashSet<>(super.responseParams()); - responseParams.addAll(Arrays.asList("sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); - return responseParams; - } - + } + + @Override + protected Set responseParams() { + Set responseParams = new 
HashSet<>(super.responseParams()); + responseParams.addAll( + Arrays.asList( + "sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); + return responseParams; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java index a18895723c..ea4e08281c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import java.sql.SQLFeatureNotSupportedException; @@ -16,39 +15,36 @@ import org.opensearch.sql.legacy.query.OpenSearchActionFactory; import org.opensearch.sql.legacy.query.QueryAction; - public class SearchDao { - private static final Set END_TABLE_MAP = new HashSet<>(); - - static { - END_TABLE_MAP.add("limit"); - END_TABLE_MAP.add("order"); - END_TABLE_MAP.add("where"); - END_TABLE_MAP.add("group"); - - } - - private Client client = null; - - public SearchDao(Client client) { - this.client = client; - } - - public Client getClient() { - return client; - } - - /** - * Prepare action And transform sql - * into OpenSearch ActionRequest - * - * @param queryActionRequest SQL query action request to execute. 
- * @return OpenSearch request - * @throws SqlParseException - */ - public QueryAction explain(QueryActionRequest queryActionRequest) - throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { - return OpenSearchActionFactory.create(client, queryActionRequest); - } + private static final Set END_TABLE_MAP = new HashSet<>(); + + static { + END_TABLE_MAP.add("limit"); + END_TABLE_MAP.add("order"); + END_TABLE_MAP.add("where"); + END_TABLE_MAP.add("group"); + } + + private Client client = null; + + public SearchDao(Client client) { + this.client = client; + } + + public Client getClient() { + return client; + } + + /** + * Prepare action And transform sql into OpenSearch ActionRequest + * + * @param queryActionRequest SQL query action request to execute. + * @return OpenSearch request + * @throws SqlParseException + */ + public QueryAction explain(QueryActionRequest queryActionRequest) + throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { + return OpenSearchActionFactory.create(client, queryActionRequest); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java index de7256d2cf..b9a7c9f218 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import static org.opensearch.sql.legacy.domain.IndexStatement.StatementType; @@ -65,188 +64,193 @@ public class OpenSearchActionFactory { - public static QueryAction create(Client client, String sql) - throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { - return create(client, new QueryActionRequest(sql, new ColumnTypeProvider(), Format.JSON)); - } - - /** - * Create the 
compatible Query object - * based on the SQL query. - * - * @param request The SQL query. - * @return Query object. - */ - public static QueryAction create(Client client, QueryActionRequest request) - throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { - String sql = request.getSql(); - // Remove line breaker anywhere and semicolon at the end - sql = sql.replaceAll("\\R", " ").trim(); - if (sql.endsWith(";")) { - sql = sql.substring(0, sql.length() - 1); - } - - switch (getFirstWord(sql)) { - case "SELECT": - SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); - - RewriteRuleExecutor ruleExecutor = RewriteRuleExecutor.builder() - .withRule(new SQLExprParentSetterRule()) - .withRule(new OrdinalRewriterRule(sql)) - .withRule(new UnquoteIdentifierRule()) - .withRule(new TableAliasPrefixRemoveRule()) - .withRule(new SubQueryRewriteRule()) - .build(); - ruleExecutor.executeOn(sqlExpr); - sqlExpr.accept(new NestedFieldRewriter()); - - if (isMulti(sqlExpr)) { - sqlExpr.accept(new TermFieldRewriter(TermRewriterFilter.MULTI_QUERY)); - MultiQuerySelect multiSelect = - new SqlParser().parseMultiSelect((SQLUnionQuery) sqlExpr.getSubQuery().getQuery()); - return new MultiQueryAction(client, multiSelect); - } else if (isJoin(sqlExpr, sql)) { - new JoinRewriteRule(LocalClusterState.state()).rewrite(sqlExpr); - sqlExpr.accept(new TermFieldRewriter(TermRewriterFilter.JOIN)); - JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); - return OpenSearchJoinQueryActionFactory.createJoinAction(client, joinSelect); - } else { - sqlExpr.accept(new TermFieldRewriter()); - // migrate aggregation to query planner framework. 
- if (shouldMigrateToQueryPlan(sqlExpr, request.getFormat())) { - return new QueryPlanQueryAction(new QueryPlanRequestBuilder( - new BindingTupleQueryPlanner(client, sqlExpr, request.getTypeProvider()))); - } - Select select = new SqlParser().parseSelect(sqlExpr); - return handleSelect(client, select); - } - case "DELETE": - if (isSQLDeleteEnabled()) { - SQLStatementParser parser = createSqlStatementParser(sql); - SQLDeleteStatement deleteStatement = parser.parseDeleteStatement(); - Delete delete = new SqlParser().parseDelete(deleteStatement); - return new DeleteQueryAction(client, delete); - } else { - throw new SQLFeatureDisabledException( - StringUtils.format("DELETE clause is disabled by default and will be " - + "deprecated. Using the %s setting to enable it", - Settings.Key.SQL_DELETE_ENABLED.getKeyValue())); - } - case "SHOW": - IndexStatement showStatement = new IndexStatement(StatementType.SHOW, sql); - return new ShowQueryAction(client, showStatement); - case "DESCRIBE": - IndexStatement describeStatement = new IndexStatement(StatementType.DESCRIBE, sql); - return new DescribeQueryAction(client, describeStatement); - default: - throw new SQLFeatureNotSupportedException( - String.format("Query must start with SELECT, DELETE, SHOW or DESCRIBE: %s", sql)); - } - } + public static QueryAction create(Client client, String sql) + throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { + return create(client, new QueryActionRequest(sql, new ColumnTypeProvider(), Format.JSON)); + } - private static boolean isSQLDeleteEnabled() { - return LocalClusterState.state().getSettingValue(Settings.Key.SQL_DELETE_ENABLED); + /** + * Create the compatible Query object based on the SQL query. + * + * @param request The SQL query. + * @return Query object. 
+ */ + public static QueryAction create(Client client, QueryActionRequest request) + throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { + String sql = request.getSql(); + // Remove line breaker anywhere and semicolon at the end + sql = sql.replaceAll("\\R", " ").trim(); + if (sql.endsWith(";")) { + sql = sql.substring(0, sql.length() - 1); } - private static String getFirstWord(String sql) { - int endOfFirstWord = sql.indexOf(' '); - return sql.substring(0, endOfFirstWord > 0 ? endOfFirstWord : sql.length()).toUpperCase(); - } + switch (getFirstWord(sql)) { + case "SELECT": + SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); - private static boolean isMulti(SQLQueryExpr sqlExpr) { - return sqlExpr.getSubQuery().getQuery() instanceof SQLUnionQuery; - } + RewriteRuleExecutor ruleExecutor = + RewriteRuleExecutor.builder() + .withRule(new SQLExprParentSetterRule()) + .withRule(new OrdinalRewriterRule(sql)) + .withRule(new UnquoteIdentifierRule()) + .withRule(new TableAliasPrefixRemoveRule()) + .withRule(new SubQueryRewriteRule()) + .build(); + ruleExecutor.executeOn(sqlExpr); + sqlExpr.accept(new NestedFieldRewriter()); - private static void executeAndFillSubQuery(Client client, - SubQueryExpression subQueryExpression, - QueryAction queryAction) throws SqlParseException { - List values = new ArrayList<>(); - Object queryResult; - try { - queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); - } catch (Exception e) { - throw new SqlParseException("could not execute SubQuery: " + e.getMessage()); + if (isMulti(sqlExpr)) { + sqlExpr.accept(new TermFieldRewriter(TermRewriterFilter.MULTI_QUERY)); + MultiQuerySelect multiSelect = + new SqlParser().parseMultiSelect((SQLUnionQuery) sqlExpr.getSubQuery().getQuery()); + return new MultiQueryAction(client, multiSelect); + } else if (isJoin(sqlExpr, sql)) { + new JoinRewriteRule(LocalClusterState.state()).rewrite(sqlExpr); + sqlExpr.accept(new 
TermFieldRewriter(TermRewriterFilter.JOIN)); + JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); + return OpenSearchJoinQueryActionFactory.createJoinAction(client, joinSelect); + } else { + sqlExpr.accept(new TermFieldRewriter()); + // migrate aggregation to query planner framework. + if (shouldMigrateToQueryPlan(sqlExpr, request.getFormat())) { + return new QueryPlanQueryAction( + new QueryPlanRequestBuilder( + new BindingTupleQueryPlanner(client, sqlExpr, request.getTypeProvider()))); + } + Select select = new SqlParser().parseSelect(sqlExpr); + return handleSelect(client, select); } - - String returnField = subQueryExpression.getReturnField(); - if (queryResult instanceof SearchHits) { - SearchHits hits = (SearchHits) queryResult; - for (SearchHit hit : hits) { - values.add(ElasticResultHandler.getFieldValue(hit, returnField)); - } + case "DELETE": + if (isSQLDeleteEnabled()) { + SQLStatementParser parser = createSqlStatementParser(sql); + SQLDeleteStatement deleteStatement = parser.parseDeleteStatement(); + Delete delete = new SqlParser().parseDelete(deleteStatement); + return new DeleteQueryAction(client, delete); } else { - throw new SqlParseException("on sub queries only support queries that return Hits and not aggregations"); + throw new SQLFeatureDisabledException( + StringUtils.format( + "DELETE clause is disabled by default and will be " + + "deprecated. 
Using the %s setting to enable it", + Settings.Key.SQL_DELETE_ENABLED.getKeyValue())); } - subQueryExpression.setValues(values.toArray()); + case "SHOW": + IndexStatement showStatement = new IndexStatement(StatementType.SHOW, sql); + return new ShowQueryAction(client, showStatement); + case "DESCRIBE": + IndexStatement describeStatement = new IndexStatement(StatementType.DESCRIBE, sql); + return new DescribeQueryAction(client, describeStatement); + default: + throw new SQLFeatureNotSupportedException( + String.format("Query must start with SELECT, DELETE, SHOW or DESCRIBE: %s", sql)); } + } - private static QueryAction handleSelect(Client client, Select select) { - if (select.isAggregate) { - return new AggregationQueryAction(client, select); - } else { - return new DefaultQueryAction(client, select); - } + private static boolean isSQLDeleteEnabled() { + return LocalClusterState.state().getSettingValue(Settings.Key.SQL_DELETE_ENABLED); + } + + private static String getFirstWord(String sql) { + int endOfFirstWord = sql.indexOf(' '); + return sql.substring(0, endOfFirstWord > 0 ? 
endOfFirstWord : sql.length()).toUpperCase(); + } + + private static boolean isMulti(SQLQueryExpr sqlExpr) { + return sqlExpr.getSubQuery().getQuery() instanceof SQLUnionQuery; + } + + private static void executeAndFillSubQuery( + Client client, SubQueryExpression subQueryExpression, QueryAction queryAction) + throws SqlParseException { + List values = new ArrayList<>(); + Object queryResult; + try { + queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); + } catch (Exception e) { + throw new SqlParseException("could not execute SubQuery: " + e.getMessage()); } - private static SQLStatementParser createSqlStatementParser(String sql) { - ElasticLexer lexer = new ElasticLexer(sql); - lexer.nextToken(); - return new MySqlStatementParser(lexer); + String returnField = subQueryExpression.getReturnField(); + if (queryResult instanceof SearchHits) { + SearchHits hits = (SearchHits) queryResult; + for (SearchHit hit : hits) { + values.add(ElasticResultHandler.getFieldValue(hit, returnField)); + } + } else { + throw new SqlParseException( + "on sub queries only support queries that return Hits and not aggregations"); } + subQueryExpression.setValues(values.toArray()); + } - private static boolean isJoin(SQLQueryExpr sqlExpr, String sql) { - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); - return query.getFrom() instanceof SQLJoinTableSource - && ((SQLJoinTableSource) query.getFrom()).getJoinType() != SQLJoinTableSource.JoinType.COMMA; + private static QueryAction handleSelect(Client client, Select select) { + if (select.isAggregate) { + return new AggregationQueryAction(client, select); + } else { + return new DefaultQueryAction(client, select); } + } - @VisibleForTesting - public static boolean shouldMigrateToQueryPlan(SQLQueryExpr expr, Format format) { - // The JSON format will return the OpenSearch aggregation result, which is not supported by the QueryPlanner. 
- if (format == Format.JSON) { - return false; - } - QueryPlannerScopeDecider decider = new QueryPlannerScopeDecider(); - return decider.isInScope(expr); + private static SQLStatementParser createSqlStatementParser(String sql) { + ElasticLexer lexer = new ElasticLexer(sql); + lexer.nextToken(); + return new MySqlStatementParser(lexer); + } + + private static boolean isJoin(SQLQueryExpr sqlExpr, String sql) { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); + return query.getFrom() instanceof SQLJoinTableSource + && ((SQLJoinTableSource) query.getFrom()).getJoinType() + != SQLJoinTableSource.JoinType.COMMA; + } + + @VisibleForTesting + public static boolean shouldMigrateToQueryPlan(SQLQueryExpr expr, Format format) { + // The JSON format will return the OpenSearch aggregation result, which is not supported by the + // QueryPlanner. + if (format == Format.JSON) { + return false; } + QueryPlannerScopeDecider decider = new QueryPlannerScopeDecider(); + return decider.isInScope(expr); + } - private static class QueryPlannerScopeDecider extends MySqlASTVisitorAdapter { - private boolean hasAggregationFunc = false; - private boolean hasNestedFunction = false; - private boolean hasGroupBy = false; - private boolean hasAllColumnExpr = false; + private static class QueryPlannerScopeDecider extends MySqlASTVisitorAdapter { + private boolean hasAggregationFunc = false; + private boolean hasNestedFunction = false; + private boolean hasGroupBy = false; + private boolean hasAllColumnExpr = false; - public boolean isInScope(SQLQueryExpr expr) { - expr.accept(this); - return !hasAllColumnExpr && !hasNestedFunction && (hasGroupBy || hasAggregationFunc); - } + public boolean isInScope(SQLQueryExpr expr) { + expr.accept(this); + return !hasAllColumnExpr && !hasNestedFunction && (hasGroupBy || hasAggregationFunc); + } - @Override - public boolean visit(SQLSelectItem expr) { - if (expr.getExpr() instanceof SQLAllColumnExpr) { - hasAllColumnExpr = 
true; - } - return super.visit(expr); - } + @Override + public boolean visit(SQLSelectItem expr) { + if (expr.getExpr() instanceof SQLAllColumnExpr) { + hasAllColumnExpr = true; + } + return super.visit(expr); + } - @Override - public boolean visit(SQLSelectGroupByClause expr) { - hasGroupBy = true; - return super.visit(expr); - } + @Override + public boolean visit(SQLSelectGroupByClause expr) { + hasGroupBy = true; + return super.visit(expr); + } - @Override - public boolean visit(SQLAggregateExpr expr) { - hasAggregationFunc = true; - return super.visit(expr); - } + @Override + public boolean visit(SQLAggregateExpr expr) { + hasAggregationFunc = true; + return super.visit(expr); + } - @Override - public boolean visit(SQLMethodInvokeExpr expr) { - if (expr.getMethodName().equalsIgnoreCase("nested")) { - hasNestedFunction = true; - } - return super.visit(expr); - } + @Override + public boolean visit(SQLMethodInvokeExpr expr) { + if (expr.getMethodName().equalsIgnoreCase("nested")) { + hasNestedFunction = true; + } + return super.visit(expr); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java index 7646639be4..c9b39d2f97 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.fasterxml.jackson.core.JsonFactory; @@ -32,199 +31,208 @@ import org.opensearch.sql.legacy.request.SqlRequest; /** - * Abstract class. used to transform Select object (Represents SQL query) to - * SearchRequestBuilder (Represents OpenSearch query) + * Abstract class. 
used to transform Select object (Represents SQL query) to SearchRequestBuilder + * (Represents OpenSearch query) */ public abstract class QueryAction { - protected Query query; - protected Client client; - protected SqlRequest sqlRequest = SqlRequest.NULL; - protected ColumnTypeProvider scriptColumnType; - protected Format format; - - public QueryAction(Client client, Query query) { - this.client = client; - this.query = query; - } - - public Client getClient() { - return client; - } - - public QueryStatement getQueryStatement() { - return query; - } - - public void setSqlRequest(SqlRequest sqlRequest) { - this.sqlRequest = sqlRequest; - } - - public void setColumnTypeProvider(ColumnTypeProvider scriptColumnType) { - this.scriptColumnType = scriptColumnType; - } - - public SqlRequest getSqlRequest() { - return sqlRequest; - } - - public void setFormat(Format format) { - this.format = format; - } - - public Format getFormat() { - return this.format; - } - - public ColumnTypeProvider getScriptColumnType() { - return scriptColumnType; - } - - /** - * @return List of field names produced by the query - */ - public Optional> getFieldNames() { - return Optional.empty(); - } - - protected void updateRequestWithCollapse(Select select, SearchRequestBuilder request) throws SqlParseException { - JsonFactory jsonFactory = new JsonFactory(); - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.COLLAPSE && hint.getParams() != null && 0 < hint.getParams().length) { - try (JsonXContentParser parser = new JsonXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, jsonFactory.createParser(hint.getParams()[0].toString()))) { - request.setCollapse(CollapseBuilder.fromXContent(parser)); - } catch (IOException e) { - throw new SqlParseException("could not parse collapse hint: " + e.getMessage()); - } - } - } - } - - protected void updateRequestWithPostFilter(Select select, SearchRequestBuilder request) { - for (Hint hint : 
select.getHints()) { - if (hint.getType() == HintType.POST_FILTER && hint.getParams() != null && 0 < hint.getParams().length) { - request.setPostFilter(QueryBuilders.wrapperQuery(hint.getParams()[0].toString())); - } - } - } - - protected void updateRequestWithIndexAndRoutingOptions(Select select, SearchRequestBuilder request) { - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.IGNORE_UNAVAILABLE) { - //saving the defaults from TransportClient search - request.setIndicesOptions(IndicesOptions.fromOptions(true, false, true, false, - IndicesOptions.strictExpandOpenAndForbidClosed())); - } - if (hint.getType() == HintType.ROUTINGS) { - Object[] routings = hint.getParams(); - String[] routingsAsStringArray = new String[routings.length]; - for (int i = 0; i < routings.length; i++) { - routingsAsStringArray[i] = routings[i].toString(); - } - request.setRouting(routingsAsStringArray); - } - } - } - - protected void updateRequestWithHighlight(Select select, SearchRequestBuilder request) { - boolean foundAnyHighlights = false; - HighlightBuilder highlightBuilder = new HighlightBuilder(); - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.HIGHLIGHT) { - HighlightBuilder.Field highlightField = parseHighlightField(hint.getParams()); - if (highlightField != null) { - foundAnyHighlights = true; - highlightBuilder.field(highlightField); - } - } - } - if (foundAnyHighlights) { - request.highlighter(highlightBuilder); - } - } - - protected HighlightBuilder.Field parseHighlightField(Object[] params) { - if (params == null || params.length == 0 || params.length > 2) { - //todo: exception. 
+ protected Query query; + protected Client client; + protected SqlRequest sqlRequest = SqlRequest.NULL; + protected ColumnTypeProvider scriptColumnType; + protected Format format; + + public QueryAction(Client client, Query query) { + this.client = client; + this.query = query; + } + + public Client getClient() { + return client; + } + + public QueryStatement getQueryStatement() { + return query; + } + + public void setSqlRequest(SqlRequest sqlRequest) { + this.sqlRequest = sqlRequest; + } + + public void setColumnTypeProvider(ColumnTypeProvider scriptColumnType) { + this.scriptColumnType = scriptColumnType; + } + + public SqlRequest getSqlRequest() { + return sqlRequest; + } + + public void setFormat(Format format) { + this.format = format; + } + + public Format getFormat() { + return this.format; + } + + public ColumnTypeProvider getScriptColumnType() { + return scriptColumnType; + } + + /** + * @return List of field names produced by the query + */ + public Optional> getFieldNames() { + return Optional.empty(); + } + + protected void updateRequestWithCollapse(Select select, SearchRequestBuilder request) + throws SqlParseException { + JsonFactory jsonFactory = new JsonFactory(); + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.COLLAPSE + && hint.getParams() != null + && 0 < hint.getParams().length) { + try (JsonXContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + jsonFactory.createParser(hint.getParams()[0].toString()))) { + request.setCollapse(CollapseBuilder.fromXContent(parser)); + } catch (IOException e) { + throw new SqlParseException("could not parse collapse hint: " + e.getMessage()); } - HighlightBuilder.Field field = new HighlightBuilder.Field(params[0].toString()); - if (params.length == 1) { - return field; + } + } + } + + protected void updateRequestWithPostFilter(Select select, SearchRequestBuilder request) { + for (Hint hint : select.getHints()) { + if 
(hint.getType() == HintType.POST_FILTER + && hint.getParams() != null + && 0 < hint.getParams().length) { + request.setPostFilter(QueryBuilders.wrapperQuery(hint.getParams()[0].toString())); + } + } + } + + protected void updateRequestWithIndexAndRoutingOptions( + Select select, SearchRequestBuilder request) { + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.IGNORE_UNAVAILABLE) { + // saving the defaults from TransportClient search + request.setIndicesOptions( + IndicesOptions.fromOptions( + true, false, true, false, IndicesOptions.strictExpandOpenAndForbidClosed())); + } + if (hint.getType() == HintType.ROUTINGS) { + Object[] routings = hint.getParams(); + String[] routingsAsStringArray = new String[routings.length]; + for (int i = 0; i < routings.length; i++) { + routingsAsStringArray[i] = routings[i].toString(); } - Map highlightParams = (Map) params[1]; - - for (Map.Entry param : highlightParams.entrySet()) { - switch (param.getKey()) { - case "type": - field.highlighterType((String) param.getValue()); - break; - case "boundary_chars": - field.boundaryChars(fromArrayListToCharArray((ArrayList) param.getValue())); - break; - case "boundary_max_scan": - field.boundaryMaxScan((Integer) param.getValue()); - break; - case "force_source": - field.forceSource((Boolean) param.getValue()); - break; - case "fragmenter": - field.fragmenter((String) param.getValue()); - break; - case "fragment_offset": - field.fragmentOffset((Integer) param.getValue()); - break; - case "fragment_size": - field.fragmentSize((Integer) param.getValue()); - break; - case "highlight_filter": - field.highlightFilter((Boolean) param.getValue()); - break; - case "matched_fields": - field.matchedFields((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); - break; - case "no_match_size": - field.noMatchSize((Integer) param.getValue()); - break; - case "num_of_fragments": - field.numOfFragments((Integer) param.getValue()); - break; - case "order": - 
field.order((String) param.getValue()); - break; - case "phrase_limit": - field.phraseLimit((Integer) param.getValue()); - break; - case "post_tags": - field.postTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); - break; - case "pre_tags": - field.preTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); - break; - case "require_field_match": - field.requireFieldMatch((Boolean) param.getValue()); - break; - - } + request.setRouting(routingsAsStringArray); + } + } + } + + protected void updateRequestWithHighlight(Select select, SearchRequestBuilder request) { + boolean foundAnyHighlights = false; + HighlightBuilder highlightBuilder = new HighlightBuilder(); + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.HIGHLIGHT) { + HighlightBuilder.Field highlightField = parseHighlightField(hint.getParams()); + if (highlightField != null) { + foundAnyHighlights = true; + highlightBuilder.field(highlightField); } - return field; - } - - private char[] fromArrayListToCharArray(ArrayList arrayList) { - char[] chars = new char[arrayList.size()]; - int i = 0; - for (Object item : arrayList) { - chars[i] = item.toString().charAt(0); - i++; - } - return chars; - } - - /** - * Prepare the request, and return OpenSearch request. - * - * @return ActionRequestBuilder (OpenSearch request) - * @throws SqlParseException - */ - public abstract SqlElasticRequestBuilder explain() throws SqlParseException; + } + } + if (foundAnyHighlights) { + request.highlighter(highlightBuilder); + } + } + + protected HighlightBuilder.Field parseHighlightField(Object[] params) { + if (params == null || params.length == 0 || params.length > 2) { + // todo: exception. 
+ } + HighlightBuilder.Field field = new HighlightBuilder.Field(params[0].toString()); + if (params.length == 1) { + return field; + } + Map highlightParams = (Map) params[1]; + + for (Map.Entry param : highlightParams.entrySet()) { + switch (param.getKey()) { + case "type": + field.highlighterType((String) param.getValue()); + break; + case "boundary_chars": + field.boundaryChars(fromArrayListToCharArray((ArrayList) param.getValue())); + break; + case "boundary_max_scan": + field.boundaryMaxScan((Integer) param.getValue()); + break; + case "force_source": + field.forceSource((Boolean) param.getValue()); + break; + case "fragmenter": + field.fragmenter((String) param.getValue()); + break; + case "fragment_offset": + field.fragmentOffset((Integer) param.getValue()); + break; + case "fragment_size": + field.fragmentSize((Integer) param.getValue()); + break; + case "highlight_filter": + field.highlightFilter((Boolean) param.getValue()); + break; + case "matched_fields": + field.matchedFields((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); + break; + case "no_match_size": + field.noMatchSize((Integer) param.getValue()); + break; + case "num_of_fragments": + field.numOfFragments((Integer) param.getValue()); + break; + case "order": + field.order((String) param.getValue()); + break; + case "phrase_limit": + field.phraseLimit((Integer) param.getValue()); + break; + case "post_tags": + field.postTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); + break; + case "pre_tags": + field.preTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); + break; + case "require_field_match": + field.requireFieldMatch((Boolean) param.getValue()); + break; + } + } + return field; + } + + private char[] fromArrayListToCharArray(ArrayList arrayList) { + char[] chars = new char[arrayList.size()]; + int i = 0; + for (Object item : arrayList) { + chars[i] = item.toString().charAt(0); + i++; + } + return chars; + } + + /** + * Prepare the 
request, and return OpenSearch request. + * + * @return ActionRequestBuilder (OpenSearch request) + * @throws SqlParseException + */ + public abstract SqlElasticRequestBuilder explain() throws SqlParseException; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java index c14d8f3012..9dd34c71b9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; - import java.io.IOException; import org.json.JSONObject; import org.json.JSONStringer; @@ -19,86 +17,96 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. 
*/ public class NestedLoopsElasticRequestBuilder extends JoinRequestBuilder { - private Where connectedWhere; - private int multiSearchMaxSize; + private Where connectedWhere; + private int multiSearchMaxSize; - public NestedLoopsElasticRequestBuilder() { + public NestedLoopsElasticRequestBuilder() { - multiSearchMaxSize = 100; - } + multiSearchMaxSize = 100; + } - @Override - public String explain() { - String conditions = ""; - - try { - Where where = (Where) this.connectedWhere.clone(); - setValueTypeConditionToStringRecursive(where); - if (where != null) { - conditions = QueryMaker.explain(where, false).toString(); - } - } catch (CloneNotSupportedException | SqlParseException e) { - conditions = "Could not parse conditions due to " + e.getMessage(); - } - - String desc = "Nested Loops run first query, and for each result run " - + "second query with additional conditions as following."; - String[] queries = explainNL(); - JSONStringer jsonStringer = new JSONStringer(); - jsonStringer.object().key("description").value(desc) - .key("conditions").value(new JSONObject(conditions)) - .key("first query").value(new JSONObject(queries[0])) - .key("second query").value(new JSONObject(queries[1])).endObject(); - return jsonStringer.toString(); - } + @Override + public String explain() { + String conditions = ""; - public int getMultiSearchMaxSize() { - return multiSearchMaxSize; + try { + Where where = (Where) this.connectedWhere.clone(); + setValueTypeConditionToStringRecursive(where); + if (where != null) { + conditions = QueryMaker.explain(where, false).toString(); + } + } catch (CloneNotSupportedException | SqlParseException e) { + conditions = "Could not parse conditions due to " + e.getMessage(); } - public void setMultiSearchMaxSize(int multiSearchMaxSize) { - this.multiSearchMaxSize = multiSearchMaxSize; + String desc = + "Nested Loops run first query, and for each result run " + + "second query with additional conditions as following."; + String[] queries = 
explainNL(); + JSONStringer jsonStringer = new JSONStringer(); + jsonStringer + .object() + .key("description") + .value(desc) + .key("conditions") + .value(new JSONObject(conditions)) + .key("first query") + .value(new JSONObject(queries[0])) + .key("second query") + .value(new JSONObject(queries[1])) + .endObject(); + return jsonStringer.toString(); + } + + public int getMultiSearchMaxSize() { + return multiSearchMaxSize; + } + + public void setMultiSearchMaxSize(int multiSearchMaxSize) { + this.multiSearchMaxSize = multiSearchMaxSize; + } + + public Where getConnectedWhere() { + return connectedWhere; + } + + public void setConnectedWhere(Where connectedWhere) { + this.connectedWhere = connectedWhere; + } + + private void setValueTypeConditionToStringRecursive(Where where) { + if (where == null) { + return; } - - public Where getConnectedWhere() { - return connectedWhere; + if (where instanceof Condition) { + Condition c = (Condition) where; + c.setValue(c.getValue().toString()); + return; + } else { + for (Where innerWhere : where.getWheres()) { + setValueTypeConditionToStringRecursive(innerWhere); + } } - - public void setConnectedWhere(Where connectedWhere) { - this.connectedWhere = connectedWhere; - } - - private void setValueTypeConditionToStringRecursive(Where where) { - if (where == null) { - return; - } - if (where instanceof Condition) { - Condition c = (Condition) where; - c.setValue(c.getValue().toString()); - return; - } else { - for (Where innerWhere : where.getWheres()) { - setValueTypeConditionToStringRecursive(innerWhere); - } - } - } - - private String[] explainNL() { - return new String[]{explainQuery(this.getFirstTable()), explainQuery(this.getSecondTable())}; - } - - private String explainQuery(TableInJoinRequestBuilder requestBuilder) { - try { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().prettyPrint(); - requestBuilder.getRequestBuilder().request().source().toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); - return 
BytesReference.bytes(xContentBuilder).utf8ToString(); - } catch (IOException e) { - return e.getMessage(); - } + } + + private String[] explainNL() { + return new String[] {explainQuery(this.getFirstTable()), explainQuery(this.getSecondTable())}; + } + + private String explainQuery(TableInJoinRequestBuilder requestBuilder) { + try { + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().prettyPrint(); + requestBuilder + .getRequestBuilder() + .request() + .source() + .toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + return BytesReference.bytes(xContentBuilder).utf8ToString(); + } catch (IOException e) { + return e.getMessage(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java index 0a87c16067..078ed6bcce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.AbstractMap; @@ -20,129 +19,126 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. 
*/ public class OpenSearchHashJoinQueryAction extends OpenSearchJoinQueryAction { - public OpenSearchHashJoinQueryAction(Client client, JoinSelect joinSelect) { - super(client, joinSelect); - } + public OpenSearchHashJoinQueryAction(Client client, JoinSelect joinSelect) { + super(client, joinSelect); + } - @Override - protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException { - String t1Alias = joinSelect.getFirstTable().getAlias(); - String t2Alias = joinSelect.getSecondTable().getAlias(); + @Override + protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) + throws SqlParseException { + String t1Alias = joinSelect.getFirstTable().getAlias(); + String t2Alias = joinSelect.getSecondTable().getAlias(); - List>> comparisonFields = getComparisonFields(t1Alias, t2Alias, - joinSelect.getConnectedWhere()); + List>> comparisonFields = + getComparisonFields(t1Alias, t2Alias, joinSelect.getConnectedWhere()); - ((HashJoinElasticRequestBuilder) requestBuilder).setT1ToT2FieldsComparison(comparisonFields); - } + ((HashJoinElasticRequestBuilder) requestBuilder).setT1ToT2FieldsComparison(comparisonFields); + } - @Override - protected JoinRequestBuilder createSpecificBuilder() { - if (isLegacy()) { - return new HashJoinElasticRequestBuilder(); - } - return new HashJoinQueryPlanRequestBuilder(client, sqlRequest); + @Override + protected JoinRequestBuilder createSpecificBuilder() { + if (isLegacy()) { + return new HashJoinElasticRequestBuilder(); } - - @Override - protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { - super.updateRequestWithHints(requestBuilder); - for (Hint hint : joinSelect.getHints()) { - if (hint.getType() == HintType.HASH_WITH_TERMS_FILTER) { - ((HashJoinElasticRequestBuilder) requestBuilder).setUseTermFiltersOptimization(true); - } - } + return new HashJoinQueryPlanRequestBuilder(client, sqlRequest); + } + + @Override + protected void 
updateRequestWithHints(JoinRequestBuilder requestBuilder) { + super.updateRequestWithHints(requestBuilder); + for (Hint hint : joinSelect.getHints()) { + if (hint.getType() == HintType.HASH_WITH_TERMS_FILTER) { + ((HashJoinElasticRequestBuilder) requestBuilder).setUseTermFiltersOptimization(true); + } } - - /** - * Keep the option to run legacy hash join algorithm mainly for the comparison - */ - private boolean isLegacy() { - for (Hint hint : joinSelect.getHints()) { - if (hint.getType() == HintType.JOIN_ALGORITHM_USE_LEGACY) { - return true; - } - } - return false; + } + + /** Keep the option to run legacy hash join algorithm mainly for the comparison */ + private boolean isLegacy() { + for (Hint hint : joinSelect.getHints()) { + if (hint.getType() == HintType.JOIN_ALGORITHM_USE_LEGACY) { + return true; + } } - - private List> getComparisonFields(String t1Alias, String t2Alias, - List connectedConditions) - throws SqlParseException { - List> comparisonFields = new ArrayList<>(); - for (Condition condition : connectedConditions) { - - if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { - throw new SqlParseException( - String.format("HashJoin should only be with EQ conditions, got:%s on condition:%s", - condition.getOPERATOR().name(), condition.toString())); - } - - String firstField = condition.getName(); - String secondField = condition.getValue().toString(); - Field t1Field, t2Field; - if (firstField.startsWith(t1Alias)) { - t1Field = new Field(removeAlias(firstField, t1Alias), null); - t2Field = new Field(removeAlias(secondField, t2Alias), null); - } else { - t1Field = new Field(removeAlias(secondField, t1Alias), null); - t2Field = new Field(removeAlias(firstField, t2Alias), null); - } - comparisonFields.add(new AbstractMap.SimpleEntry<>(t1Field, t2Field)); - } - return comparisonFields; + return false; + } + + private List> getComparisonFields( + String t1Alias, String t2Alias, List connectedConditions) + throws SqlParseException { + List> comparisonFields 
= new ArrayList<>(); + for (Condition condition : connectedConditions) { + + if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { + throw new SqlParseException( + String.format( + "HashJoin should only be with EQ conditions, got:%s on condition:%s", + condition.getOPERATOR().name(), condition.toString())); + } + + String firstField = condition.getName(); + String secondField = condition.getValue().toString(); + Field t1Field, t2Field; + if (firstField.startsWith(t1Alias)) { + t1Field = new Field(removeAlias(firstField, t1Alias), null); + t2Field = new Field(removeAlias(secondField, t2Alias), null); + } else { + t1Field = new Field(removeAlias(secondField, t1Alias), null); + t2Field = new Field(removeAlias(firstField, t2Alias), null); + } + comparisonFields.add(new AbstractMap.SimpleEntry<>(t1Field, t2Field)); } - - private List>> getComparisonFields(String t1Alias, String t2Alias, - Where connectedWhere) throws SqlParseException { - List>> comparisonFields = new ArrayList<>(); - //where is AND with lots of conditions. - if (connectedWhere == null) { - return comparisonFields; - } - boolean allAnds = true; - for (Where innerWhere : connectedWhere.getWheres()) { - if (innerWhere.getConn() == Where.CONN.OR) { - allAnds = false; - break; - } - } - if (allAnds) { - List> innerComparisonFields = - getComparisonFieldsFromWhere(t1Alias, t2Alias, connectedWhere); - comparisonFields.add(innerComparisonFields); - } else { - for (Where innerWhere : connectedWhere.getWheres()) { - comparisonFields.add(getComparisonFieldsFromWhere(t1Alias, t2Alias, innerWhere)); - } - } - - return comparisonFields; + return comparisonFields; + } + + private List>> getComparisonFields( + String t1Alias, String t2Alias, Where connectedWhere) throws SqlParseException { + List>> comparisonFields = new ArrayList<>(); + // where is AND with lots of conditions. 
+ if (connectedWhere == null) { + return comparisonFields; } - - private List> getComparisonFieldsFromWhere(String t1Alias, String t2Alias, Where where) - throws SqlParseException { - List conditions = new ArrayList<>(); - if (where instanceof Condition) { - conditions.add((Condition) where); - } else { - for (Where innerWhere : where.getWheres()) { - if (!(innerWhere instanceof Condition)) { - throw new SqlParseException( - "if connectedCondition is AND then all inner wheres should be Conditions"); - } - conditions.add((Condition) innerWhere); - } - } - return getComparisonFields(t1Alias, t2Alias, conditions); + boolean allAnds = true; + for (Where innerWhere : connectedWhere.getWheres()) { + if (innerWhere.getConn() == Where.CONN.OR) { + allAnds = false; + break; + } + } + if (allAnds) { + List> innerComparisonFields = + getComparisonFieldsFromWhere(t1Alias, t2Alias, connectedWhere); + comparisonFields.add(innerComparisonFields); + } else { + for (Where innerWhere : connectedWhere.getWheres()) { + comparisonFields.add(getComparisonFieldsFromWhere(t1Alias, t2Alias, innerWhere)); + } } - private String removeAlias(String field, String alias) { - return field.replace(alias + ".", ""); + return comparisonFields; + } + + private List> getComparisonFieldsFromWhere( + String t1Alias, String t2Alias, Where where) throws SqlParseException { + List conditions = new ArrayList<>(); + if (where instanceof Condition) { + conditions.add((Condition) where); + } else { + for (Where innerWhere : where.getWheres()) { + if (!(innerWhere instanceof Condition)) { + throw new SqlParseException( + "if connectedCondition is AND then all inner wheres should be Conditions"); + } + conditions.add((Condition) innerWhere); + } } + return getComparisonFields(t1Alias, t2Alias, conditions); + } + private String removeAlias(String field, String alias) { + return field.replace(alias + ".", ""); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java index 35e718d985..7068ddf9a2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; @@ -20,111 +19,107 @@ import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; import org.opensearch.sql.legacy.query.planner.core.Config; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public abstract class OpenSearchJoinQueryAction extends QueryAction { - protected JoinSelect joinSelect; - - public OpenSearchJoinQueryAction(Client client, JoinSelect joinSelect) { - super(client, joinSelect); - this.joinSelect = joinSelect; - } - - @Override - public SqlElasticRequestBuilder explain() throws SqlParseException { - JoinRequestBuilder requestBuilder = createSpecificBuilder(); - fillBasicJoinRequestBuilder(requestBuilder); - fillSpecificRequestBuilder(requestBuilder); - return requestBuilder; - } - - protected abstract void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException; - - protected abstract JoinRequestBuilder createSpecificBuilder(); - - - private void fillBasicJoinRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException { - - fillTableInJoinRequestBuilder(requestBuilder.getFirstTable(), joinSelect.getFirstTable()); - fillTableInJoinRequestBuilder(requestBuilder.getSecondTable(), joinSelect.getSecondTable()); - - requestBuilder.setJoinType(joinSelect.getJoinType()); - - requestBuilder.setTotalLimit(joinSelect.getTotalLimit()); - - updateRequestWithHints(requestBuilder); - - - } - - protected void 
updateRequestWithHints(JoinRequestBuilder requestBuilder) { - for (Hint hint : joinSelect.getHints()) { - Object[] params = hint.getParams(); - switch (hint.getType()) { - case JOIN_LIMIT: - requestBuilder.getFirstTable().setHintLimit((Integer) params[0]); - requestBuilder.getSecondTable().setHintLimit((Integer) params[1]); - break; - case JOIN_ALGORITHM_BLOCK_SIZE: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureBlockSize(hint.getParams()); - } - break; - case JOIN_SCROLL_PAGE_SIZE: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureScrollPageSize(hint.getParams()); - } - break; - case JOIN_CIRCUIT_BREAK_LIMIT: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureCircuitBreakLimit(hint.getParams()); - } - break; - case JOIN_BACK_OFF_RETRY_INTERVALS: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureBackOffRetryIntervals(hint.getParams()); - } - break; - case JOIN_TIME_OUT: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureTimeOut(hint.getParams()); - } - break; - } - } + protected JoinSelect joinSelect; + + public OpenSearchJoinQueryAction(Client client, JoinSelect joinSelect) { + super(client, joinSelect); + this.joinSelect = joinSelect; + } + + @Override + public SqlElasticRequestBuilder explain() throws SqlParseException { + JoinRequestBuilder requestBuilder = createSpecificBuilder(); + fillBasicJoinRequestBuilder(requestBuilder); + fillSpecificRequestBuilder(requestBuilder); + return requestBuilder; + } + + protected abstract void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) + throws SqlParseException; + + protected abstract JoinRequestBuilder createSpecificBuilder(); + + private void fillBasicJoinRequestBuilder(JoinRequestBuilder 
requestBuilder) + throws SqlParseException { + + fillTableInJoinRequestBuilder(requestBuilder.getFirstTable(), joinSelect.getFirstTable()); + fillTableInJoinRequestBuilder(requestBuilder.getSecondTable(), joinSelect.getSecondTable()); + + requestBuilder.setJoinType(joinSelect.getJoinType()); + + requestBuilder.setTotalLimit(joinSelect.getTotalLimit()); + + updateRequestWithHints(requestBuilder); + } + + protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { + for (Hint hint : joinSelect.getHints()) { + Object[] params = hint.getParams(); + switch (hint.getType()) { + case JOIN_LIMIT: + requestBuilder.getFirstTable().setHintLimit((Integer) params[0]); + requestBuilder.getSecondTable().setHintLimit((Integer) params[1]); + break; + case JOIN_ALGORITHM_BLOCK_SIZE: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureBlockSize(hint.getParams()); + } + break; + case JOIN_SCROLL_PAGE_SIZE: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureScrollPageSize(hint.getParams()); + } + break; + case JOIN_CIRCUIT_BREAK_LIMIT: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureCircuitBreakLimit(hint.getParams()); + } + break; + case JOIN_BACK_OFF_RETRY_INTERVALS: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureBackOffRetryIntervals(hint.getParams()); + } + break; + case JOIN_TIME_OUT: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureTimeOut(hint.getParams()); + } + break; + } } - - private Config queryPlannerConfig(JoinRequestBuilder requestBuilder) { - return ((HashJoinQueryPlanRequestBuilder) requestBuilder).getConfig(); - } - - private void fillTableInJoinRequestBuilder(TableInJoinRequestBuilder requestBuilder, - TableOnJoinSelect tableOnJoinSelect) 
throws SqlParseException { - List connectedFields = tableOnJoinSelect.getConnectedFields(); - addFieldsToSelectIfMissing(tableOnJoinSelect, connectedFields); - requestBuilder.setOriginalSelect(tableOnJoinSelect); - DefaultQueryAction queryAction = new DefaultQueryAction(client, tableOnJoinSelect); - queryAction.explain(); - requestBuilder.setRequestBuilder(queryAction.getRequestBuilder()); - requestBuilder.setReturnedFields(tableOnJoinSelect.getSelectedFields()); - requestBuilder.setAlias(tableOnJoinSelect.getAlias()); + } + + private Config queryPlannerConfig(JoinRequestBuilder requestBuilder) { + return ((HashJoinQueryPlanRequestBuilder) requestBuilder).getConfig(); + } + + private void fillTableInJoinRequestBuilder( + TableInJoinRequestBuilder requestBuilder, TableOnJoinSelect tableOnJoinSelect) + throws SqlParseException { + List connectedFields = tableOnJoinSelect.getConnectedFields(); + addFieldsToSelectIfMissing(tableOnJoinSelect, connectedFields); + requestBuilder.setOriginalSelect(tableOnJoinSelect); + DefaultQueryAction queryAction = new DefaultQueryAction(client, tableOnJoinSelect); + queryAction.explain(); + requestBuilder.setRequestBuilder(queryAction.getRequestBuilder()); + requestBuilder.setReturnedFields(tableOnJoinSelect.getSelectedFields()); + requestBuilder.setAlias(tableOnJoinSelect.getAlias()); + } + + private void addFieldsToSelectIfMissing(Select select, List fields) { + // this means all fields + if (select.getFields() == null || select.getFields().size() == 0) { + return; } - private void addFieldsToSelectIfMissing(Select select, List fields) { - //this means all fields - if (select.getFields() == null || select.getFields().size() == 0) { - return; - } - - List selectedFields = select.getFields(); - for (Field field : fields) { - if (!selectedFields.contains(field)) { - selectedFields.add(field); - } - } - + List selectedFields = select.getFields(); + for (Field field : fields) { + if (!selectedFields.contains(field)) { + 
selectedFields.add(field); + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java index c96cb6120c..c638f43519 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; @@ -14,36 +13,32 @@ import org.opensearch.sql.legacy.domain.hints.HintType; import org.opensearch.sql.legacy.query.QueryAction; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public class OpenSearchJoinQueryActionFactory { - public static QueryAction createJoinAction(Client client, JoinSelect joinSelect) { - List connectedConditions = joinSelect.getConnectedConditions(); - boolean allEqual = true; - for (Condition condition : connectedConditions) { - if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { - allEqual = false; - break; - } - - } - if (!allEqual) { - return new OpenSearchNestedLoopsQueryAction(client, joinSelect); - } - - boolean useNestedLoopsHintExist = false; - for (Hint hint : joinSelect.getHints()) { - if (hint.getType() == HintType.USE_NESTED_LOOPS) { - useNestedLoopsHintExist = true; - break; - } - } - if (useNestedLoopsHintExist) { - return new OpenSearchNestedLoopsQueryAction(client, joinSelect); - } - - return new OpenSearchHashJoinQueryAction(client, joinSelect); + public static QueryAction createJoinAction(Client client, JoinSelect joinSelect) { + List connectedConditions = joinSelect.getConnectedConditions(); + boolean allEqual = true; + for (Condition condition : connectedConditions) { + if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { + allEqual = false; + break; + } + } + if (!allEqual) 
{ + return new OpenSearchNestedLoopsQueryAction(client, joinSelect); + } + boolean useNestedLoopsHintExist = false; + for (Hint hint : joinSelect.getHints()) { + if (hint.getType() == HintType.USE_NESTED_LOOPS) { + useNestedLoopsHintExist = true; + break; + } } + if (useNestedLoopsHintExist) { + return new OpenSearchNestedLoopsQueryAction(client, joinSelect); + } + + return new OpenSearchHashJoinQueryAction(client, joinSelect); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java index 8954106f8a..e9e9169605 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import org.opensearch.client.Client; @@ -13,45 +12,44 @@ import org.opensearch.sql.legacy.domain.hints.HintType; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. 
*/ public class OpenSearchNestedLoopsQueryAction extends OpenSearchJoinQueryAction { - public OpenSearchNestedLoopsQueryAction(Client client, JoinSelect joinSelect) { - super(client, joinSelect); - } - - @Override - protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException { - NestedLoopsElasticRequestBuilder nestedBuilder = (NestedLoopsElasticRequestBuilder) requestBuilder; - Where where = joinSelect.getConnectedWhere(); - nestedBuilder.setConnectedWhere(where); - + public OpenSearchNestedLoopsQueryAction(Client client, JoinSelect joinSelect) { + super(client, joinSelect); + } + + @Override + protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) + throws SqlParseException { + NestedLoopsElasticRequestBuilder nestedBuilder = + (NestedLoopsElasticRequestBuilder) requestBuilder; + Where where = joinSelect.getConnectedWhere(); + nestedBuilder.setConnectedWhere(where); + } + + @Override + protected JoinRequestBuilder createSpecificBuilder() { + return new NestedLoopsElasticRequestBuilder(); + } + + @Override + protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { + super.updateRequestWithHints(requestBuilder); + for (Hint hint : this.joinSelect.getHints()) { + if (hint.getType() == HintType.NL_MULTISEARCH_SIZE) { + Integer multiSearchMaxSize = (Integer) hint.getParams()[0]; + ((NestedLoopsElasticRequestBuilder) requestBuilder) + .setMultiSearchMaxSize(multiSearchMaxSize); + } } + } - @Override - protected JoinRequestBuilder createSpecificBuilder() { - return new NestedLoopsElasticRequestBuilder(); + private String removeAlias(String field) { + String alias = joinSelect.getFirstTable().getAlias(); + if (!field.startsWith(alias + ".")) { + alias = joinSelect.getSecondTable().getAlias(); } - - @Override - protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { - super.updateRequestWithHints(requestBuilder); - for (Hint hint : this.joinSelect.getHints()) { - if 
(hint.getType() == HintType.NL_MULTISEARCH_SIZE) { - Integer multiSearchMaxSize = (Integer) hint.getParams()[0]; - ((NestedLoopsElasticRequestBuilder) requestBuilder).setMultiSearchMaxSize(multiSearchMaxSize); - } - } - } - - private String removeAlias(String field) { - String alias = joinSelect.getFirstTable().getAlias(); - if (!field.startsWith(alias + ".")) { - alias = joinSelect.getSecondTable().getAlias(); - } - return field.replace(alias + ".", ""); - } - + return field.replace(alias + ".", ""); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java index f36bca2686..75f3538981 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; - import org.apache.lucene.search.join.ScoreMode; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -18,76 +16,76 @@ public class QueryMaker extends Maker { - /** - * - * - * @param where - * @return - * @throws SqlParseException - */ - public static BoolQueryBuilder explain(Where where) throws SqlParseException { - return explain(where, true); - } + /** + * @param where + * @return + * @throws SqlParseException + */ + public static BoolQueryBuilder explain(Where where) throws SqlParseException { + return explain(where, true); + } - public static BoolQueryBuilder explain(Where where, boolean isQuery) throws SqlParseException { - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); - while (where.getWheres().size() == 1) { - where = where.getWheres().getFirst(); - } - new QueryMaker().explanWhere(boolQuery, where); - if (isQuery) { - return boolQuery; - } - return QueryBuilders.boolQuery().filter(boolQuery); + public static BoolQueryBuilder 
explain(Where where, boolean isQuery) throws SqlParseException { + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + while (where.getWheres().size() == 1) { + where = where.getWheres().getFirst(); } - - private QueryMaker() { - super(true); + new QueryMaker().explanWhere(boolQuery, where); + if (isQuery) { + return boolQuery; } + return QueryBuilders.boolQuery().filter(boolQuery); + } - private void explanWhere(BoolQueryBuilder boolQuery, Where where) throws SqlParseException { - if (where instanceof Condition) { - addSubQuery(boolQuery, where, (QueryBuilder) make((Condition) where)); - } else { - BoolQueryBuilder subQuery = QueryBuilders.boolQuery(); - addSubQuery(boolQuery, where, subQuery); - for (Where subWhere : where.getWheres()) { - explanWhere(subQuery, subWhere); - } - } - } + private QueryMaker() { + super(true); + } - /** - * - * - * @param boolQuery - * @param where - * @param subQuery - */ - private void addSubQuery(BoolQueryBuilder boolQuery, Where where, QueryBuilder subQuery) { - if (where instanceof Condition) { - Condition condition = (Condition) where; + private void explanWhere(BoolQueryBuilder boolQuery, Where where) throws SqlParseException { + if (where instanceof Condition) { + addSubQuery(boolQuery, where, (QueryBuilder) make((Condition) where)); + } else { + BoolQueryBuilder subQuery = QueryBuilders.boolQuery(); + addSubQuery(boolQuery, where, subQuery); + for (Where subWhere : where.getWheres()) { + explanWhere(subQuery, subWhere); + } + } + } - if (condition.isNested()) { - // bugfix #628 - if ("missing".equalsIgnoreCase(String.valueOf(condition.getValue())) - && (condition.getOPERATOR() == Condition.OPERATOR.IS - || condition.getOPERATOR() == Condition.OPERATOR.EQ)) { - boolQuery.mustNot(QueryBuilders.nestedQuery(condition.getNestedPath(), - QueryBuilders.boolQuery().mustNot(subQuery), ScoreMode.None)); - return; - } + /** + * @param boolQuery + * @param where + * @param subQuery + */ + private void addSubQuery(BoolQueryBuilder 
boolQuery, Where where, QueryBuilder subQuery) { + if (where instanceof Condition) { + Condition condition = (Condition) where; - subQuery = QueryBuilders.nestedQuery(condition.getNestedPath(), subQuery, ScoreMode.None); - } else if (condition.isChildren()) { - subQuery = JoinQueryBuilders.hasChildQuery(condition.getChildType(), subQuery, ScoreMode.None); - } + if (condition.isNested()) { + // bugfix #628 + if ("missing".equalsIgnoreCase(String.valueOf(condition.getValue())) + && (condition.getOPERATOR() == Condition.OPERATOR.IS + || condition.getOPERATOR() == Condition.OPERATOR.EQ)) { + boolQuery.mustNot( + QueryBuilders.nestedQuery( + condition.getNestedPath(), + QueryBuilders.boolQuery().mustNot(subQuery), + ScoreMode.None)); + return; } - if (where.getConn() == Where.CONN.AND) { - boolQuery.must(subQuery); - } else { - boolQuery.should(subQuery); - } + subQuery = QueryBuilders.nestedQuery(condition.getNestedPath(), subQuery, ScoreMode.None); + } else if (condition.isChildren()) { + subQuery = + JoinQueryBuilders.hasChildQuery(condition.getChildType(), subQuery, ScoreMode.None); + } + } + + if (where.getConn() == Where.CONN.AND) { + boolQuery.must(subQuery); + } else { + boolQuery.should(subQuery); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java index be86fdef81..1f934e9a80 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import org.opensearch.client.Client; import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.query.QueryAction; -/** - * Created by Eliran on 19/8/2016. 
- */ +/** Created by Eliran on 19/8/2016. */ public class OpenSearchMultiQueryActionFactory { - public static QueryAction createMultiQueryAction(Client client, MultiQuerySelect multiSelect) - throws SqlParseException { - switch (multiSelect.getOperation()) { - case UNION_ALL: - case UNION: - return new MultiQueryAction(client, multiSelect); - default: - throw new SqlParseException("only supports union and union all"); - } + public static QueryAction createMultiQueryAction(Client client, MultiQuerySelect multiSelect) + throws SqlParseException { + switch (multiSelect.getOperation()) { + case UNION_ALL: + case UNION: + return new MultiQueryAction(client, multiSelect); + default: + throw new SqlParseException("only supports union and union all"); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java index f163e61f0e..328bb9451f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java @@ -3,26 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import org.opensearch.sql.legacy.query.planner.core.PlanNode.Visitor; -/** - * Query plan - */ +/** Query plan */ public interface Plan { - /** - * Explain current query plan by visitor - * - * @param explanation visitor to explain the plan - */ - void traverse(Visitor explanation); - - /** - * Optimize current query plan to get the optimal one - */ - void optimize(); + /** + * Explain current query plan by visitor + * + * @param explanation visitor to explain the plan + */ + void traverse(Visitor explanation); + /** Optimize current query plan to get the optimal one */ + void optimize(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java index ad421f82a4..b30ec9d3d9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java @@ -3,54 +3,47 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; -/** - * Abstract plan node in query plan. - */ +/** Abstract plan node in query plan. */ public interface PlanNode { - /** - * All child nodes of current node used for traversal. - * - * @return all children - */ - PlanNode[] children(); + /** + * All child nodes of current node used for traversal. + * + * @return all children + */ + PlanNode[] children(); + + /** + * Accept a visitor and traverse the plan tree with it. + * + * @param visitor plan node visitor + */ + default void accept(Visitor visitor) { + if (visitor.visit(this)) { + for (PlanNode node : children()) { + node.accept(visitor); + } + } + visitor.endVisit(this); + } + + /** Plan node visitor. */ + interface Visitor { /** - * Accept a visitor and traverse the plan tree with it. + * To avoid listing all subclasses of PlanNode here, we dispatch manually in concrete visitor. * - * @param visitor plan node visitor + * @param op plan node being visited */ - default void accept(Visitor visitor) { - if (visitor.visit(this)) { - for (PlanNode node : children()) { - node.accept(visitor); - } - } - visitor.endVisit(this); - } + boolean visit(PlanNode op); /** - * Plan node visitor. + * Re-visit current node before return to parent node + * + * @param op plan node finished visit */ - interface Visitor { - - /** - * To avoid listing all subclasses of PlanNode here, we dispatch manually in concrete visitor. 
- * - * @param op plan node being visited - */ - boolean visit(PlanNode op); - - /** - * Re-visit current node before return to parent node - * - * @param op plan node finished visit - */ - default void endVisit(PlanNode op) { - } - } - + default void endVisit(PlanNode op) {} + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java index 2cb835da94..ae5f0fb9c8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -12,70 +11,68 @@ import org.opensearch.sql.legacy.domain.Field; import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; -/** - * All parameters required by QueryPlanner - */ +/** All parameters required by QueryPlanner */ public class QueryParams { - /** - * Request builder for first table - */ - private final TableInJoinRequestBuilder request1; + /** Request builder for first table */ + private final TableInJoinRequestBuilder request1; - /** - * Request builder for second table - */ - private final TableInJoinRequestBuilder request2; + /** Request builder for second table */ + private final TableInJoinRequestBuilder request2; - /** - * Join type, ex. inner join, left join - */ - private final SQLJoinTableSource.JoinType joinType; + /** Join type, ex. inner join, left join */ + private final SQLJoinTableSource.JoinType joinType; /** + *
      * Join conditions in ON clause grouped by OR.
      * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address"
      * => list: [
      * [ (a.name, b.id), (a.age, b.age) ],
      * [ (a.location, b.address) ]
      * ]
+     * 
*/ private final List>> joinConditions; + public QueryParams( + TableInJoinRequestBuilder request1, + TableInJoinRequestBuilder request2, + SQLJoinTableSource.JoinType joinType, + List>> t1ToT2FieldsComparison) { + this.request1 = request1; + this.request2 = request2; + this.joinType = joinType; + this.joinConditions = t1ToT2FieldsComparison; + } - public QueryParams(TableInJoinRequestBuilder request1, - TableInJoinRequestBuilder request2, - SQLJoinTableSource.JoinType joinType, - List>> t1ToT2FieldsComparison) { - this.request1 = request1; - this.request2 = request2; - this.joinType = joinType; - this.joinConditions = t1ToT2FieldsComparison; - } - - public TableInJoinRequestBuilder firstRequest() { - return request1; - } + public TableInJoinRequestBuilder firstRequest() { + return request1; + } - public TableInJoinRequestBuilder secondRequest() { - return request2; - } + public TableInJoinRequestBuilder secondRequest() { + return request2; + } - public SQLJoinTableSource.JoinType joinType() { - return joinType; - } + public SQLJoinTableSource.JoinType joinType() { + return joinType; + } - public List>> joinConditions() { - return joinConditions; - } + public List>> joinConditions() { + return joinConditions; + } - @Override - public String toString() { - return "QueryParams{" - + "request1=" + request1 - + ", request2=" + request2 - + ", joinType=" + joinType - + ", joinConditions=" + joinConditions - + '}'; - } + @Override + public String toString() { + return "QueryParams{" + + "request1=" + + request1 + + ", request2=" + + request2 + + ", joinType=" + + joinType + + ", joinConditions=" + + joinConditions + + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java index 56acfa5d0c..0a1c2fd24b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import static org.opensearch.sql.legacy.query.planner.core.ExecuteParams.ExecuteParamType.CLIENT; @@ -21,89 +20,69 @@ import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; import org.opensearch.sql.legacy.query.planner.resource.Stats; -/** - * Query planner that driver the logical planning, physical planning, execute and explain. - */ +/** Query planner that driver the logical planning, physical planning, execute and explain. */ public class QueryPlanner { - /** - * Connection to ElasticSearch - */ - private final Client client; - - /** - * Query plan configuration - */ - private final Config config; - - /** - * Optimized logical plan - */ - private final LogicalPlan logicalPlan; - - /** - * Best physical plan to execute - */ - private final PhysicalPlan physicalPlan; - - /** - * Statistics collector - */ - private Stats stats; - - /** - * Resource monitor and statistics manager - */ - private ResourceManager resourceMgr; - - - public QueryPlanner(Client client, Config config, QueryParams params) { - this.client = client; - this.config = config; - this.stats = new Stats(client); - this.resourceMgr = new ResourceManager(stats, config); - - logicalPlan = new LogicalPlan(config, params); - logicalPlan.optimize(); - - physicalPlan = new PhysicalPlan(logicalPlan); - physicalPlan.optimize(); - } - - /** - * Execute query plan - * - * @return response of the execution - */ - public List execute() { - ExecuteParams params = new ExecuteParams(); - params.add(CLIENT, client); - params.add(TIMEOUT, config.timeout()); - params.add(RESOURCE_MANAGER, resourceMgr); - return physicalPlan.execute(params); - } - - /** - * Explain query plan - * - * @return explanation string of the plan - */ - public String explain() { - return new Explanation( - logicalPlan, 
physicalPlan, - new JsonExplanationFormat(4) - ).toString(); - } - - public MetaSearchResult getMetaResult() { - return resourceMgr.getMetaResult(); - } - - /** - * Setter for unit test - */ - public void setStats(Stats stats) { - this.stats = stats; - this.resourceMgr = new ResourceManager(stats, config); - } + /** Connection to ElasticSearch */ + private final Client client; + + /** Query plan configuration */ + private final Config config; + + /** Optimized logical plan */ + private final LogicalPlan logicalPlan; + + /** Best physical plan to execute */ + private final PhysicalPlan physicalPlan; + + /** Statistics collector */ + private Stats stats; + + /** Resource monitor and statistics manager */ + private ResourceManager resourceMgr; + + public QueryPlanner(Client client, Config config, QueryParams params) { + this.client = client; + this.config = config; + this.stats = new Stats(client); + this.resourceMgr = new ResourceManager(stats, config); + + logicalPlan = new LogicalPlan(config, params); + logicalPlan.optimize(); + + physicalPlan = new PhysicalPlan(logicalPlan); + physicalPlan.optimize(); + } + + /** + * Execute query plan + * + * @return response of the execution + */ + public List execute() { + ExecuteParams params = new ExecuteParams(); + params.add(CLIENT, client); + params.add(TIMEOUT, config.timeout()); + params.add(RESOURCE_MANAGER, resourceMgr); + return physicalPlan.execute(params); + } + + /** + * Explain query plan + * + * @return explanation string of the plan + */ + public String explain() { + return new Explanation(logicalPlan, physicalPlan, new JsonExplanationFormat(4)).toString(); + } + + public MetaSearchResult getMetaResult() { + return resourceMgr.getMetaResult(); + } + + /** Setter for unit test */ + public void setStats(Stats stats) { + this.stats = stats; + this.resourceMgr = new ResourceManager(stats, config); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java index bd24564de2..4226744f1b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import com.google.common.collect.HashMultimap; @@ -23,126 +22,116 @@ import org.opensearch.sql.legacy.query.planner.physical.Row; import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; -/** - * Projection expression - */ +/** Projection expression */ public class Project implements LogicalOperator, PhysicalOperator { - private static final Logger LOG = LogManager.getLogger(); - - private final PlanNode next; + private static final Logger LOG = LogManager.getLogger(); - /** - * All columns being projected in SELECT in each table - */ - private final Multimap tableAliasColumns; + private final PlanNode next; - /** - * All columns full name (tableAlias.colName) to alias mapping - */ - private final Map fullNameAlias; + /** All columns being projected in SELECT in each table */ + private final Multimap tableAliasColumns; + /** All columns full name (tableAlias.colName) to alias mapping */ + private final Map fullNameAlias; - @SuppressWarnings("unchecked") - public Project(PlanNode next) { - this(next, HashMultimap.create()); - } + @SuppressWarnings("unchecked") + public Project(PlanNode next) { + this(next, HashMultimap.create()); + } - @SuppressWarnings("unchecked") - public Project(PlanNode next, Multimap tableAliasToColumns) { - this.next = next; - this.tableAliasColumns = tableAliasToColumns; - this.fullNameAlias = fullNameAndAlias(); - } - - @Override - public boolean isNoOp() { - return tableAliasColumns.isEmpty(); - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + 
@SuppressWarnings("unchecked") + public Project(PlanNode next, Multimap tableAliasToColumns) { + this.next = next; + this.tableAliasColumns = tableAliasToColumns; + this.fullNameAlias = fullNameAndAlias(); + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - if (!(next instanceof LogicalOperator)) { - throw new IllegalStateException("Only logical operator can perform this toPhysical() operation"); - } - return new PhysicalOperator[]{ - new Project(optimalOps.get(next), tableAliasColumns) // Create physical Project instance - }; - } + @Override + public boolean isNoOp() { + return tableAliasColumns.isEmpty(); + } - @Override - public Cost estimate() { - return new Cost(); - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public boolean hasNext() { - return ((PhysicalOperator) next).hasNext(); + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + if (!(next instanceof LogicalOperator)) { + throw new IllegalStateException( + "Only logical operator can perform this toPhysical() operation"); } - - @SuppressWarnings("unchecked") - @Override - public Row next() { - Row row = ((PhysicalOperator) this.next).next(); - - /* - * Empty means SELECT * which means retain all fields from both tables - * Because push down is always applied, only limited support for this. 
- */ - if (!fullNameAlias.isEmpty()) { - row.retain(fullNameAlias); - } - - LOG.trace("Projected row by fields {}: {}", tableAliasColumns, row); - return row; + return new PhysicalOperator[] { + new Project(optimalOps.get(next), tableAliasColumns) // Create physical Project instance + }; + } + + @Override + public Cost estimate() { + return new Cost(); + } + + @Override + public boolean hasNext() { + return ((PhysicalOperator) next).hasNext(); + } + + @SuppressWarnings("unchecked") + @Override + public Row next() { + Row row = ((PhysicalOperator) this.next).next(); + + /* + * Empty means SELECT * which means retain all fields from both tables + * Because push down is always applied, only limited support for this. + */ + if (!fullNameAlias.isEmpty()) { + row.retain(fullNameAlias); } - public void project(String tableAlias, Collection columns) { - tableAliasColumns.putAll(tableAlias, columns); - } + LOG.trace("Projected row by fields {}: {}", tableAliasColumns, row); + return row; + } - public void projectAll(String tableAlias) { - tableAliasColumns.put(tableAlias, new Field("*", "")); - } + public void project(String tableAlias, Collection columns) { + tableAliasColumns.putAll(tableAlias, columns); + } - public void forEach(BiConsumer> action) { - tableAliasColumns.asMap().forEach(action); - } + public void projectAll(String tableAlias) { + tableAliasColumns.put(tableAlias, new Field("*", "")); + } - public void pushDown(String tableAlias, Project pushedDownProj) { - Collection columns = pushedDownProj.tableAliasColumns.get(tableAlias); - if (columns != null) { - tableAliasColumns.putAll(tableAlias, columns); - } - } + public void forEach(BiConsumer> action) { + tableAliasColumns.asMap().forEach(action); + } - /** - * Return mapping from column full name ("e.age") and alias ("a" in "SELECT e.age AS a") - */ - private Map fullNameAndAlias() { - Map fullNamesAlias = new HashMap<>(); - forEach( - (tableAlias, fields) -> { - for (Field field : fields) { - 
fullNamesAlias.put(tableAlias + "." + field.getName(), field.getAlias()); - } - } - ); - return fullNamesAlias; + public void pushDown(String tableAlias, Project pushedDownProj) { + Collection columns = pushedDownProj.tableAliasColumns.get(tableAlias); + if (columns != null) { + tableAliasColumns.putAll(tableAlias, columns); } - - @Override - public String toString() { - List colStrs = new ArrayList<>(); - for (Map.Entry entry : tableAliasColumns.entries()) { - colStrs.add(entry.getKey() + "." + entry.getValue().getName()); - } - return "Project [ columns=[" + String.join(", ", colStrs) + "] ]"; + } + + /** Return mapping from column full name ("e.age") and alias ("a" in "SELECT e.age AS a") */ + private Map fullNameAndAlias() { + Map fullNamesAlias = new HashMap<>(); + forEach( + (tableAlias, fields) -> { + for (Field field : fields) { + fullNamesAlias.put(tableAlias + "." + field.getName(), field.getAlias()); + } + }); + return fullNamesAlias; + } + + @Override + public String toString() { + List colStrs = new ArrayList<>(); + for (Map.Entry entry : tableAliasColumns.entries()) { + colStrs.add(entry.getKey() + "." 
+ entry.getValue().getName()); } - + return "Project [ columns=[" + String.join(", ", colStrs) + "] ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java index f5a3e28fce..5195894a75 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.rule; import static java.util.stream.Collectors.toList; @@ -18,68 +17,54 @@ import org.opensearch.sql.legacy.query.planner.logical.node.Join; import org.opensearch.sql.legacy.query.planner.logical.node.Project; - -/** - * Projection push down optimization. - */ +/** Projection push down optimization. */ public class ProjectionPushDown implements LogicalPlanVisitor { - /** - * Project used to collect column names in SELECT, ON, ORDER BY... - */ - private final Project project = new Project(null); + /** Project used to collect column names in SELECT, ON, ORDER BY... 
*/ + private final Project project = new Project(null); - @Override - public boolean visit(Project project) { - pushDown(project); - return true; - } + @Override + public boolean visit(Project project) { + pushDown(project); + return true; + } - @Override - public boolean visit(Join join) { - pushDown(join.conditions()); - return true; - } + @Override + public boolean visit(Join join) { + pushDown(join.conditions()); + return true; + } - @Override - public boolean visit(Group group) { - if (!project.isNoOp()) { - group.pushDown(project); - } - return false; // avoid iterating operators in virtual Group + @Override + public boolean visit(Group group) { + if (!project.isNoOp()) { + group.pushDown(project); } + return false; // avoid iterating operators in virtual Group + } - /** - * Note that raw type Project cause generic type of forEach be erased at compile time - */ - private void pushDown(Project project) { - project.forEach(this::project); - } - - private void pushDown(JoinCondition orCond) { - for (int i = 0; i < orCond.groupSize(); i++) { - project( - orCond.leftTableAlias(), - columnNamesToFields(orCond.leftColumnNames(i)) - ); - project( - orCond.rightTableAlias(), - columnNamesToFields(orCond.rightColumnNames(i)) - ); - } - } + /** Note that raw type Project cause generic type of forEach be erased at compile time */ + private void pushDown(Project project) { + project.forEach(this::project); + } - private void project(String tableAlias, Collection columns) { - project.project(tableAlias, columns); // Bug: Field doesn't implement hashCode() which leads to duplicate + private void pushDown(JoinCondition orCond) { + for (int i = 0; i < orCond.groupSize(); i++) { + project(orCond.leftTableAlias(), columnNamesToFields(orCond.leftColumnNames(i))); + project(orCond.rightTableAlias(), columnNamesToFields(orCond.rightColumnNames(i))); } + } - /** - * Convert column name string to Field object with empty alias - */ - private List columnNamesToFields(String[] 
colNames) { - return Arrays.stream(colNames). - map(name -> new Field(name, null)). // Alias is useless for pushed down project - collect(toList()); - } + private void project(String tableAlias, Collection columns) { + project.project( + tableAlias, columns); // Bug: Field doesn't implement hashCode() which leads to duplicate + } + /** Convert column name string to Field object with empty alias */ + private List columnNamesToFields(String[] colNames) { + return Arrays.stream(colNames) + .map(name -> new Field(name, null)) + . // Alias is useless for pushed down project + collect(toList()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java index 61578f91b7..deae266afc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java @@ -3,36 +3,32 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.rule; import org.opensearch.sql.legacy.query.planner.logical.LogicalPlanVisitor; import org.opensearch.sql.legacy.query.planner.logical.node.Filter; import org.opensearch.sql.legacy.query.planner.logical.node.Group; -/** - * Push down selection (filter) - */ +/** Push down selection (filter) */ public class SelectionPushDown implements LogicalPlanVisitor { - /** - * Store the filter found in visit and reused to push down. - * It's not necessary to create a new one because no need to collect filter condition elsewhere - */ - private Filter filter; - - @Override - public boolean visit(Filter filter) { - this.filter = filter; - return true; + /** + * Store the filter found in visit and reused to push down. 
It's not necessary to create a new one + * because no need to collect filter condition elsewhere + */ + private Filter filter; + + @Override + public boolean visit(Filter filter) { + this.filter = filter; + return true; + } + + @Override + public boolean visit(Group group) { + if (filter != null && !filter.isNoOp()) { + group.pushDown(filter); } - - @Override - public boolean visit(Group group) { - if (filter != null && !filter.isNoOp()) { - group.pushDown(filter); - } - return false; // avoid iterating operators in virtual Group - } - + return false; // avoid iterating operators in virtual Group + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java index 9271bae0d7..897beee3e9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical; import java.util.Iterator; @@ -11,40 +10,36 @@ import org.opensearch.sql.legacy.query.planner.core.PlanNode; import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; -/** - * Physical operator - */ +/** Physical operator */ public interface PhysicalOperator extends PlanNode, Iterator>, AutoCloseable { - /** - * Estimate the cost of current physical operator - * - * @return cost - */ - Cost estimate(); - - - /** - * Initialize operator. - * - * @param params exuecution parameters needed - */ - default void open(ExecuteParams params) throws Exception { - for (PlanNode node : children()) { - ((PhysicalOperator) node).open(params); - } + /** + * Estimate the cost of current physical operator + * + * @return cost + */ + Cost estimate(); + + /** + * Initialize operator. 
+ * + * @param params exuecution parameters needed + */ + default void open(ExecuteParams params) throws Exception { + for (PlanNode node : children()) { + ((PhysicalOperator) node).open(params); } - - - /** - * Close resources related to the operator. - * - * @throws Exception potential exception raised - */ - @Override - default void close() { - for (PlanNode node : children()) { - ((PhysicalOperator) node).close(); - } + } + + /** + * Close resources related to the operator. + * + * @throws Exception potential exception raised + */ + @Override + default void close() { + for (PlanNode node : children()) { + ((PhysicalOperator) node).close(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java index eac4e855b0..5a79c63838 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical; import java.util.ArrayList; @@ -18,81 +17,69 @@ import org.opensearch.sql.legacy.query.planner.physical.estimation.Estimation; import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; -/** - * Physical plan - */ +/** Physical plan */ public class PhysicalPlan implements Plan { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); - /** - * Optimized logical plan that being ready for physical planning - */ - private final LogicalPlan logicalPlan; + /** Optimized logical plan that being ready for physical planning */ + private final LogicalPlan logicalPlan; - /** - * Root of physical plan tree - */ - private PhysicalOperator root; + /** Root of physical plan tree */ + private PhysicalOperator root; - public 
PhysicalPlan(LogicalPlan logicalPlan) { - this.logicalPlan = logicalPlan; - } + public PhysicalPlan(LogicalPlan logicalPlan) { + this.logicalPlan = logicalPlan; + } - @Override - public void traverse(Visitor visitor) { - if (root != null) { - root.accept(visitor); - } + @Override + public void traverse(Visitor visitor) { + if (root != null) { + root.accept(visitor); } - - @Override - public void optimize() { - Estimation estimation = new Estimation<>(); - logicalPlan.traverse(estimation); - root = estimation.optimalPlan(); + } + + @Override + public void optimize() { + Estimation estimation = new Estimation<>(); + logicalPlan.traverse(estimation); + root = estimation.optimalPlan(); + } + + /** Execute physical plan after verifying if system is healthy at the moment */ + public List execute(ExecuteParams params) { + if (shouldReject(params)) { + throw new IllegalStateException("Query request rejected due to insufficient resource"); } - /** - * Execute physical plan after verifying if system is healthy at the moment - */ - public List execute(ExecuteParams params) { - if (shouldReject(params)) { - throw new IllegalStateException("Query request rejected due to insufficient resource"); - } - - try (PhysicalOperator op = root) { - return doExecutePlan(op, params); - } catch (Exception e) { - LOG.error("Error happened during execution", e); - // Runtime error or circuit break. Should we return partial result to customer? - throw new IllegalStateException("Error happened during execution", e); - } + try (PhysicalOperator op = root) { + return doExecutePlan(op, params); + } catch (Exception e) { + LOG.error("Error happened during execution", e); + // Runtime error or circuit break. Should we return partial result to customer? 
+ throw new IllegalStateException("Error happened during execution", e); } - - /** - * Reject physical plan execution of new query request if unhealthy - */ - private boolean shouldReject(ExecuteParams params) { - return !((ResourceManager) params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER)).isHealthy(); + } + + /** Reject physical plan execution of new query request if unhealthy */ + private boolean shouldReject(ExecuteParams params) { + return !((ResourceManager) params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER)) + .isHealthy(); + } + + /** Execute physical plan in order: open, fetch result, close */ + private List doExecutePlan(PhysicalOperator op, ExecuteParams params) + throws Exception { + List hits = new ArrayList<>(); + op.open(params); + + while (op.hasNext()) { + hits.add(op.next().data()); } - /** - * Execute physical plan in order: open, fetch result, close - */ - private List doExecutePlan(PhysicalOperator op, - ExecuteParams params) throws Exception { - List hits = new ArrayList<>(); - op.open(params); - - while (op.hasNext()) { - hits.add(op.next().data()); - } - - if (LOG.isTraceEnabled()) { - hits.forEach(hit -> LOG.trace("Final result row: {}", hit.getSourceAsMap())); - } - return hits; + if (LOG.isTraceEnabled()) { + hits.forEach(hit -> LOG.trace("Final result row: {}", hit.getSourceAsMap())); } - + return hits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java index 9e7d81a194..5ed074da6d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical; import java.util.Arrays; @@ -17,106 +16,93 @@ */ public interface Row { - Row NULL = null; - - /** - * Generate key to 
represent identity of the row. - * - * @param colNames column names as keys - * @return row key - */ - RowKey key(String[] colNames); - - - /** - * Combine current row and another row together to generate a new combined row. - * - * @param otherRow another row - * @return combined row - */ - Row combine(Row otherRow); - - - /** - * Retain columns specified and rename to alias if any. - * - * @param colNameAlias column names to alias mapping - */ - void retain(Map colNameAlias); - + Row NULL = null; + + /** + * Generate key to represent identity of the row. + * + * @param colNames column names as keys + * @return row key + */ + RowKey key(String[] colNames); + + /** + * Combine current row and another row together to generate a new combined row. + * + * @param otherRow another row + * @return combined row + */ + Row combine(Row otherRow); + + /** + * Retain columns specified and rename to alias if any. + * + * @param colNameAlias column names to alias mapping + */ + void retain(Map colNameAlias); + + /** + * @return raw data of row wrapped inside + */ + T data(); + + /** Key that help Row be sorted or hashed. */ + class RowKey implements Comparable { + + /** Represent null key if any joined column value is NULL */ + public static final RowKey NULL = null; + + /** Values of row key */ + private final Object[] keys; + + /** Cached hash code since this class is intended to be used by hash table */ + private final int hashCode; + + public RowKey(Object... keys) { + this.keys = keys; + this.hashCode = Objects.hash(keys); + } - /** - * @return raw data of row wrapped inside - */ - T data(); + public Object[] keys() { + return keys; + } + @Override + public int hashCode() { + return hashCode; + } - /** - * Key that help Row be sorted or hashed. 
- */ - class RowKey implements Comparable { + @Override + public boolean equals(Object other) { + return other instanceof RowKey && Arrays.deepEquals(this.keys, ((RowKey) other).keys); + } - /** - * Represent null key if any joined column value is NULL - */ - public static final RowKey NULL = null; + @SuppressWarnings("unchecked") + @Override + public int compareTo(RowKey other) { + for (int i = 0; i < keys.length; i++) { - /** - * Values of row key + /* + * Only one is null, otherwise (both null or non-null) go ahead. + * Always consider NULL is smaller value which means NULL comes last in ASC and first in DESC */ - private final Object[] keys; - - /** - * Cached hash code since this class is intended to be used by hash table - */ - private final int hashCode; - - public RowKey(Object... keys) { - this.keys = keys; - this.hashCode = Objects.hash(keys); - } - - public Object[] keys() { - return keys; - } - - @Override - public int hashCode() { - return hashCode; + if (keys[i] == null ^ other.keys[i] == null) { + return keys[i] == null ? 1 : -1; } - @Override - public boolean equals(Object other) { - return other instanceof RowKey && Arrays.deepEquals(this.keys, ((RowKey) other).keys); - } - - @SuppressWarnings("unchecked") - @Override - public int compareTo(RowKey other) { - for (int i = 0; i < keys.length; i++) { - - /* - * Only one is null, otherwise (both null or non-null) go ahead. - * Always consider NULL is smaller value which means NULL comes last in ASC and first in DESC - */ - if (keys[i] == null ^ other.keys[i] == null) { - return keys[i] == null ? 1 : -1; - } - - if (keys[i] instanceof Comparable) { - int result = ((Comparable) keys[i]).compareTo(other.keys[i]); - if (result != 0) { - return result; - } - } // Ignore incomparable field silently? 
- } - return 0; - } - - @Override - public String toString() { - return "RowKey: " + Arrays.toString(keys); - } + if (keys[i] instanceof Comparable) { + int result = ((Comparable) keys[i]).compareTo(other.keys[i]); + if (result != 0) { + return result; + } + } // Ignore incomparable field silently? + } + return 0; + } + @Override + public String toString() { + return "RowKey: " + Arrays.toString(keys); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java index 9c4bdc5c9e..e09ef5c3fe 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.project; import java.util.List; @@ -16,34 +15,34 @@ import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.BindingTupleRow; -/** - * The definition of Project Operator. - */ +/** The definition of Project Operator. 
*/ @RequiredArgsConstructor public class PhysicalProject implements PhysicalOperator { - private final PhysicalOperator next; - private final List fields; - - @Override - public Cost estimate() { - return null; - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } - - @Override - public boolean hasNext() { - return next.hasNext(); - } - - @Override - public Row next() { - BindingTuple input = next.next().data(); - BindingTuple.BindingTupleBuilder outputBindingTupleBuilder = BindingTuple.builder(); - fields.forEach(field -> outputBindingTupleBuilder.binding(field.getName(), field.getExpr().valueOf(input))); - return new BindingTupleRow(outputBindingTupleBuilder.build()); - } + private final PhysicalOperator next; + private final List fields; + + @Override + public Cost estimate() { + return null; + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } + + @Override + public boolean hasNext() { + return next.hasNext(); + } + + @Override + public Row next() { + BindingTuple input = next.next().data(); + BindingTuple.BindingTupleBuilder outputBindingTupleBuilder = BindingTuple.builder(); + fields.forEach( + field -> + outputBindingTupleBuilder.binding(field.getName(), field.getExpr().valueOf(input))); + return new BindingTupleRow(outputBindingTupleBuilder.build()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java index 8866420218..16ad327a87 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import java.util.Iterator; @@ -21,54 +20,53 @@ 
import org.opensearch.sql.legacy.query.planner.physical.Row; import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; -/** - * The definition of Scroll Operator. - */ +/** The definition of Scroll Operator. */ @RequiredArgsConstructor public class PhysicalScroll implements PhysicalOperator { - private final QueryAction queryAction; + private final QueryAction queryAction; - private Iterator rowIterator; + private Iterator rowIterator; - @Override - public Cost estimate() { - return null; - } + @Override + public Cost estimate() { + return null; + } - @Override - public PlanNode[] children() { - return new PlanNode[0]; - } + @Override + public PlanNode[] children() { + return new PlanNode[0]; + } - @Override - public boolean hasNext() { - return rowIterator.hasNext(); - } + @Override + public boolean hasNext() { + return rowIterator.hasNext(); + } - @Override - public Row next() { - return rowIterator.next(); - } + @Override + public Row next() { + return rowIterator.next(); + } - @Override - public void open(ExecuteParams params) { - try { - ActionResponse response = queryAction.explain().get(); - if (queryAction instanceof AggregationQueryAction) { - rowIterator = SearchAggregationResponseHelper - .populateSearchAggregationResponse(((SearchResponse) response).getAggregations()) - .iterator(); - } else { - throw new IllegalStateException("Not support QueryAction type: " + queryAction.getClass()); - } - } catch (SqlParseException e) { - throw new RuntimeException(e); - } + @Override + public void open(ExecuteParams params) { + try { + ActionResponse response = queryAction.explain().get(); + if (queryAction instanceof AggregationQueryAction) { + rowIterator = + SearchAggregationResponseHelper.populateSearchAggregationResponse( + ((SearchResponse) response).getAggregations()) + .iterator(); + } else { + throw new IllegalStateException("Not support QueryAction type: " + queryAction.getClass()); + } + } catch (SqlParseException e) { + throw new 
RuntimeException(e); } + } - @SneakyThrows - @Override - public String toString() { - return queryAction.explain().toString(); - } + @SneakyThrows + @Override + public String toString() { + return queryAction.explain().toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java index 2d781d7c3d..40e9860886 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import java.util.Arrays; @@ -31,170 +30,160 @@ import org.opensearch.sql.legacy.query.planner.physical.node.BatchPhysicalOperator; import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; -/** - * OpenSearch Scroll API as physical implementation of TableScan - */ +/** OpenSearch Scroll API as physical implementation of TableScan */ public class Scroll extends BatchPhysicalOperator { - /** - * Request to submit to OpenSearch to scroll over - */ - private final TableInJoinRequestBuilder request; - - /** - * Page size to scroll over index - */ - private final int pageSize; - - /** - * Client connection to ElasticSearch - */ - private Client client; - - /** - * Currently undergoing Scroll - */ - private SearchResponse scrollResponse; - - /** - * Time out - */ - private Integer timeout; - - /** - * Resource monitor manager - */ - private ResourceManager resourceMgr; - - - public Scroll(TableInJoinRequestBuilder request, int pageSize) { - this.request = request; - this.pageSize = pageSize; + /** Request to submit to OpenSearch to scroll over */ + private final TableInJoinRequestBuilder request; + + /** Page size to scroll over index */ + private final int pageSize; 
+ + /** Client connection to ElasticSearch */ + private Client client; + + /** Currently undergoing Scroll */ + private SearchResponse scrollResponse; + + /** Time out */ + private Integer timeout; + + /** Resource monitor manager */ + private ResourceManager resourceMgr; + + public Scroll(TableInJoinRequestBuilder request, int pageSize) { + this.request = request; + this.pageSize = pageSize; + } + + @Override + public PlanNode[] children() { + return new PlanNode[0]; + } + + @Override + public Cost estimate() { + return new Cost(); + } + + @Override + public void open(ExecuteParams params) throws Exception { + super.open(params); + client = params.get(ExecuteParams.ExecuteParamType.CLIENT); + timeout = params.get(ExecuteParams.ExecuteParamType.TIMEOUT); + resourceMgr = params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER); + + Object filter = params.get(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER); + if (filter instanceof BoolQueryBuilder) { + request + .getRequestBuilder() + .setQuery(generateNewQueryWithExtraFilter((BoolQueryBuilder) filter)); + + if (LOG.isDebugEnabled()) { + LOG.debug( + "Received extra query filter, re-build query: {}", + Strings.toString( + XContentType.JSON, request.getRequestBuilder().request().source(), true, true)); + } } - - @Override - public PlanNode[] children() { - return new PlanNode[0]; + } + + @Override + public void close() { + if (scrollResponse != null) { + LOG.debug("Closing all scroll resources"); + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(scrollResponse.getScrollId()).get(); + if (!clearScrollResponse.isSucceeded()) { + LOG.warn("Failed to close scroll: {}", clearScrollResponse.status()); + } + scrollResponse = null; + } else { + LOG.debug("Scroll already be closed"); } - - @Override - public Cost estimate() { - return new Cost(); + } + + @Override + protected Collection> prefetch() { + Objects.requireNonNull(client, "Client connection is not ready"); + 
Objects.requireNonNull(resourceMgr, "ResourceManager is not set"); + Objects.requireNonNull(timeout, "Time out is not set"); + + if (scrollResponse == null) { + loadFirstBatch(); + updateMetaResult(); + } else { + loadNextBatchByScrollId(); } - - @Override - public void open(ExecuteParams params) throws Exception { - super.open(params); - client = params.get(ExecuteParams.ExecuteParamType.CLIENT); - timeout = params.get(ExecuteParams.ExecuteParamType.TIMEOUT); - resourceMgr = params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER); - - Object filter = params.get(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER); - if (filter instanceof BoolQueryBuilder) { - request.getRequestBuilder().setQuery( - generateNewQueryWithExtraFilter((BoolQueryBuilder) filter)); - - if (LOG.isDebugEnabled()) { - LOG.debug("Received extra query filter, re-build query: {}", Strings.toString(XContentType.JSON, - request.getRequestBuilder().request().source(), true, true - )); - } - } + return wrapRowForCurrentBatch(); + } + + /** + * Extra filter pushed down from upstream. Re-parse WHERE clause with extra filter because + * OpenSearch RequestBuilder doesn't allow QueryBuilder inside be changed after added. + */ + private QueryBuilder generateNewQueryWithExtraFilter(BoolQueryBuilder filter) + throws SqlParseException { + Where where = request.getOriginalSelect().getWhere(); + BoolQueryBuilder newQuery; + if (where != null) { + newQuery = QueryMaker.explain(where, false); + newQuery.must(filter); + } else { + newQuery = filter; } - - @Override - public void close() { - if (scrollResponse != null) { - LOG.debug("Closing all scroll resources"); - ClearScrollResponse clearScrollResponse = client.prepareClearScroll(). - addScrollId(scrollResponse.getScrollId()). 
- get(); - if (!clearScrollResponse.isSucceeded()) { - LOG.warn("Failed to close scroll: {}", clearScrollResponse.status()); - } - scrollResponse = null; - } else { - LOG.debug("Scroll already be closed"); - } + return newQuery; + } + + private void loadFirstBatch() { + scrollResponse = + request + .getRequestBuilder() + .addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC) + .setSize(pageSize) + .setScroll(TimeValue.timeValueSeconds(timeout)) + .get(); + } + + private void updateMetaResult() { + resourceMgr.getMetaResult().addTotalNumOfShards(scrollResponse.getTotalShards()); + resourceMgr.getMetaResult().addSuccessfulShards(scrollResponse.getSuccessfulShards()); + resourceMgr.getMetaResult().addFailedShards(scrollResponse.getFailedShards()); + resourceMgr.getMetaResult().updateTimeOut(scrollResponse.isTimedOut()); + } + + private void loadNextBatchByScrollId() { + scrollResponse = + client + .prepareSearchScroll(scrollResponse.getScrollId()) + .setScroll(TimeValue.timeValueSeconds(timeout)) + .get(); + } + + @SuppressWarnings("unchecked") + private Collection> wrapRowForCurrentBatch() { + SearchHit[] hits = scrollResponse.getHits().getHits(); + Row[] rows = new Row[hits.length]; + for (int i = 0; i < hits.length; i++) { + rows[i] = new SearchHitRow(hits[i], request.getAlias()); } + return Arrays.asList(rows); + } - @Override - protected Collection> prefetch() { - Objects.requireNonNull(client, "Client connection is not ready"); - Objects.requireNonNull(resourceMgr, "ResourceManager is not set"); - Objects.requireNonNull(timeout, "Time out is not set"); - - if (scrollResponse == null) { - loadFirstBatch(); - updateMetaResult(); - } else { - loadNextBatchByScrollId(); - } - return wrapRowForCurrentBatch(); - } + @Override + public String toString() { + return "Scroll [ " + describeTable() + ", pageSize=" + pageSize + " ]"; + } - /** - * Extra filter pushed down from upstream. 
Re-parse WHERE clause with extra filter - * because OpenSearch RequestBuilder doesn't allow QueryBuilder inside be changed after added. - */ - private QueryBuilder generateNewQueryWithExtraFilter(BoolQueryBuilder filter) throws SqlParseException { - Where where = request.getOriginalSelect().getWhere(); - BoolQueryBuilder newQuery; - if (where != null) { - newQuery = QueryMaker.explain(where, false); - newQuery.must(filter); - } else { - newQuery = filter; - } - return newQuery; - } + private String describeTable() { + return request.getOriginalSelect().getFrom().get(0).getIndex() + " as " + request.getAlias(); + } - private void loadFirstBatch() { - scrollResponse = request.getRequestBuilder(). - addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC). - setSize(pageSize). - setScroll(TimeValue.timeValueSeconds(timeout)). - get(); - } - - private void updateMetaResult() { - resourceMgr.getMetaResult().addTotalNumOfShards(scrollResponse.getTotalShards()); - resourceMgr.getMetaResult().addSuccessfulShards(scrollResponse.getSuccessfulShards()); - resourceMgr.getMetaResult().addFailedShards(scrollResponse.getFailedShards()); - resourceMgr.getMetaResult().updateTimeOut(scrollResponse.isTimedOut()); - } - - private void loadNextBatchByScrollId() { - scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()). - setScroll(TimeValue.timeValueSeconds(timeout)). 
- get(); - } - - @SuppressWarnings("unchecked") - private Collection> wrapRowForCurrentBatch() { - SearchHit[] hits = scrollResponse.getHits().getHits(); - Row[] rows = new Row[hits.length]; - for (int i = 0; i < hits.length; i++) { - rows[i] = new SearchHitRow(hits[i], request.getAlias()); - } - return Arrays.asList(rows); - } + /********************************************* + * Getters for Explain + *********************************************/ - @Override - public String toString() { - return "Scroll [ " + describeTable() + ", pageSize=" + pageSize + " ]"; - } - - private String describeTable() { - return request.getOriginalSelect().getFrom().get(0).getIndex() + " as " + request.getAlias(); - } - - - /********************************************* - * Getters for Explain - *********************************************/ - - public String getRequest() { - return Strings.toString(XContentType.JSON, request.getRequestBuilder().request().source()); - } + public String getRequest() { + return Strings.toString(XContentType.JSON, request.getRequestBuilder().request().source()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java index 5e0ce1f2b4..ed0e0f2423 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import com.google.common.annotations.VisibleForTesting; @@ -22,70 +21,82 @@ import org.opensearch.search.aggregations.metrics.Percentiles; import org.opensearch.sql.legacy.expression.domain.BindingTuple; -/** - * The definition of 
Search {@link Aggregations} parser helper class. - */ +/** The definition of Search {@link Aggregations} parser helper class. */ public class SearchAggregationResponseHelper { - public static List populateSearchAggregationResponse(Aggregations aggs) { - List> flatten = flatten(aggs); - List bindingTupleList = flatten.stream() - .map(BindingTuple::from) - .map(bindingTuple -> new BindingTupleRow(bindingTuple)) - .collect(Collectors.toList()); - return bindingTupleList; - } + public static List populateSearchAggregationResponse(Aggregations aggs) { + List> flatten = flatten(aggs); + List bindingTupleList = + flatten.stream() + .map(BindingTuple::from) + .map(bindingTuple -> new BindingTupleRow(bindingTuple)) + .collect(Collectors.toList()); + return bindingTupleList; + } - @VisibleForTesting - public static List> flatten(Aggregations aggregations) { - List aggregationList = aggregations.asList(); - List> resultList = new ArrayList<>(); - Map resultMap = new HashMap<>(); - for (Aggregation aggregation : aggregationList) { - if (aggregation instanceof Terms) { - for (Terms.Bucket bucket : ((Terms) aggregation).getBuckets()) { - List> internalBucketList = flatten(bucket.getAggregations()); - fillResultListWithInternalBucket(resultList, internalBucketList, aggregation.getName(), - bucket.getKey()); - } - } else if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - resultMap.put(aggregation.getName(), ((NumericMetricsAggregation.SingleValue) aggregation).value()); - } else if (aggregation instanceof Percentiles) { - Percentiles percentiles = (Percentiles) aggregation; - resultMap.putAll((Map) StreamSupport.stream(percentiles.spliterator(), false) - .collect(Collectors.toMap( - (percentile) -> String.format("%s_%s", percentiles.getName(), percentile.getPercent()), - Percentile::getValue, (v1, v2) -> { - throw new IllegalArgumentException( - String.format("Duplicate key for values %s and %s", v1, v2)); - }, HashMap::new))); - } else if (aggregation 
instanceof Histogram) { - for (Histogram.Bucket bucket : ((Histogram) aggregation).getBuckets()) { - List> internalBucketList = flatten(bucket.getAggregations()); - fillResultListWithInternalBucket(resultList, internalBucketList, aggregation.getName(), - bucket.getKeyAsString()); - } - } else { - throw new RuntimeException("unsupported aggregation type " + aggregation.getType()); - } + @VisibleForTesting + public static List> flatten(Aggregations aggregations) { + List aggregationList = aggregations.asList(); + List> resultList = new ArrayList<>(); + Map resultMap = new HashMap<>(); + for (Aggregation aggregation : aggregationList) { + if (aggregation instanceof Terms) { + for (Terms.Bucket bucket : ((Terms) aggregation).getBuckets()) { + List> internalBucketList = flatten(bucket.getAggregations()); + fillResultListWithInternalBucket( + resultList, internalBucketList, aggregation.getName(), bucket.getKey()); } - if (!resultMap.isEmpty()) { - resultList.add(resultMap); + } else if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + resultMap.put( + aggregation.getName(), ((NumericMetricsAggregation.SingleValue) aggregation).value()); + } else if (aggregation instanceof Percentiles) { + Percentiles percentiles = (Percentiles) aggregation; + resultMap.putAll( + (Map) + StreamSupport.stream(percentiles.spliterator(), false) + .collect( + Collectors.toMap( + (percentile) -> + String.format( + "%s_%s", percentiles.getName(), percentile.getPercent()), + Percentile::getValue, + (v1, v2) -> { + throw new IllegalArgumentException( + String.format("Duplicate key for values %s and %s", v1, v2)); + }, + HashMap::new))); + } else if (aggregation instanceof Histogram) { + for (Histogram.Bucket bucket : ((Histogram) aggregation).getBuckets()) { + List> internalBucketList = flatten(bucket.getAggregations()); + fillResultListWithInternalBucket( + resultList, internalBucketList, aggregation.getName(), bucket.getKeyAsString()); } - return resultList; + } else { + throw 
new RuntimeException("unsupported aggregation type " + aggregation.getType()); + } + } + if (!resultMap.isEmpty()) { + resultList.add(resultMap); } + return resultList; + } - private static void fillResultListWithInternalBucket(List> resultList, - List> internalBucketList, - String aggregationName, Object bucketKey) { - if (internalBucketList.isEmpty()) { - resultList.add(new HashMap() {{ - put(aggregationName, bucketKey); - }}); - } else { - for (Map map : internalBucketList) { - map.put(aggregationName, bucketKey); + private static void fillResultListWithInternalBucket( + List> resultList, + List> internalBucketList, + String aggregationName, + Object bucketKey) { + if (internalBucketList.isEmpty()) { + resultList.add( + new HashMap() { + { + put(aggregationName, bucketKey); } - resultList.addAll(internalBucketList); - } + }); + } else { + for (Map map : internalBucketList) { + map.put(aggregationName, bucketKey); + } + resultList.addAll(internalBucketList); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java index 27e3072bab..1750563e47 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import com.google.common.base.Strings; @@ -15,6 +14,7 @@ import org.opensearch.sql.legacy.query.planner.physical.Row; /** + *
  * Search hit row that implements basic accessor for SearchHit.
  * Encapsulate all OpenSearch specific knowledge: how to parse source including nested path.
  * 

@@ -32,164 +32,156 @@ * ---------------------------------------------------------------------------------------------------------------------- * retain() in Project | {"firstName": "Allen", "age": 30 } | "" | retain("e.name.first", "e.age") * ---------------------------------------------------------------------------------------------------------------------- + *

*/ class SearchHitRow implements Row { - /** - * Native OpenSearch data object for each row - */ - private final SearchHit hit; - - /** - * Column and value pairs - */ - private final Map source; - - /** - * Table alias owned the row. Empty if this row comes from combination of two other rows - */ - private final String tableAlias; + /** Native OpenSearch data object for each row */ + private final SearchHit hit; - SearchHitRow(SearchHit hit, String tableAlias) { - this.hit = hit; - this.source = hit.getSourceAsMap(); - this.tableAlias = tableAlias; - } + /** Column and value pairs */ + private final Map source; - @Override - public RowKey key(String[] colNames) { - if (colNames.length == 0) { - return RowKey.NULL; - } - - Object[] keys = new Object[colNames.length]; - for (int i = 0; i < colNames.length; i++) { - keys[i] = getValueOfPath(colNames[i]); - - if (keys[i] == null) { - return RowKey.NULL; - } - } - return new RowKey(keys); - } + /** Table alias owned the row. Empty if this row comes from combination of two other rows */ + private final String tableAlias; - /** - * Replace column name by full name to avoid naming conflicts. - * For efficiency, this only happens here when matched rows found. - * Create a new one to avoid mutating the original ones in hash table which impact subsequent match. 
- */ - @Override - public Row combine(Row other) { - SearchHit combined = cloneHit(other); - - collectFullName(combined.getSourceAsMap(), this); - if (other != NULL) { - collectFullName(combined.getSourceAsMap(), (SearchHitRow) other); - } - return new SearchHitRow(combined, ""); - } + SearchHitRow(SearchHit hit, String tableAlias) { + this.hit = hit; + this.source = hit.getSourceAsMap(); + this.tableAlias = tableAlias; + } - @Override - public void retain(Map colNameAlias) { - Map aliasSource = new HashMap<>(); - colNameAlias.forEach((colName, alias) -> { - if (colName.endsWith(".*")) { - String tableAlias = colName.substring(0, colName.length() - 2) + "."; - retainAllFieldsFromTable(aliasSource, tableAlias); - } else { - retainOneField(aliasSource, colName, alias); - } - }); - resetSource(aliasSource); + @Override + public RowKey key(String[] colNames) { + if (colNames.length == 0) { + return RowKey.NULL; } - @Override - public SearchHit data() { - return hit; - } + Object[] keys = new Object[colNames.length]; + for (int i = 0; i < colNames.length; i++) { + keys[i] = getValueOfPath(colNames[i]); - @Override - public String toString() { - return "SearchHitRow{" + "hit=" + source + '}'; + if (keys[i] == null) { + return RowKey.NULL; + } } - - private Object getValueOfPath(String path) { - /* - * If table alias is missing which means the row was generated by combine(). - * In this case, table alias is present and the first dot should be ignored, ex. "e.name.first" - */ - return getValueOfPath(source, path, Strings.isNullOrEmpty(tableAlias)); + return new RowKey(keys); + } + + /** + * Replace column name by full name to avoid naming conflicts. For efficiency, this only happens + * here when matched rows found. Create a new one to avoid mutating the original ones in hash + * table which impact subsequent match. 
+ */ + @Override + public Row combine(Row other) { + SearchHit combined = cloneHit(other); + + collectFullName(combined.getSourceAsMap(), this); + if (other != NULL) { + collectFullName(combined.getSourceAsMap(), (SearchHitRow) other); } - - /** - * Recursively get value for field name path, such as object field a.b.c + return new SearchHitRow(combined, ""); + } + + @Override + public void retain(Map colNameAlias) { + Map aliasSource = new HashMap<>(); + colNameAlias.forEach( + (colName, alias) -> { + if (colName.endsWith(".*")) { + String tableAlias = colName.substring(0, colName.length() - 2) + "."; + retainAllFieldsFromTable(aliasSource, tableAlias); + } else { + retainOneField(aliasSource, colName, alias); + } + }); + resetSource(aliasSource); + } + + @Override + public SearchHit data() { + return hit; + } + + @Override + public String toString() { + return "SearchHitRow{" + "hit=" + source + '}'; + } + + private Object getValueOfPath(String path) { + /* + * If table alias is missing which means the row was generated by combine(). + * In this case, table alias is present and the first dot should be ignored, ex. "e.name.first" */ - private Object getValueOfPath(Object source, String path, boolean isIgnoreFirstDot) { - if (!(source instanceof Map) || path.isEmpty()) { - return source; - } - - int dot = path.indexOf('.', (isIgnoreFirstDot ? path.indexOf('.') + 1 : 0)); - if (dot == -1) { - return ((Map) source).get(path); - } - - // Object field name maybe unexpanded without recursive object structure - // ex. 
{"a.b.c": value} instead of {"a": {"b": {"c": value}}}} - if (((Map) source).containsKey(path)) { - return ((Map) source).get(path); - } - - return getValueOfPath( - ((Map) source).get(path.substring(0, dot)), - path.substring(dot + 1), - false - ); - } + return getValueOfPath(source, path, Strings.isNullOrEmpty(tableAlias)); + } - private SearchHit cloneHit(Row other) { - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit combined = new SearchHit( - hit.docId(), - hit.getId() + "|" + (other == NULL ? "0" : ((SearchHitRow) other).hit.getId()), - documentFields, - metaFields - ); - combined.sourceRef(hit.getSourceRef()); - combined.getSourceAsMap().clear(); - return combined; + /** Recursively get value for field name path, such as object field a.b.c */ + private Object getValueOfPath(Object source, String path, boolean isIgnoreFirstDot) { + if (!(source instanceof Map) || path.isEmpty()) { + return source; } - private void collectFullName(Map newSource, SearchHitRow row) { - row.source.forEach((colName, value) -> newSource.put(row.tableAlias + "." + colName, value)); + int dot = path.indexOf('.', (isIgnoreFirstDot ? path.indexOf('.') + 1 : 0)); + if (dot == -1) { + return ((Map) source).get(path); } - private void retainAllFieldsFromTable(Map aliasSource, String tableAlias) { - source.entrySet(). - stream(). - filter(e -> e.getKey().startsWith(tableAlias)). - forEach(e -> aliasSource.put(e.getKey(), e.getValue())); + // Object field name maybe unexpanded without recursive object structure + // ex. {"a.b.c": value} instead of {"a": {"b": {"c": value}}}} + if (((Map) source).containsKey(path)) { + return ((Map) source).get(path); } - /** - * Note that column here is already prefixed by table alias after combine(). - *

- * Meanwhile check if column name with table alias prefix, ex. a.name, is property, namely a.name.lastname. - * In this case, split by first second dot and continue searching for the final value in nested map - * by getValueOfPath(source.get("a.name"), "lastname") - */ - private void retainOneField(Map aliasSource, String colName, String alias) { - aliasSource.put( - Strings.isNullOrEmpty(alias) ? colName : alias, - getValueOfPath(colName) - ); - } - - private void resetSource(Map newSource) { - source.clear(); - source.putAll(newSource); - } + return getValueOfPath( + ((Map) source).get(path.substring(0, dot)), path.substring(dot + 1), false); + } + + private SearchHit cloneHit(Row other) { + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit combined = + new SearchHit( + hit.docId(), + hit.getId() + "|" + (other == NULL ? "0" : ((SearchHitRow) other).hit.getId()), + documentFields, + metaFields); + combined.sourceRef(hit.getSourceRef()); + combined.getSourceAsMap().clear(); + return combined; + } + + private void collectFullName(Map newSource, SearchHitRow row) { + row.source.forEach((colName, value) -> newSource.put(row.tableAlias + "." + colName, value)); + } + + private void retainAllFieldsFromTable(Map aliasSource, String tableAlias) { + source.entrySet().stream() + .filter(e -> e.getKey().startsWith(tableAlias)) + .forEach(e -> aliasSource.put(e.getKey(), e.getValue())); + } + + /** + * Note that column here is already prefixed by table alias after combine(). + * + *

Meanwhile check if column name with table alias prefix, ex. a.name, is property, namely + * a.name.lastname. In this case, split by first second dot and continue searching for the final + * value in nested map by getValueOfPath(source.get("a.name"), "lastname") + */ + private void retainOneField(Map aliasSource, String colName, String alias) { + aliasSource.put(Strings.isNullOrEmpty(alias) ? colName : alias, getValueOfPath(colName)); + } + + private void resetSource(Map newSource) { + source.clear(); + source.putAll(newSource); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java index 90ae595d56..abfcf273ad 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.sort; import static java.util.Collections.emptyList; @@ -23,83 +22,80 @@ import org.opensearch.sql.legacy.query.planner.physical.node.BatchPhysicalOperator; /** - * Physical operator to sort by quick sort implementation in JDK. - * Note that this is all in-memory operator which may be a problem for large index. + * Physical operator to sort by quick sort implementation in JDK. Note that this is all in-memory + * operator which may be a problem for large index. 
* * @param actual data type, ex.SearchHit */ public class QuickSort extends BatchPhysicalOperator { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); - private final PhysicalOperator next; + private final PhysicalOperator next; - /** - * Column name list in ORDER BY - */ - private final String[] orderByColNames; + /** Column name list in ORDER BY */ + private final String[] orderByColNames; - /** - * Order by type, ex. ASC, DESC - */ - private final String orderByType; + /** Order by type, ex. ASC, DESC */ + private final String orderByType; - private boolean isDone = false; + private boolean isDone = false; - public QuickSort(PhysicalOperator next, List orderByColNames, String orderByType) { - this.next = next; - this.orderByColNames = orderByColNames.toArray(new String[0]); - this.orderByType = orderByType; - } + public QuickSort(PhysicalOperator next, List orderByColNames, String orderByType) { + this.next = next; + this.orderByColNames = orderByColNames.toArray(new String[0]); + this.orderByType = orderByType; + } - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public Cost estimate() { - return new Cost(); - } + @Override + public Cost estimate() { + return new Cost(); + } - @Override - public void open(ExecuteParams params) throws Exception { - super.open(params); - next.open(params); - } + @Override + public void open(ExecuteParams params) throws Exception { + super.open(params); + next.open(params); + } - /** - * Only load all data once and return one batch - */ - @Override - protected Collection> prefetch() { - if (isDone) { - return emptyList(); - } - - List> allRowsSorted = new ArrayList<>(); - next.forEachRemaining(allRowsSorted::add); - allRowsSorted.sort(createRowComparator()); - - if (LOG.isTraceEnabled()) { - LOG.trace("All rows being sorted in RB-Tree: 
{}", allRowsSorted); - } - - isDone = true; - return allRowsSorted; + /** Only load all data once and return one batch */ + @Override + protected Collection> prefetch() { + if (isDone) { + return emptyList(); } - private Comparator> createRowComparator() { - Comparator> comparator = Comparator.comparing(o -> o.key(orderByColNames)); - if ("DESC".equals(orderByType)) { - comparator = comparator.reversed(); - } - return comparator; - } + List> allRowsSorted = new ArrayList<>(); + next.forEachRemaining(allRowsSorted::add); + allRowsSorted.sort(createRowComparator()); - @Override - public String toString() { - return "QuickSort [ columns=" + Arrays.toString(orderByColNames) + ", order=" + orderByType + " ]"; + if (LOG.isTraceEnabled()) { + LOG.trace("All rows being sorted in RB-Tree: {}", allRowsSorted); } + isDone = true; + return allRowsSorted; + } + + private Comparator> createRowComparator() { + Comparator> comparator = Comparator.comparing(o -> o.key(orderByColNames)); + if ("DESC".equals(orderByType)) { + comparator = comparator.reversed(); + } + return comparator; + } + + @Override + public String toString() { + return "QuickSort [ columns=" + + Arrays.toString(orderByColNames) + + ", order=" + + orderByType + + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java index 32cc7f45e3..4818d0a3ee 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource; import java.time.Duration; @@ -18,55 +17,48 @@ import org.opensearch.sql.legacy.query.planner.resource.monitor.Monitor; import org.opensearch.sql.legacy.query.planner.resource.monitor.TotalMemoryMonitor; -/** 
- * Aggregated resource monitor - */ +/** Aggregated resource monitor */ public class ResourceManager { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); + + /** Actual resource monitor list */ + private final List monitors = new ArrayList<>(); - /** - * Actual resource monitor list - */ - private final List monitors = new ArrayList<>(); + /** Time out for the execution */ + private final int timeout; - /** - * Time out for the execution - */ - private final int timeout; - private final Instant startTime; + private final Instant startTime; - /** - * Meta result of the execution - */ - private final MetaSearchResult metaResult; + /** Meta result of the execution */ + private final MetaSearchResult metaResult; - public ResourceManager(Stats stats, Config config) { - this.monitors.add(new TotalMemoryMonitor(stats, config)); - this.timeout = config.timeout(); - this.startTime = Instant.now(); - this.metaResult = new MetaSearchResult(); - } + public ResourceManager(Stats stats, Config config) { + this.monitors.add(new TotalMemoryMonitor(stats, config)); + this.timeout = config.timeout(); + this.startTime = Instant.now(); + this.metaResult = new MetaSearchResult(); + } - /** - * Is all resource monitor healthy with strategy. - * - * @return true for yes - */ - public boolean isHealthy() { - return BackOffRetryStrategy.isHealthy(); - } + /** + * Is all resource monitor healthy with strategy. + * + * @return true for yes + */ + public boolean isHealthy() { + return BackOffRetryStrategy.isHealthy(); + } - /** - * Is current execution time out? - * - * @return true for yes - */ - public boolean isTimeout() { - return Duration.between(startTime, Instant.now()).getSeconds() >= timeout; - } + /** + * Is current execution time out? 
+ * + * @return true for yes + */ + public boolean isTimeout() { + return Duration.between(startTime, Instant.now()).getSeconds() >= timeout; + } - public MetaSearchResult getMetaResult() { - return metaResult; - } + public MetaSearchResult getMetaResult() { + return metaResult; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java index deff4e2393..c32e529157 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.request; import java.util.List; @@ -11,174 +10,181 @@ public class PreparedStatementRequest extends SqlRequest { - private List parameters; - private String sqlTemplate; - - public PreparedStatementRequest(String sql, JSONObject payloadJson, List parameters) { - super(null, payloadJson); - this.sqlTemplate = sql; - this.parameters = parameters; - this.sql = this.substituteParameters(); - } - - public PreparedStatementRequest(String sql, final Integer fetchSize, - JSONObject payloadJson, List parameters) { - this(sql, payloadJson, parameters); - this.fetchSize = fetchSize; + private List parameters; + private String sqlTemplate; + + public PreparedStatementRequest( + String sql, JSONObject payloadJson, List parameters) { + super(null, payloadJson); + this.sqlTemplate = sql; + this.parameters = parameters; + this.sql = this.substituteParameters(); + } + + public PreparedStatementRequest( + String sql, + final Integer fetchSize, + JSONObject payloadJson, + List parameters) { + this(sql, payloadJson, parameters); + this.fetchSize = fetchSize; + } + + public List getParameters() { + return this.parameters; + } + + @Override + public String getSql() { + return this.sql; + } + + public String 
getPreparedStatement() { + return this.sqlTemplate; + } + + private String substituteParameters() { + if (this.sqlTemplate == null) { + return null; } - public List getParameters() { - return this.parameters; - } - - @Override - public String getSql() { - return this.sql; - } - - public String getPreparedStatement() { - return this.sqlTemplate; - } - - private String substituteParameters() { - if (this.sqlTemplate == null) { - return null; - } - - StringBuilder sb = new StringBuilder(); - int paramIndex = 0; - int i = 0; + StringBuilder sb = new StringBuilder(); + int paramIndex = 0; + int i = 0; + while (i < this.sqlTemplate.length()) { + char c = this.sqlTemplate.charAt(i); + if (c == '\'') { + // found string starting quote character, skip the string + sb.append(c); + i++; while (i < this.sqlTemplate.length()) { - char c = this.sqlTemplate.charAt(i); - if (c == '\'') { - // found string starting quote character, skip the string - sb.append(c); - i++; - while (i < this.sqlTemplate.length()) { - char s = this.sqlTemplate.charAt(i); - sb.append(s); - if (s == '\'') { - if (this.sqlTemplate.charAt(i - 1) == '\\') { - // this is an escaped single quote (\') still in the string - i++; - } else if ((i + 1) < this.sqlTemplate.length() && this.sqlTemplate.charAt(i + 1) == '\'') { - // found 2 single quote {''} in a string, which is escaped single quote {'} - // move to next character - sb.append('\''); - i += 2; - } else { - // found the string ending single quote char - break; - } - } else { - // not single quote character, move on - i++; - } - } - } else if (c == '?') { - // question mark "?" not in a string - if (paramIndex >= this.parameters.size()) { - throw new IllegalStateException("Placeholder count is greater than parameter number " - + parameters.size() + " . 
Cannot convert PreparedStatement to sql query"); - } - sb.append(this.parameters.get(paramIndex).getSqlSubstitutionValue()); - paramIndex++; + char s = this.sqlTemplate.charAt(i); + sb.append(s); + if (s == '\'') { + if (this.sqlTemplate.charAt(i - 1) == '\\') { + // this is an escaped single quote (\') still in the string + i++; + } else if ((i + 1) < this.sqlTemplate.length() + && this.sqlTemplate.charAt(i + 1) == '\'') { + // found 2 single quote {''} in a string, which is escaped single quote {'} + // move to next character + sb.append('\''); + i += 2; } else { - // other character, simply append - sb.append(c); + // found the string ending single quote char + break; } + } else { + // not single quote character, move on i++; + } } - - return sb.toString(); + } else if (c == '?') { + // question mark "?" not in a string + if (paramIndex >= this.parameters.size()) { + throw new IllegalStateException( + "Placeholder count is greater than parameter number " + + parameters.size() + + " . 
Cannot convert PreparedStatement to sql query"); + } + sb.append(this.parameters.get(paramIndex).getSqlSubstitutionValue()); + paramIndex++; + } else { + // other character, simply append + sb.append(c); + } + i++; } - ////////////////////////////////////////////////// - // Parameter related types below - ////////////////////////////////////////////////// - public enum ParameterType { - BYTE, - SHORT, - INTEGER, - LONG, - FLOAT, - DOUBLE, - BOOLEAN, - STRING, - KEYWORD, - DATE, - NULL + return sb.toString(); + } + + ////////////////////////////////////////////////// + // Parameter related types below + ////////////////////////////////////////////////// + public enum ParameterType { + BYTE, + SHORT, + INTEGER, + LONG, + FLOAT, + DOUBLE, + BOOLEAN, + STRING, + KEYWORD, + DATE, + NULL + } + + public static class PreparedStatementParameter { + protected T value; + + public PreparedStatementParameter(T value) { + this.value = value; } - public static class PreparedStatementParameter { - protected T value; - - public PreparedStatementParameter(T value) { - this.value = value; - } - - public String getSqlSubstitutionValue() { - return String.valueOf(this.value); - } + public String getSqlSubstitutionValue() { + return String.valueOf(this.value); + } - public T getValue() { - return this.value; - } + public T getValue() { + return this.value; } + } - public static class StringParameter extends PreparedStatementParameter { + public static class StringParameter extends PreparedStatementParameter { - public StringParameter(String value) { - super(value); - } + public StringParameter(String value) { + super(value); + } - @Override - public String getSqlSubstitutionValue() { - // TODO: investigate other injection prevention - if (this.value == null) { - return "null"; - } - StringBuilder sb = new StringBuilder(); - sb.append('\''); // starting quote - for (int i = 0; i < this.value.length(); i++) { - char c = this.value.charAt(i); - switch (c) { - case 0: - 
sb.append('\\').append(0); - break; - case '\n': - sb.append('\\').append('n'); - break; - case '\r': - sb.append('\\').append('r'); - break; - case '\\': - sb.append('\\').append('\\'); - break; - case '\'': - sb.append('\\').append('\''); - break; - case '\"': - sb.append('\\').append('\"'); - break; - default: - sb.append(c); - } - } - sb.append('\''); // ending quote - return sb.toString(); + @Override + public String getSqlSubstitutionValue() { + // TODO: investigate other injection prevention + if (this.value == null) { + return "null"; + } + StringBuilder sb = new StringBuilder(); + sb.append('\''); // starting quote + for (int i = 0; i < this.value.length(); i++) { + char c = this.value.charAt(i); + switch (c) { + case 0: + sb.append('\\').append(0); + break; + case '\n': + sb.append('\\').append('n'); + break; + case '\r': + sb.append('\\').append('r'); + break; + case '\\': + sb.append('\\').append('\\'); + break; + case '\'': + sb.append('\\').append('\''); + break; + case '\"': + sb.append('\\').append('\"'); + break; + default: + sb.append(c); } + } + sb.append('\''); // ending quote + return sb.toString(); } + } - public static class NullParameter extends PreparedStatementParameter { + public static class NullParameter extends PreparedStatementParameter { - public NullParameter() { - super(null); - } + public NullParameter() { + super(null); + } - @Override - public String getSqlSubstitutionValue() { - return "null"; - } + @Override + public String getSqlSubstitutionValue() { + return "null"; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java index 6744bfa3e5..cd6400ed88 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java @@ -3,29 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter; 
import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; import java.sql.SQLFeatureNotSupportedException; -/** - * Query Optimize Rule - */ +/** Query Optimize Rule */ public interface RewriteRule { - /** - * Checking whether the rule match the query? - * - * @return true if the rule match to the query. - * @throws SQLFeatureNotSupportedException - */ - boolean match(T expr) throws SQLFeatureNotSupportedException; + /** + * Checking whether the rule match the query? + * + * @return true if the rule match to the query. + * @throws SQLFeatureNotSupportedException + */ + boolean match(T expr) throws SQLFeatureNotSupportedException; - /** - * Optimize the query. - * - * @throws SQLFeatureNotSupportedException - */ - void rewrite(T expr) throws SQLFeatureNotSupportedException; + /** + * Optimize the query. + * + * @throws SQLFeatureNotSupportedException + */ + void rewrite(T expr) throws SQLFeatureNotSupportedException; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java index 86aa3d0b20..20fd018ae8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; @@ -11,50 +10,42 @@ import java.util.ArrayList; import java.util.List; -/** - * Query RewriteRuleExecutor which will execute the {@link RewriteRule} with registered order. - */ +/** Query RewriteRuleExecutor which will execute the {@link RewriteRule} with registered order. 
*/ public class RewriteRuleExecutor { - private final List> rewriteRules; - - public RewriteRuleExecutor(List> rewriteRules) { - this.rewriteRules = rewriteRules; + private final List> rewriteRules; + + public RewriteRuleExecutor(List> rewriteRules) { + this.rewriteRules = rewriteRules; + } + + /** Execute the registered {@link RewriteRule} in order on the Query. */ + public void executeOn(T expr) throws SQLFeatureNotSupportedException { + for (RewriteRule rule : rewriteRules) { + if (rule.match(expr)) { + rule.rewrite(expr); + } } - - /** - * Execute the registered {@link RewriteRule} in order on the Query. - */ - public void executeOn(T expr) throws SQLFeatureNotSupportedException { - for (RewriteRule rule : rewriteRules) { - if (rule.match(expr)) { - rule.rewrite(expr); - } - } - } - - /** - * Build {@link RewriteRuleExecutor} - */ - public static BuilderOptimizer builder() { - return new BuilderOptimizer(); + } + + /** Build {@link RewriteRuleExecutor} */ + public static BuilderOptimizer builder() { + return new BuilderOptimizer(); + } + + /** Builder of {@link RewriteRuleExecutor} */ + public static class BuilderOptimizer { + private List> rewriteRules; + + public BuilderOptimizer withRule(RewriteRule rule) { + if (rewriteRules == null) { + rewriteRules = new ArrayList<>(); + } + rewriteRules.add(rule); + return this; } - /** - * Builder of {@link RewriteRuleExecutor} - */ - public static class BuilderOptimizer { - private List> rewriteRules; - - public BuilderOptimizer withRule(RewriteRule rule) { - if (rewriteRules == null) { - rewriteRules = new ArrayList<>(); - } - rewriteRules.add(rule); - return this; - } - - public RewriteRuleExecutor build() { - return new RewriteRuleExecutor(rewriteRules); - } + public RewriteRuleExecutor build() { + return new RewriteRuleExecutor(rewriteRules); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java index 4fa4611f9a..83a94b1e9b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -33,86 +32,90 @@ import org.opensearch.sql.legacy.rewriter.matchtoterm.VerificationException; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Nested field projection class to make OpenSearch return matched rows in nested field. - */ +/** Nested field projection class to make OpenSearch return matched rows in nested field. */ public class NestedFieldProjection { - private final SearchRequestBuilder request; + private final SearchRequestBuilder request; + + public NestedFieldProjection(SearchRequestBuilder request) { + this.request = request; + } + + /** + * Project nested field in SELECT clause to InnerHit in NestedQueryBuilder + * + * @param fields list of field domain object + */ + public void project(List fields, JoinType nestedJoinType) { + if (isAnyNestedField(fields)) { + initBoolQueryFilterIfNull(); + List nestedQueries = extractNestedQueries(query()); + + if (nestedJoinType == JoinType.LEFT_OUTER_JOIN) { + // for LEFT JOIN on nested field as right table, the query will have only one nested field, + // so one path + Map> fieldNamesByPath = groupFieldNamesByPath(fields); + + if (fieldNamesByPath.size() > 1) { + String message = + StringUtils.format( + "only single nested field is allowed as right table for LEFT JOIN, found %s ", + fieldNamesByPath.keySet()); + + throw new VerificationException(message); + } - public NestedFieldProjection(SearchRequestBuilder request) { - this.request = request; + Map.Entry> pathToFields = + 
fieldNamesByPath.entrySet().iterator().next(); + String path = pathToFields.getKey(); + List fieldNames = pathToFields.getValue(); + buildNestedLeftJoinQuery(path, fieldNames); + } else { + + groupFieldNamesByPath(fields) + .forEach( + (path, fieldNames) -> + buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path))); + } } - - /** - * Project nested field in SELECT clause to InnerHit in NestedQueryBuilder - * - * @param fields list of field domain object - */ - public void project(List fields, JoinType nestedJoinType) { - if (isAnyNestedField(fields)) { - initBoolQueryFilterIfNull(); - List nestedQueries = extractNestedQueries(query()); - - if (nestedJoinType == JoinType.LEFT_OUTER_JOIN) { - // for LEFT JOIN on nested field as right table, the query will have only one nested field, so one path - Map> fieldNamesByPath = groupFieldNamesByPath(fields); - - if (fieldNamesByPath.size() > 1) { - String message = StringUtils.format( - "only single nested field is allowed as right table for LEFT JOIN, found %s ", - fieldNamesByPath.keySet() - ); - - throw new VerificationException(message); - } - - Map.Entry> pathToFields = fieldNamesByPath.entrySet().iterator().next(); - String path = pathToFields.getKey(); - List fieldNames = pathToFields.getValue(); - buildNestedLeftJoinQuery(path, fieldNames); - } else { - - groupFieldNamesByPath(fields).forEach( - (path, fieldNames) -> buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path)) - ); - } - } + } + + /** + * Check via traditional for loop first to avoid lambda performance impact on all queries even + * though those without nested field + */ + private boolean isAnyNestedField(List fields) { + for (Field field : fields) { + if (field.isNested() && !field.isReverseNested()) { + return true; + } } + return false; + } - /** - * Check via traditional for loop first to avoid lambda performance impact on all queries - * even though those without nested field - */ - private boolean 
isAnyNestedField(List fields) { - for (Field field : fields) { - if (field.isNested() && !field.isReverseNested()) { - return true; - } - } - return false; + private void initBoolQueryFilterIfNull() { + if (request.request().source() == null || query() == null) { + request.setQuery(boolQuery()); } - - private void initBoolQueryFilterIfNull() { - if (request.request().source() == null || query() == null) { - request.setQuery(boolQuery()); - } - if (query().filter().isEmpty()) { - query().filter(boolQuery()); - } + if (query().filter().isEmpty()) { + query().filter(boolQuery()); } + } - private Map> groupFieldNamesByPath(List fields) { - return fields.stream(). - filter(Field::isNested). - filter(not(Field::isReverseNested)). - collect(groupingBy(Field::getNestedPath, mapping(Field::getName, toList()))); - } + private Map> groupFieldNamesByPath(List fields) { + return fields.stream() + .filter(Field::isNested) + .filter(not(Field::isReverseNested)) + .collect(groupingBy(Field::getNestedPath, mapping(Field::getName, toList()))); + } /** * Why search for NestedQueryBuilder recursively? - * Because 1) it was added and wrapped by BoolQuery when WHERE explained (far from here) - * 2) InnerHit must be added to the NestedQueryBuilder related + * Because + *

    + *
  1. it was added and wrapped by BoolQuery when WHERE explained (far from here) + *
  2. InnerHit must be added to the NestedQueryBuilder related + *
*

* Either we store it to global data structure (which requires to be thread-safe or ThreadLocal) * or we peel off BoolQuery to find it (the way we followed here because recursion tree should be very thin). @@ -130,55 +133,54 @@ private List extractNestedQueries(QueryBuilder query) { return result; } - private void buildInnerHit(List fieldNames, NestedQueryBuilder query) { - query.innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, fieldNames.toArray(new String[0]), null) - )); - } - - /** - * Why linear search? Because NestedQueryBuilder hides "path" field from any access. - * Assumption: collected NestedQueryBuilder list should be very small or mostly only one. - */ - private NestedQueryBuilder findNestedQueryWithSamePath(List nestedQueries, String path) { - return nestedQueries.stream(). - filter(query -> isSamePath(path, query)). - findAny(). - orElseGet(createEmptyNestedQuery(path)); - } - - private boolean isSamePath(String path, NestedQueryBuilder query) { - return nestedQuery(path, query.query(), query.scoreMode()).equals(query); - } - - /** - * Create a nested query with match all filter to place inner hits - */ - private Supplier createEmptyNestedQuery(String path) { - return () -> { - NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); - ((BoolQueryBuilder) query().filter().get(0)).must(nestedQuery); - return nestedQuery; - }; - } - - private BoolQueryBuilder query() { - return (BoolQueryBuilder) request.request().source().query(); - } - - private Predicate not(Predicate predicate) { - return predicate.negate(); - } - - - private void buildNestedLeftJoinQuery(String path, List fieldNames) { - BoolQueryBuilder existsNestedQuery = boolQuery(); - existsNestedQuery.mustNot().add(nestedQuery(path, existsQuery(path), ScoreMode.None)); - - NestedQueryBuilder matchAllNestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); - buildInnerHit(fieldNames, matchAllNestedQuery); - - 
((BoolQueryBuilder) query().filter().get(0)).should().add(existsNestedQuery); - ((BoolQueryBuilder) query().filter().get(0)).should().add(matchAllNestedQuery); - } + private void buildInnerHit(List fieldNames, NestedQueryBuilder query) { + query.innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, fieldNames.toArray(new String[0]), null))); + } + + /** + * Why linear search? Because NestedQueryBuilder hides "path" field from any access. Assumption: + * collected NestedQueryBuilder list should be very small or mostly only one. + */ + private NestedQueryBuilder findNestedQueryWithSamePath( + List nestedQueries, String path) { + return nestedQueries.stream() + .filter(query -> isSamePath(path, query)) + .findAny() + .orElseGet(createEmptyNestedQuery(path)); + } + + private boolean isSamePath(String path, NestedQueryBuilder query) { + return nestedQuery(path, query.query(), query.scoreMode()).equals(query); + } + + /** Create a nested query with match all filter to place inner hits */ + private Supplier createEmptyNestedQuery(String path) { + return () -> { + NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); + ((BoolQueryBuilder) query().filter().get(0)).must(nestedQuery); + return nestedQuery; + }; + } + + private BoolQueryBuilder query() { + return (BoolQueryBuilder) request.request().source().query(); + } + + private Predicate not(Predicate predicate) { + return predicate.negate(); + } + + private void buildNestedLeftJoinQuery(String path, List fieldNames) { + BoolQueryBuilder existsNestedQuery = boolQuery(); + existsNestedQuery.mustNot().add(nestedQuery(path, existsQuery(path), ScoreMode.None)); + + NestedQueryBuilder matchAllNestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); + buildInnerHit(fieldNames, matchAllNestedQuery); + + ((BoolQueryBuilder) query().filter().get(0)).should().add(existsNestedQuery); + ((BoolQueryBuilder) 
query().filter().get(0)).should().add(matchAllNestedQuery); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java index f93f5e344e..976075a72d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static org.opensearch.sql.legacy.utils.Util.NESTED_JOIN_TYPE; @@ -16,6 +15,7 @@ import java.util.Deque; /** + *

  * Visitor to rewrite AST (abstract syntax tree) for nested type fields to support implicit nested() function call.
  * Intuitively, the approach is to implement SQLIdentifier.visit() and wrap nested() function for nested field.
  * The parsing result of FROM clause will be used to determine if an identifier is nested field.
@@ -47,66 +47,64 @@
  * 1) Manage environment in the case of subquery
  * 2) Add nested field to select for SELECT *
  * 3) Merge conditions of same nested field to single nested() call
+ * 
*/ public class NestedFieldRewriter extends MySqlASTVisitorAdapter { - /** - * Scope stack to record the state (nested field names etc) for current query. - * In the case of subquery, the active scope of current query is the top element of the stack. - */ - private Deque environment = new ArrayDeque<>(); - - /** - * Rewrite FROM here to make sure FROM statement always be visited before other statement in query. - * Note that return true anyway to continue visiting FROM in subquery if any. - */ - @Override - public boolean visit(MySqlSelectQueryBlock query) { - environment.push(new Scope()); - if (query.getFrom() == null) { - return false; - } - - query.getFrom().setParent(query); - new From(query.getFrom()).rewrite(curScope()); + /** + * Scope stack to record the state (nested field names etc) for current query. In the case of + * subquery, the active scope of current query is the top element of the stack. + */ + private Deque environment = new ArrayDeque<>(); + + /** + * Rewrite FROM here to make sure FROM statement always be visited before other statement in + * query. Note that return true anyway to continue visiting FROM in subquery if any. 
+ */ + @Override + public boolean visit(MySqlSelectQueryBlock query) { + environment.push(new Scope()); + if (query.getFrom() == null) { + return false; + } - if (curScope().isAnyNestedField() && isNotGroupBy(query)) { - new Select(query.getSelectList()).rewrite(curScope()); - } + query.getFrom().setParent(query); + new From(query.getFrom()).rewrite(curScope()); - query.putAttribute(NESTED_JOIN_TYPE, curScope().getActualJoinType()); - return true; + if (curScope().isAnyNestedField() && isNotGroupBy(query)) { + new Select(query.getSelectList()).rewrite(curScope()); } - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (curScope().isAnyNestedField()) { - new Identifier(expr).rewrite(curScope()); - } - return true; - } + query.putAttribute(NESTED_JOIN_TYPE, curScope().getActualJoinType()); + return true; + } - @Override - public void endVisit(SQLBinaryOpExpr expr) { - if (curScope().isAnyNestedField()) { - new Where(expr).rewrite(curScope()); - } + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (curScope().isAnyNestedField()) { + new Identifier(expr).rewrite(curScope()); } + return true; + } - @Override - public void endVisit(MySqlSelectQueryBlock query) { - environment.pop(); + @Override + public void endVisit(SQLBinaryOpExpr expr) { + if (curScope().isAnyNestedField()) { + new Where(expr).rewrite(curScope()); } + } - /** - * Current scope which is top of the stack - */ - private Scope curScope() { - return environment.peek(); - } + @Override + public void endVisit(MySqlSelectQueryBlock query) { + environment.pop(); + } - private boolean isNotGroupBy(MySqlSelectQueryBlock query) { - return query.getGroupBy() == null; - } + /** Current scope which is top of the stack */ + private Scope curScope() { + return environment.peek(); + } + private boolean isNotGroupBy(MySqlSelectQueryBlock query) { + return query.getGroupBy() == null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java index 5f035bc725..f65d7f166b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -14,71 +13,68 @@ import java.util.Map; import java.util.Set; -/** - * Nested field information in current query being visited. - */ +/** Nested field information in current query being visited. */ class Scope { - /** Join Type as passed in the actual SQL subquery */ - private JoinType actualJoinType; - - /** Alias of parent such as alias "t" of parent table "team" in "FROM team t, t.employees e" */ - - private String parentAlias; - - /** - * Mapping from nested field path alias to path full name in FROM. - * eg. e in {e => employees} in "FROM t.employees e" - */ - private Map aliasFullPaths = new HashMap<>(); - - /** - * Mapping from binary operation condition (in WHERE) to nested - * field tag (full path for nested, EMPTY for non-nested field) - */ - private Map conditionTags = new IdentityHashMap<>(); - - String getParentAlias() { - return parentAlias; - } - - void setParentAlias(String parentAlias) { - this.parentAlias = parentAlias; + /** Join Type as passed in the actual SQL subquery */ + private JoinType actualJoinType; + + /** Alias of parent such as alias "t" of parent table "team" in "FROM team t, t.employees e" */ + private String parentAlias; + + /** + * Mapping from nested field path alias to path full name in FROM. eg. 
e in {e => employees} in + * "FROM t.employees e" + */ + private Map aliasFullPaths = new HashMap<>(); + + /** + * Mapping from binary operation condition (in WHERE) to nested field tag (full path for nested, + * EMPTY for non-nested field) + */ + private Map conditionTags = new IdentityHashMap<>(); + + String getParentAlias() { + return parentAlias; + } + + void setParentAlias(String parentAlias) { + this.parentAlias = parentAlias; + } + + void addAliasFullPath(String alias, String path) { + if (alias.isEmpty()) { + aliasFullPaths.put(path, path); + } else { + aliasFullPaths.put(alias, path); } + } - void addAliasFullPath(String alias, String path) { - if (alias.isEmpty()) { - aliasFullPaths.put(path, path); - } else { - aliasFullPaths.put(alias, path); - } - } + String getFullPath(String alias) { + return aliasFullPaths.getOrDefault(alias, ""); + } - String getFullPath(String alias) { - return aliasFullPaths.getOrDefault(alias, ""); - } + boolean isAnyNestedField() { + return !aliasFullPaths.isEmpty(); + } - boolean isAnyNestedField() { - return !aliasFullPaths.isEmpty(); - } + Set getAliases() { + return aliasFullPaths.keySet(); + } - Set getAliases() { - return aliasFullPaths.keySet(); - } + String getConditionTag(SQLBinaryOpExpr expr) { + return conditionTags.getOrDefault(expr, ""); + } - String getConditionTag(SQLBinaryOpExpr expr) { - return conditionTags.getOrDefault(expr, ""); - } + void addConditionTag(SQLBinaryOpExpr expr, String tag) { + conditionTags.put(expr, tag); + } - void addConditionTag(SQLBinaryOpExpr expr, String tag) { - conditionTags.put(expr, tag); - } - - JoinType getActualJoinType() { - return actualJoinType; - } + JoinType getActualJoinType() { + return actualJoinType; + } - void setActualJoinType(JoinType joinType) { - actualJoinType = joinType; - } + void setActualJoinType(JoinType joinType) { + actualJoinType = joinType; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java index f514e6d081..8d2d6402e1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import com.alibaba.druid.sql.ast.expr.SQLAllColumnExpr; @@ -11,39 +10,37 @@ import com.alibaba.druid.sql.ast.statement.SQLSelectItem; import java.util.List; -/** - * Column list in SELECT statement. - */ +/** Column list in SELECT statement. */ class Select extends SQLClause> { - Select(List expr) { - super(expr); - } - - /** - * Rewrite by adding nested field to SELECT in the case of 'SELECT *'. - *

- * Ex. 'SELECT *' => 'SELECT *, employees.*' - * So that NestedFieldProjection will add 'employees.*' to includes list in inner_hits. - */ - @Override - void rewrite(Scope scope) { - if (isSelectAllOnly()) { - addSelectAllForNestedField(scope); - } + Select(List expr) { + super(expr); + } + + /** + * Rewrite by adding nested field to SELECT in the case of 'SELECT *'. + * + *

Ex. 'SELECT *' => 'SELECT *, employees.*' So that NestedFieldProjection will add + * 'employees.*' to includes list in inner_hits. + */ + @Override + void rewrite(Scope scope) { + if (isSelectAllOnly()) { + addSelectAllForNestedField(scope); } + } - private boolean isSelectAllOnly() { - return expr.size() == 1 && expr.get(0).getExpr() instanceof SQLAllColumnExpr; - } + private boolean isSelectAllOnly() { + return expr.size() == 1 && expr.get(0).getExpr() instanceof SQLAllColumnExpr; + } - private void addSelectAllForNestedField(Scope scope) { - for (String alias : scope.getAliases()) { - expr.add(createSelectItem(alias + ".*")); - } + private void addSelectAllForNestedField(Scope scope) { + for (String alias : scope.getAliases()) { + expr.add(createSelectItem(alias + ".*")); } + } - private SQLSelectItem createSelectItem(String name) { - return new SQLSelectItem(new SQLIdentifierExpr(name)); - } + private SQLSelectItem createSelectItem(String name) { + return new SQLSelectItem(new SQLIdentifierExpr(name)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java index 1d44ac8261..03ff07b1b8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.ordinal; import com.alibaba.druid.sql.ast.SQLExpr; @@ -23,128 +22,131 @@ import org.opensearch.sql.legacy.rewriter.matchtoterm.VerificationException; /** - * Rewrite rule for changing ordinal alias in order by and group by to actual select field. 
- * Since we cannot clone or deepcopy the Druid SQL objects, we need to generate the - * two syntax tree from the original query to map Group By and Order By fields with ordinal alias - * to Select fields in newly generated syntax tree. + * Rewrite rule for changing ordinal alias in order by and group by to actual select field. Since we + * cannot clone or deepcopy the Druid SQL objects, we need to generate the two syntax tree from the + * original query to map Group By and Order By fields with ordinal alias to Select fields in newly + * generated syntax tree. * - * This rewriter assumes that all the backticks have been removed from identifiers. - * It also assumes that table alias have been removed from SELECT, WHERE, GROUP BY, ORDER BY fields. + *

This rewriter assumes that all the backticks have been removed from identifiers. It also + * assumes that table alias have been removed from SELECT, WHERE, GROUP BY, ORDER BY fields. */ - public class OrdinalRewriterRule implements RewriteRule { - private final String sql; + private final String sql; - public OrdinalRewriterRule(String sql) { - this.sql = sql; - } + public OrdinalRewriterRule(String sql) { + this.sql = sql; + } - @Override - public boolean match(SQLQueryExpr root) { - SQLSelectQuery sqlSelectQuery = root.getSubQuery().getQuery(); - if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { - // it could be SQLUnionQuery - return false; - } - - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; - if (!hasGroupByWithOrdinals(query) && !hasOrderByWithOrdinals(query)) { - return false; - } - return true; + @Override + public boolean match(SQLQueryExpr root) { + SQLSelectQuery sqlSelectQuery = root.getSubQuery().getQuery(); + if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { + // it could be SQLUnionQuery + return false; } - @Override - public void rewrite(SQLQueryExpr root) { - // we cannot clone SQLSelectItem, so we need similar objects to assign to GroupBy and OrderBy items - SQLQueryExpr sqlExprGroupCopy = toSqlExpr(); - SQLQueryExpr sqlExprOrderCopy = toSqlExpr(); - - changeOrdinalAliasInGroupAndOrderBy(root, sqlExprGroupCopy, sqlExprOrderCopy); + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; + if (!hasGroupByWithOrdinals(query) && !hasOrderByWithOrdinals(query)) { + return false; } - - private void changeOrdinalAliasInGroupAndOrderBy(SQLQueryExpr root, - SQLQueryExpr exprGroup, - SQLQueryExpr exprOrder) { - root.accept(new MySqlASTVisitorAdapter() { - - private String groupException = "Invalid ordinal [%s] specified in [GROUP BY %s]"; - private String orderException = "Invalid ordinal [%s] specified in [ORDER BY %s]"; - - private List groupSelectList = ((MySqlSelectQueryBlock) 
exprGroup.getSubQuery().getQuery()) - .getSelectList(); - - private List orderSelectList = ((MySqlSelectQueryBlock) exprOrder.getSubQuery().getQuery()) - .getSelectList(); - - @Override - public boolean visit(MySqlSelectGroupByExpr groupByExpr) { - SQLExpr expr = groupByExpr.getExpr(); - if (expr instanceof SQLIntegerExpr) { - Integer ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); - SQLExpr newExpr = checkAndGet(groupSelectList, ordinalValue, groupException); - groupByExpr.setExpr(newExpr); - newExpr.setParent(groupByExpr); - } - return false; + return true; + } + + @Override + public void rewrite(SQLQueryExpr root) { + // we cannot clone SQLSelectItem, so we need similar objects to assign to GroupBy and OrderBy + // items + SQLQueryExpr sqlExprGroupCopy = toSqlExpr(); + SQLQueryExpr sqlExprOrderCopy = toSqlExpr(); + + changeOrdinalAliasInGroupAndOrderBy(root, sqlExprGroupCopy, sqlExprOrderCopy); + } + + private void changeOrdinalAliasInGroupAndOrderBy( + SQLQueryExpr root, SQLQueryExpr exprGroup, SQLQueryExpr exprOrder) { + root.accept( + new MySqlASTVisitorAdapter() { + + private String groupException = "Invalid ordinal [%s] specified in [GROUP BY %s]"; + private String orderException = "Invalid ordinal [%s] specified in [ORDER BY %s]"; + + private List groupSelectList = + ((MySqlSelectQueryBlock) exprGroup.getSubQuery().getQuery()).getSelectList(); + + private List orderSelectList = + ((MySqlSelectQueryBlock) exprOrder.getSubQuery().getQuery()).getSelectList(); + + @Override + public boolean visit(MySqlSelectGroupByExpr groupByExpr) { + SQLExpr expr = groupByExpr.getExpr(); + if (expr instanceof SQLIntegerExpr) { + Integer ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); + SQLExpr newExpr = checkAndGet(groupSelectList, ordinalValue, groupException); + groupByExpr.setExpr(newExpr); + newExpr.setParent(groupByExpr); } - - @Override - public boolean visit(SQLSelectOrderByItem orderByItem) { - SQLExpr expr = orderByItem.getExpr(); - 
Integer ordinalValue; - - if (expr instanceof SQLIntegerExpr) { - ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); - SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); - orderByItem.setExpr(newExpr); - newExpr.setParent(orderByItem); - } else if (expr instanceof SQLBinaryOpExpr - && ((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIntegerExpr) { - // support ORDER BY IS NULL/NOT NULL - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; - SQLIntegerExpr integerExpr = (SQLIntegerExpr) binaryOpExpr.getLeft(); - - ordinalValue = integerExpr.getNumber().intValue(); - SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); - binaryOpExpr.setLeft(newExpr); - newExpr.setParent(binaryOpExpr); - } - - return false; + return false; + } + + @Override + public boolean visit(SQLSelectOrderByItem orderByItem) { + SQLExpr expr = orderByItem.getExpr(); + Integer ordinalValue; + + if (expr instanceof SQLIntegerExpr) { + ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); + SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); + orderByItem.setExpr(newExpr); + newExpr.setParent(orderByItem); + } else if (expr instanceof SQLBinaryOpExpr + && ((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIntegerExpr) { + // support ORDER BY IS NULL/NOT NULL + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; + SQLIntegerExpr integerExpr = (SQLIntegerExpr) binaryOpExpr.getLeft(); + + ordinalValue = integerExpr.getNumber().intValue(); + SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); + binaryOpExpr.setLeft(newExpr); + newExpr.setParent(binaryOpExpr); } - }); - } - private SQLExpr checkAndGet(List selectList, Integer ordinal, String exception) { - if (ordinal > selectList.size()) { - throw new VerificationException(String.format(exception, ordinal, ordinal)); - } + return false; + } + }); + } - return selectList.get(ordinal-1).getExpr(); + private SQLExpr 
checkAndGet(List selectList, Integer ordinal, String exception) { + if (ordinal > selectList.size()) { + throw new VerificationException(String.format(exception, ordinal, ordinal)); } - private boolean hasGroupByWithOrdinals(MySqlSelectQueryBlock query) { - if (query.getGroupBy() == null) { - return false; - } else if (query.getGroupBy().getItems().isEmpty()){ - return false; - } + return selectList.get(ordinal - 1).getExpr(); + } - return query.getGroupBy().getItems().stream().anyMatch(x -> - x instanceof MySqlSelectGroupByExpr && ((MySqlSelectGroupByExpr) x).getExpr() instanceof SQLIntegerExpr - ); + private boolean hasGroupByWithOrdinals(MySqlSelectQueryBlock query) { + if (query.getGroupBy() == null) { + return false; + } else if (query.getGroupBy().getItems().isEmpty()) { + return false; } - private boolean hasOrderByWithOrdinals(MySqlSelectQueryBlock query) { - if (query.getOrderBy() == null) { - return false; - } else if (query.getOrderBy().getItems().isEmpty()){ - return false; - } + return query.getGroupBy().getItems().stream() + .anyMatch( + x -> + x instanceof MySqlSelectGroupByExpr + && ((MySqlSelectGroupByExpr) x).getExpr() instanceof SQLIntegerExpr); + } + + private boolean hasOrderByWithOrdinals(MySqlSelectQueryBlock query) { + if (query.getOrderBy() == null) { + return false; + } else if (query.getOrderBy().getItems().isEmpty()) { + return false; + } /** + *

          * The second condition checks valid AST that meets ORDER BY IS NULL/NOT NULL condition
          *
          *            SQLSelectOrderByItem
@@ -152,6 +154,7 @@ private boolean hasOrderByWithOrdinals(MySqlSelectQueryBlock query) {
          *             SQLBinaryOpExpr (Is || IsNot)
          *                    /  \
          *    SQLIdentifierExpr  SQLNullExpr
+         *  
*/ return query.getOrderBy().getItems().stream().anyMatch(x -> x.getExpr() instanceof SQLIntegerExpr @@ -162,9 +165,9 @@ private boolean hasOrderByWithOrdinals(MySqlSelectQueryBlock query) { ); } - private SQLQueryExpr toSqlExpr() { - SQLExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - return (SQLQueryExpr) expr; - } + private SQLQueryExpr toSqlExpr() { + SQLExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + return (SQLQueryExpr) expr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java index ce254e2103..b300015d49 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.statement.SQLExprTableSource; @@ -14,53 +13,51 @@ import java.util.Map; /** - * {@link NestedQueryContext} build the context with Query to detected the specified table is nested or not. - * Todo current implementation doesn't rely on the index mapping which should be added after the semantics is builded. + * {@link NestedQueryContext} build the context with Query to detected the specified table is nested + * or not. + *
Todo current implementation doesn't rely on the index mapping which should be added after + * the semantics is built. */ public class NestedQueryContext { - private static final String SEPARATOR = "."; - private static final String EMPTY = ""; - // , if parentTable not exist, parentTableAlias = ""; - private final Map aliasParents = new HashMap<>(); + private static final String SEPARATOR = "."; + private static final String EMPTY = ""; + // , if parentTable not exist, parentTableAlias = ""; + private final Map aliasParents = new HashMap<>(); - /** - * Is the table refer to the nested field of the parent table. - */ - public boolean isNested(SQLExprTableSource table) { - String parent = parent(table); - if (Strings.isNullOrEmpty(parent)) { - return !Strings.isNullOrEmpty(aliasParents.get(alias(table))); - } else { - return aliasParents.containsKey(parent); - } + /** Is the table refer to the nested field of the parent table. */ + public boolean isNested(SQLExprTableSource table) { + String parent = parent(table); + if (Strings.isNullOrEmpty(parent)) { + return !Strings.isNullOrEmpty(aliasParents.get(alias(table))); + } else { + return aliasParents.containsKey(parent); } + } - /** - * add table to the context. - */ - public void add(SQLTableSource table) { - if (table instanceof SQLExprTableSource) { - process((SQLExprTableSource) table); - } else if (table instanceof SQLJoinTableSource) { - add(((SQLJoinTableSource) table).getLeft()); - add(((SQLJoinTableSource) table).getRight()); - } else { - throw new IllegalStateException("unsupported table source"); - } + /** add table to the context. 
*/ + public void add(SQLTableSource table) { + if (table instanceof SQLExprTableSource) { + process((SQLExprTableSource) table); + } else if (table instanceof SQLJoinTableSource) { + add(((SQLJoinTableSource) table).getLeft()); + add(((SQLJoinTableSource) table).getRight()); + } else { + throw new IllegalStateException("unsupported table source"); } + } - private void process(SQLExprTableSource table) { - String alias = alias(table); - String parent = parent(table); - if (!Strings.isNullOrEmpty(alias)) { - aliasParents.putIfAbsent(alias, parent); - } + private void process(SQLExprTableSource table) { + String alias = alias(table); + String parent = parent(table); + if (!Strings.isNullOrEmpty(alias)) { + aliasParents.putIfAbsent(alias, parent); } + } /** - * Extract the parent alias from the tableName. For example - * SELECT * FROM employee e, e.project as p, - * For expr: employee, the parent alias is "". + * Extract the parent alias from the tableName. For example
+ * SELECT * FROM employee e, e.project as p,
+ * For expr: employee, the parent alias is "".
* For expr: e.project, the parent alias is e. */ private String parent(SQLExprTableSource table) { @@ -69,10 +66,10 @@ private String parent(SQLExprTableSource table) { return index == -1 ? EMPTY : tableName.substring(0, index); } - private String alias(SQLExprTableSource table) { - if (Strings.isNullOrEmpty(table.getAlias())) { - return table.getExpr().toString(); - } - return table.getAlias(); + private String alias(SQLExprTableSource table) { + if (Strings.isNullOrEmpty(table.getAlias())) { + return table.getExpr().toString(); } + return table.getAlias(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java index 09698095e6..54cba6547b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.SQLExpr; @@ -19,68 +18,66 @@ import java.util.Deque; import java.util.List; -/** - * Environment for rewriting the SQL. - */ +/** Environment for rewriting the SQL. 
*/ public class RewriterContext { - private final Deque tableStack = new ArrayDeque<>(); - private final Deque conditionStack = new ArrayDeque<>(); - private final List sqlInSubQueryExprs = new ArrayList<>(); - private final List sqlExistsExprs = new ArrayList<>(); - private final NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + private final Deque tableStack = new ArrayDeque<>(); + private final Deque conditionStack = new ArrayDeque<>(); + private final List sqlInSubQueryExprs = new ArrayList<>(); + private final List sqlExistsExprs = new ArrayList<>(); + private final NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - public SQLTableSource popJoin() { - return tableStack.pop(); - } + public SQLTableSource popJoin() { + return tableStack.pop(); + } - public SQLExpr popWhere() { - return conditionStack.pop(); - } + public SQLExpr popWhere() { + return conditionStack.pop(); + } - public void addWhere(SQLExpr expr) { - conditionStack.push(expr); - } + public void addWhere(SQLExpr expr) { + conditionStack.push(expr); + } - /** - * Add the Join right table and {@link JoinType} and {@link SQLBinaryOpExpr} which will - * merge the left table in the tableStack. - */ - public void addJoin(SQLTableSource right, JoinType joinType, SQLBinaryOpExpr condition) { - SQLTableSource left = tableStack.pop(); - SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); - joinTableSource.setLeft(left); - joinTableSource.setRight(right); - joinTableSource.setJoinType(joinType); - joinTableSource.setCondition(condition); - tableStack.push(joinTableSource); - } + /** + * Add the Join right table and {@link JoinType} and {@link SQLBinaryOpExpr} which will merge the + * left table in the tableStack. 
+ */ + public void addJoin(SQLTableSource right, JoinType joinType, SQLBinaryOpExpr condition) { + SQLTableSource left = tableStack.pop(); + SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); + joinTableSource.setLeft(left); + joinTableSource.setRight(right); + joinTableSource.setJoinType(joinType); + joinTableSource.setCondition(condition); + tableStack.push(joinTableSource); + } - public void addJoin(SQLTableSource right, JoinType joinType) { - addJoin(right, joinType, null); - } + public void addJoin(SQLTableSource right, JoinType joinType) { + addJoin(right, joinType, null); + } - public void addTable(SQLTableSource table) { - tableStack.push(table); - nestedQueryDetector.add(table); - } + public void addTable(SQLTableSource table) { + tableStack.push(table); + nestedQueryDetector.add(table); + } - public boolean isNestedQuery(SQLExprTableSource table) { - return nestedQueryDetector.isNested(table); - } + public boolean isNestedQuery(SQLExprTableSource table) { + return nestedQueryDetector.isNested(table); + } - public void setInSubQuery(SQLInSubQueryExpr expr) { - sqlInSubQueryExprs.add(expr); - } + public void setInSubQuery(SQLInSubQueryExpr expr) { + sqlInSubQueryExprs.add(expr); + } - public void setExistsSubQuery(SQLExistsExpr expr) { - sqlExistsExprs.add(expr); - } + public void setExistsSubQuery(SQLExistsExpr expr) { + sqlExistsExprs.add(expr); + } - public List getSqlInSubQueryExprs() { - return sqlInSubQueryExprs; - } + public List getSqlInSubQueryExprs() { + return sqlInSubQueryExprs; + } - public List getSqlExistsExprs() { - return sqlExistsExprs; - } + public List getSqlExistsExprs() { + return sqlExistsExprs; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java index 5ca0a38d7f..a23eaaf514 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java @@ -3,28 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; import com.alibaba.druid.sql.ast.expr.SQLBinaryOperator; -/** - * Interface of SQL Rewriter - */ +/** Interface of SQL Rewriter */ public interface Rewriter { - /** - * Whether the Rewriter can rewrite the SQL? - */ - boolean canRewrite(); + /** Whether the Rewriter can rewrite the SQL? */ + boolean canRewrite(); - /** - * Rewrite the SQL. - */ - void rewrite(); + /** Rewrite the SQL. */ + void rewrite(); - default SQLBinaryOpExpr and(SQLBinaryOpExpr left, SQLBinaryOpExpr right) { - return new SQLBinaryOpExpr(left, SQLBinaryOperator.BooleanAnd, right); - } + default SQLBinaryOpExpr and(SQLBinaryOpExpr left, SQLBinaryOpExpr right) { + return new SQLBinaryOpExpr(left, SQLBinaryOperator.BooleanAnd, right); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java index ace333e981..6e6656ec37 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -13,32 +12,26 @@ import java.util.List; import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; -/** - * Factory for generating the {@link Rewriter}. - */ +/** Factory for generating the {@link Rewriter}. */ public class RewriterFactory { - /** - * Create list of {@link Rewriter}. 
- */ - public static List createRewriterList(SQLExpr expr, RewriterContext bb) { - if (expr instanceof SQLExistsExpr) { - return existRewriterList((SQLExistsExpr) expr, bb); - } else if (expr instanceof SQLInSubQueryExpr) { - return inRewriterList((SQLInSubQueryExpr) expr, bb); - } - return ImmutableList.of(); + /** Create list of {@link Rewriter}. */ + public static List createRewriterList(SQLExpr expr, RewriterContext bb) { + if (expr instanceof SQLExistsExpr) { + return existRewriterList((SQLExistsExpr) expr, bb); + } else if (expr instanceof SQLInSubQueryExpr) { + return inRewriterList((SQLInSubQueryExpr) expr, bb); } + return ImmutableList.of(); + } - private static List existRewriterList(SQLExistsExpr existsExpr, RewriterContext bb) { - return new ImmutableList.Builder() - .add(new NestedExistsRewriter(existsExpr, bb)) - .build(); - } + private static List existRewriterList(SQLExistsExpr existsExpr, RewriterContext bb) { + return new ImmutableList.Builder() + .add(new NestedExistsRewriter(existsExpr, bb)) + .build(); + } - private static List inRewriterList(SQLInSubQueryExpr inExpr, RewriterContext bb) { - return new ImmutableList.Builder() - .add(new InRewriter(inExpr, bb)) - .build(); - } + private static List inRewriterList(SQLInSubQueryExpr inExpr, RewriterContext bb) { + return new ImmutableList.Builder().add(new InRewriter(inExpr, bb)).build(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java index c449ef1364..f3f8639a1c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class Point { - private double lon; - private double lat; + private double lon; + private double lat; - public Point(double lon, double lat) { - this.lon = lon; - this.lat = lat; - } + public Point(double lon, double lat) { + this.lon = lon; + this.lat = lat; + } - public double getLon() { - return lon; - } + public double getLon() { + return lon; + } - public double getLat() { - return lat; - } + public double getLat() { + return lat; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java index 0d0592f519..1aeddb24a4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java @@ -3,22 +3,19 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; import java.util.List; -/** - * Created by Eliran on 15/8/2015. - */ +/** Created by Eliran on 15/8/2015. */ public class PolygonFilterParams { - private List polygon; + private List polygon; - public PolygonFilterParams(List polygon) { - this.polygon = polygon; - } + public PolygonFilterParams(List polygon) { + this.polygon = polygon; + } - public List getPolygon() { - return polygon; - } + public List getPolygon() { + return polygon; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java index 91962332bf..0bdb01c3ce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java @@ -3,25 +3,22 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 15/8/2015. - */ +/** Created by Eliran on 15/8/2015. 
*/ public class RangeDistanceFilterParams extends DistanceFilterParams { - private String distanceTo; + private String distanceTo; - public RangeDistanceFilterParams(String distanceFrom, String distanceTo, Point from) { - super(distanceFrom, from); - this.distanceTo = distanceTo; - } + public RangeDistanceFilterParams(String distanceFrom, String distanceTo, Point from) { + super(distanceFrom, from); + this.distanceTo = distanceTo; + } - public String getDistanceTo() { - return distanceTo; - } + public String getDistanceTo() { + return distanceTo; + } - public String getDistanceFrom() { - return this.getDistance(); - } + public String getDistanceFrom() { + return this.getDistance(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java index b58691c022..acf7a73ba5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import static org.opensearch.sql.legacy.utils.Util.toSqlExpr; @@ -14,35 +13,35 @@ import org.apache.logging.log4j.Logger; import org.opensearch.sql.legacy.rewriter.identifier.AnonymizeSensitiveDataRule; -/** - * Utility class to mask sensitive information in incoming SQL queries - */ +/** Utility class to mask sensitive information in incoming SQL queries */ public class QueryDataAnonymizer { - private static final Logger LOG = LogManager.getLogger(QueryDataAnonymizer.class); + private static final Logger LOG = LogManager.getLogger(QueryDataAnonymizer.class); - /** - * This method is used to anonymize sensitive data in SQL query. 
- * Sensitive data includes index names, column names etc., - * which in druid parser are parsed to SQLIdentifierExpr instances - * @param query entire sql query string - * @return sql query string with all identifiers replaced with "***" on success - * and failure string otherwise to ensure no non-anonymized data is logged in production. - */ - public static String anonymizeData(String query) { - String resultQuery; - try { - AnonymizeSensitiveDataRule rule = new AnonymizeSensitiveDataRule(); - SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(query); - rule.rewrite(sqlExpr); - resultQuery = SQLUtils.toMySqlString(sqlExpr).replaceAll("0", "number") - .replaceAll("false", "boolean_literal") - .replaceAll("[\\n][\\t]+", " "); - } catch (Exception e) { - LOG.warn("Caught an exception when anonymizing sensitive data."); - LOG.debug("String {} failed anonymization.", query); - resultQuery = "Failed to anonymize data."; - } - return resultQuery; + /** + * This method is used to anonymize sensitive data in SQL query. Sensitive data includes index + * names, column names etc., which in druid parser are parsed to SQLIdentifierExpr instances + * + * @param query entire sql query string + * @return sql query string with all identifiers replaced with "***" on success and failure string + * otherwise to ensure no non-anonymized data is logged in production. 
+ */ + public static String anonymizeData(String query) { + String resultQuery; + try { + AnonymizeSensitiveDataRule rule = new AnonymizeSensitiveDataRule(); + SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(query); + rule.rewrite(sqlExpr); + resultQuery = + SQLUtils.toMySqlString(sqlExpr) + .replaceAll("0", "number") + .replaceAll("false", "boolean_literal") + .replaceAll("[\\n][\\t]+", " "); + } catch (Exception e) { + LOG.warn("Caught an exception when anonymizing sensitive data."); + LOG.debug("String {} failed anonymization.", query); + resultQuery = "Failed to anonymize data."; } + return resultQuery; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java index 326dd6ce06..5c87aabdee 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static java.util.Collections.singletonList; @@ -18,56 +17,53 @@ import org.junit.Test; import org.opensearch.sql.legacy.antlr.semantic.types.special.Product; -/** - * Test cases fro product type - */ +/** Test cases fro product type */ public class ProductTypeTest { - @Test - public void singleSameTypeInTwoProductsShouldPass() { - Product product1 = new Product(singletonList(INTEGER)); - Product product2 = new Product(singletonList(INTEGER)); - Assert.assertTrue(product1.isCompatible(product2)); - Assert.assertTrue(product2.isCompatible(product1)); - } - - @Test - public void singleCompatibleTypeInTwoProductsShouldPass() { - Product product1 = new Product(singletonList(NUMBER)); - Product product2 = new Product(singletonList(INTEGER)); - Assert.assertTrue(product1.isCompatible(product2)); - 
Assert.assertTrue(product2.isCompatible(product1)); - } + @Test + public void singleSameTypeInTwoProductsShouldPass() { + Product product1 = new Product(singletonList(INTEGER)); + Product product2 = new Product(singletonList(INTEGER)); + Assert.assertTrue(product1.isCompatible(product2)); + Assert.assertTrue(product2.isCompatible(product1)); + } - @Test - public void twoCompatibleTypesInTwoProductsShouldPass() { - Product product1 = new Product(Arrays.asList(NUMBER, KEYWORD)); - Product product2 = new Product(Arrays.asList(INTEGER, STRING)); - Assert.assertTrue(product1.isCompatible(product2)); - Assert.assertTrue(product2.isCompatible(product1)); - } + @Test + public void singleCompatibleTypeInTwoProductsShouldPass() { + Product product1 = new Product(singletonList(NUMBER)); + Product product2 = new Product(singletonList(INTEGER)); + Assert.assertTrue(product1.isCompatible(product2)); + Assert.assertTrue(product2.isCompatible(product1)); + } - @Test - public void incompatibleTypesInTwoProductsShouldFail() { - Product product1 = new Product(singletonList(BOOLEAN)); - Product product2 = new Product(singletonList(STRING)); - Assert.assertFalse(product1.isCompatible(product2)); - Assert.assertFalse(product2.isCompatible(product1)); - } + @Test + public void twoCompatibleTypesInTwoProductsShouldPass() { + Product product1 = new Product(Arrays.asList(NUMBER, KEYWORD)); + Product product2 = new Product(Arrays.asList(INTEGER, STRING)); + Assert.assertTrue(product1.isCompatible(product2)); + Assert.assertTrue(product2.isCompatible(product1)); + } - @Test - public void compatibleButDifferentTypeNumberInTwoProductsShouldFail() { - Product product1 = new Product(Arrays.asList(KEYWORD, INTEGER)); - Product product2 = new Product(singletonList(STRING)); - Assert.assertFalse(product1.isCompatible(product2)); - Assert.assertFalse(product2.isCompatible(product1)); - } + @Test + public void incompatibleTypesInTwoProductsShouldFail() { + Product product1 = new 
Product(singletonList(BOOLEAN)); + Product product2 = new Product(singletonList(STRING)); + Assert.assertFalse(product1.isCompatible(product2)); + Assert.assertFalse(product2.isCompatible(product1)); + } - @Test - public void baseTypeShouldBeIncompatibleWithProductType() { - Product product = new Product(singletonList(INTEGER)); - Assert.assertFalse(INTEGER.isCompatible(product)); - Assert.assertFalse(product.isCompatible(INTEGER)); - } + @Test + public void compatibleButDifferentTypeNumberInTwoProductsShouldFail() { + Product product1 = new Product(Arrays.asList(KEYWORD, INTEGER)); + Product product2 = new Product(singletonList(STRING)); + Assert.assertFalse(product1.isCompatible(product2)); + Assert.assertFalse(product2.isCompatible(product1)); + } + @Test + public void baseTypeShouldBeIncompatibleWithProductType() { + Product product = new Product(singletonList(INTEGER)); + Assert.assertFalse(INTEGER.isCompatible(product)); + Assert.assertFalse(product.isCompatible(INTEGER)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java index 69da4ca475..7cfada0b78 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static org.junit.Assert.assertFalse; @@ -13,18 +12,21 @@ public class ResultSetTest { - private final ResultSet resultSet = new ResultSet() { - @Override - public Schema getSchema() { - return super.getSchema(); - } - }; + private final ResultSet resultSet = + new ResultSet() { + @Override + public Schema getSchema() { + return super.getSchema(); + } + }; /** * Case #1: * LIKE 'test%' is converted to: - * 1. Regex pattern: test.* - * 2. OpenSearch search pattern: test* + *
    + *
  1. Regex pattern: test.* + *
  2. OpenSearch search pattern: test* + *
* In this case, what OpenSearch returns is the final result. */ @Test @@ -35,8 +37,10 @@ public void testWildcardForZeroOrMoreCharacters() { /** * Case #2: * LIKE 'test_123' is converted to: - * 1. Regex pattern: test.123 - * 2. OpenSearch search pattern: (all) + *
    x + *
  1. Regex pattern: test.123 + *
  2. OpenSearch search pattern: (all) + *
* Because OpenSearch doesn't support single wildcard character, in this case, none is passed * as OpenSearch search pattern. So all index names are returned and need to be filtered by * regex pattern again. @@ -49,12 +53,10 @@ public void testWildcardForSingleCharacter() { } /** - * Case #3: - * LIKE 'acc' has same regex and OpenSearch pattern. - * In this case, only index name(s) aliased by 'acc' is returned. - * So regex match is skipped to avoid wrong empty result. - * The assumption here is OpenSearch won't return unrelated index names if - * LIKE pattern doesn't include any wildcard. + * Case #3: LIKE 'acc' has same regex and OpenSearch pattern. In this case, only index name(s) + * aliased by 'acc' is returned. So regex match is skipped to avoid wrong empty result. The + * assumption here is OpenSearch won't return unrelated index names if LIKE pattern doesn't + * include any wildcard. */ @Test public void testIndexAlias() { @@ -62,11 +64,9 @@ public void testIndexAlias() { } /** - * Case #4: - * LIKE 'test.2020.10' has same regex pattern. Because it includes dot (wildcard), - * OpenSearch search pattern is all. - * In this case, all index names are returned. Because the pattern includes dot, - * it's treated as regex and regex match won't be skipped. + * Case #4: LIKE 'test.2020.10' has same regex pattern. Because it includes dot (wildcard), + * OpenSearch search pattern is all. In this case, all index names are returned. Because the + * pattern includes dot, it's treated as regex and regex match won't be skipped. 
*/ @Test public void testIndexNameWithDot() { @@ -74,5 +74,4 @@ public void testIndexNameWithDot() { assertFalse(resultSet.matchesPatternIfRegex(".opensearch_dashboards", "test.2020.10")); assertTrue(resultSet.matchesPatternIfRegex("test.2020.10", "test.2020.10")); } - } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java index 64e5d161b7..30d8c9d27d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java @@ -34,25 +34,19 @@ import org.opensearch.sql.sql.domain.SQLQueryRequest; import org.opensearch.threadpool.ThreadPool; -/** - * A test suite that verifies fallback behaviour of cursor queries. - */ +/** A test suite that verifies fallback behaviour of cursor queries. */ @RunWith(MockitoJUnitRunner.class) public class RestSQLQueryActionCursorFallbackTest extends BaseRestHandler { private NodeClient nodeClient; - @Mock - private ThreadPool threadPool; + @Mock private ThreadPool threadPool; - @Mock - private QueryManager queryManager; + @Mock private QueryManager queryManager; - @Mock - private QueryPlanFactory factory; + @Mock private QueryPlanFactory factory; - @Mock - private RestChannel restChannel; + @Mock private RestChannel restChannel; private Injector injector; @@ -60,11 +54,14 @@ public class RestSQLQueryActionCursorFallbackTest extends BaseRestHandler { public void setup() { nodeClient = new NodeClient(org.opensearch.common.settings.Settings.EMPTY, threadPool); ModulesBuilder modules = new ModulesBuilder(); - modules.add(b -> { - b.bind(SQLService.class).toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); - }); + modules.add( + b -> { + b.bind(SQLService.class) + .toInstance(new SQLService(new SQLSyntaxParser(), queryManager, 
factory)); + }); injector = modules.createInjector(); - Mockito.lenient().when(threadPool.getThreadContext()) + Mockito.lenient() + .when(threadPool.getThreadContext()) .thenReturn(new ThreadContext(org.opensearch.common.settings.Settings.EMPTY)); } @@ -73,17 +70,14 @@ public void setup() { @Test public void no_fallback_with_column_reference() throws Exception { String query = "SELECT name FROM test1"; - SQLQueryRequest request = createSqlQueryRequest(query, Optional.empty(), - Optional.of(5)); + SQLQueryRequest request = createSqlQueryRequest(query, Optional.empty(), Optional.of(5)); assertFalse(doesQueryFallback(request)); } - private static SQLQueryRequest createSqlQueryRequest(String query, Optional cursorId, - Optional fetchSize) throws IOException { - var builder = XContentFactory.jsonBuilder() - .startObject() - .field("query").value(query); + private static SQLQueryRequest createSqlQueryRequest( + String query, Optional cursorId, Optional fetchSize) throws IOException { + var builder = XContentFactory.jsonBuilder().startObject().field("query").value(query); if (cursorId.isPresent()) { builder.field("cursor").value(cursorId.get()); } @@ -94,17 +88,21 @@ private static SQLQueryRequest createSqlQueryRequest(String query, Optional { - fallback.set(true); - }, (channel, exception) -> { - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + fallback.set(true); + }, + (channel, exception) -> {}) + .accept(restChannel); return fallback.get(); } @@ -115,8 +113,8 @@ public String getName() { } @Override - protected BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient nodeClient) - { + protected BaseRestHandler.RestChannelConsumer prepareRequest( + RestRequest restRequest, NodeClient nodeClient) { // do nothing, RestChannelConsumer is protected which required to extend BaseRestHandler return null; } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java index be572f3dfb..b14b2c09cb 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.junit.Assert.assertTrue; @@ -42,17 +41,13 @@ public class RestSQLQueryActionTest extends BaseRestHandler { private NodeClient nodeClient; - @Mock - private ThreadPool threadPool; + @Mock private ThreadPool threadPool; - @Mock - private QueryManager queryManager; + @Mock private QueryManager queryManager; - @Mock - private QueryPlanFactory factory; + @Mock private QueryPlanFactory factory; - @Mock - private RestChannel restChannel; + @Mock private RestChannel restChannel; private Injector injector; @@ -60,88 +55,112 @@ public class RestSQLQueryActionTest extends BaseRestHandler { public void setup() { nodeClient = new NodeClient(org.opensearch.common.settings.Settings.EMPTY, threadPool); ModulesBuilder modules = new ModulesBuilder(); - modules.add(b -> { - b.bind(SQLService.class).toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); - }); + modules.add( + b -> { + b.bind(SQLService.class) + .toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); + }); injector = modules.createInjector(); - Mockito.lenient().when(threadPool.getThreadContext()) + Mockito.lenient() + .when(threadPool.getThreadContext()) .thenReturn(new ThreadContext(org.opensearch.common.settings.Settings.EMPTY)); } @Test public void handleQueryThatCanSupport() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject("{\"query\": \"SELECT -123\"}"), - "SELECT -123", - QUERY_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new 
SQLQueryRequest( + new JSONObject("{\"query\": \"SELECT -123\"}"), + "SELECT -123", + QUERY_API_ENDPOINT, + "jdbc"); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - fail(); - }, (channel, exception) -> { - fail(); - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + fail(); + }, + (channel, exception) -> { + fail(); + }) + .accept(restChannel); } @Test public void handleExplainThatCanSupport() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject("{\"query\": \"SELECT -123\"}"), - "SELECT -123", - EXPLAIN_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new SQLQueryRequest( + new JSONObject("{\"query\": \"SELECT -123\"}"), + "SELECT -123", + EXPLAIN_API_ENDPOINT, + "jdbc"); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - fail(); - }, (channel, exception) -> { - fail(); - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + fail(); + }, + (channel, exception) -> { + fail(); + }) + .accept(restChannel); } @Test public void queryThatNotSupportIsHandledByFallbackHandler() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject( - "{\"query\": \"SELECT name FROM test1 JOIN test2 ON test1.name = test2.name\"}"), - "SELECT name FROM test1 JOIN test2 ON test1.name = test2.name", - QUERY_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new SQLQueryRequest( + new JSONObject( + "{\"query\": \"SELECT name FROM test1 JOIN test2 ON test1.name = test2.name\"}"), + "SELECT name FROM test1 JOIN test2 ON test1.name = test2.name", + QUERY_API_ENDPOINT, + "jdbc"); AtomicBoolean fallback = new AtomicBoolean(false); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - 
fallback.set(true); - assertTrue(exception instanceof SyntaxCheckException); - }, (channel, exception) -> { - fail(); - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + fallback.set(true); + assertTrue(exception instanceof SyntaxCheckException); + }, + (channel, exception) -> { + fail(); + }) + .accept(restChannel); assertTrue(fallback.get()); } @Test public void queryExecutionFailedIsHandledByExecutionErrorHandler() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject( - "{\"query\": \"SELECT -123\"}"), - "SELECT -123", - QUERY_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new SQLQueryRequest( + new JSONObject("{\"query\": \"SELECT -123\"}"), + "SELECT -123", + QUERY_API_ENDPOINT, + "jdbc"); - doThrow(new IllegalStateException("execution exception")) - .when(queryManager) - .submit(any()); + doThrow(new IllegalStateException("execution exception")).when(queryManager).submit(any()); AtomicBoolean executionErrorHandler = new AtomicBoolean(false); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - assertTrue(exception instanceof SyntaxCheckException); - }, (channel, exception) -> { - executionErrorHandler.set(true); - assertTrue(exception instanceof IllegalStateException); - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + assertTrue(exception instanceof SyntaxCheckException); + }, + (channel, exception) -> { + executionErrorHandler.set(true); + assertTrue(exception instanceof IllegalStateException); + }) + .accept(restChannel); assertTrue(executionErrorHandler.get()); } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java index fe5c641009..dd0fc626c0 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java @@ -20,7 +20,7 @@ public void testKeyWithObjectField() { SearchHit hit = new SearchHit(1); hit.sourceRef(new BytesArray("{\"id\": {\"serial\": 3}}")); SearchHitRow row = new SearchHitRow(hit, "a"); - RowKey key = row.key(new String[]{"id.serial"}); + RowKey key = row.key(new String[] {"id.serial"}); Object[] data = key.keys(); assertEquals(1, data.length); @@ -32,7 +32,7 @@ public void testKeyWithUnexpandedObjectField() { SearchHit hit = new SearchHit(1); hit.sourceRef(new BytesArray("{\"attributes.hardware.correlate_id\": 10}")); SearchHitRow row = new SearchHitRow(hit, "a"); - RowKey key = row.key(new String[]{"attributes.hardware.correlate_id"}); + RowKey key = row.key(new String[] {"attributes.hardware.correlate_id"}); Object[] data = key.keys(); assertEquals(1, data.length); diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java index 63af01caaa..859259756f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -52,372 +51,284 @@ public class NestedFieldProjectionTest { - @Test - public void regression() { - assertThat(query("SELECT region FROM team"), is(anything())); - assertThat(query("SELECT region FROM team WHERE nested(employees.age) = 30"), is(anything())); - assertThat(query("SELECT * FROM team WHERE region = 'US'"), is(anything())); - } - - @Test - public void nestedFieldSelectAll() { - assertThat( - query("SELECT 
nested(employees.*) FROM team"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.*") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void nestedFieldInSelect() { - assertThat( - query("SELECT nested(employees.firstname) FROM team"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void regularAndNestedFieldInSelect() { - assertThat( - query("SELECT region, nested(employees.firstname) FROM team"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ) - ) - ) - ) - ), - fetchSource("region") - ) - ); - } - - /* - // Should be integration test - @Test - public void nestedFieldInWhereSelectAll() {} - */ - - @Test - public void nestedFieldInSelectAndWhere() { - assertThat( - query("SELECT nested(employees.firstname) " + - " FROM team " + - " WHERE nested(employees.age) = 30"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void regularAndNestedFieldInSelectAndWhere() { - assertThat( - query("SELECT region, nested(employees.firstname) " + - " FROM team " + - " WHERE nested(employees.age) = 30"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - innerHits("employees.firstname") - ) - ) - ) - ) - ), - fetchSource("region") - ) - ); - } - - @Test - public void multipleSameNestedFields() { - assertThat( - query("SELECT nested(employees.firstname), nested(employees.lastname) " + - " FROM team " + - " WHERE nested(\"employees\", employees.age = 30 AND employees.firstname LIKE 'John')"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname", "employees.lastname") - ) - ) - ) - ) - ) - ) - ); + 
@Test + public void regression() { + assertThat(query("SELECT region FROM team"), is(anything())); + assertThat(query("SELECT region FROM team WHERE nested(employees.age) = 30"), is(anything())); + assertThat(query("SELECT * FROM team WHERE region = 'US'"), is(anything())); + } + + @Test + public void nestedFieldSelectAll() { + assertThat( + query("SELECT nested(employees.*) FROM team"), + source( + boolQuery( + filter( + boolQuery(must(nestedQuery(path("employees"), innerHits("employees.*")))))))); + } + + @Test + public void nestedFieldInSelect() { + assertThat( + query("SELECT nested(employees.firstname) FROM team"), + source( + boolQuery( + filter( + boolQuery( + must(nestedQuery(path("employees"), innerHits("employees.firstname")))))))); + } + + @Test + public void regularAndNestedFieldInSelect() { + assertThat( + query("SELECT region, nested(employees.firstname) FROM team"), + source( + boolQuery( + filter( + boolQuery( + must(nestedQuery(path("employees"), innerHits("employees.firstname")))))), + fetchSource("region"))); + } + + /* + // Should be integration test + @Test + public void nestedFieldInWhereSelectAll() {} + */ + + @Test + public void nestedFieldInSelectAndWhere() { + assertThat( + query( + "SELECT nested(employees.firstname) " + + " FROM team " + + " WHERE nested(employees.age) = 30"), + source( + boolQuery( + filter( + boolQuery( + must(nestedQuery(path("employees"), innerHits("employees.firstname")))))))); + } + + @Test + public void regularAndNestedFieldInSelectAndWhere() { + assertThat( + query( + "SELECT region, nested(employees.firstname) " + + " FROM team " + + " WHERE nested(employees.age) = 30"), + source( + boolQuery(filter(boolQuery(must(nestedQuery(innerHits("employees.firstname")))))), + fetchSource("region"))); + } + + @Test + public void multipleSameNestedFields() { + assertThat( + query( + "SELECT nested(employees.firstname), nested(employees.lastname) FROM team WHERE" + + " nested(\"employees\", employees.age = 30 AND 
employees.firstname LIKE 'John')"), + source( + boolQuery( + filter( + boolQuery( + must( + nestedQuery( + path("employees"), + innerHits("employees.firstname", "employees.lastname")))))))); + } + + @Test + public void multipleDifferentNestedFields() { + assertThat( + query( + "SELECT region, nested(employees.firstname), nested(manager.name) " + + " FROM team " + + " WHERE nested(employees.age) = 30 AND nested(manager.age) = 50"), + source( + boolQuery( + filter( + boolQuery( + must( + boolQuery( + must( + nestedQuery( + path("employees"), innerHits("employees.firstname")), + nestedQuery(path("manager"), innerHits("manager.name")))))))), + fetchSource("region"))); + } + + @Test + public void leftJoinWithSelectAll() { + assertThat( + query("SELECT * FROM team AS t LEFT JOIN t.projects AS p "), + source( + boolQuery( + filter( + boolQuery( + should( + boolQuery(mustNot(nestedQuery(path("projects")))), + nestedQuery(path("projects"), innerHits("projects.*")))))))); + } + + @Test + public void leftJoinWithSpecificFields() { + assertThat( + query("SELECT t.name, p.name, p.started_year FROM team AS t LEFT JOIN t.projects AS p "), + source( + boolQuery( + filter( + boolQuery( + should( + boolQuery(mustNot(nestedQuery(path("projects")))), + nestedQuery( + path("projects"), + innerHits("projects.name", "projects.started_year")))))), + fetchSource("name"))); + } + + private Matcher source(Matcher queryMatcher) { + return featureValueOf("query", queryMatcher, SearchSourceBuilder::query); + } + + private Matcher source( + Matcher queryMatcher, Matcher fetchSourceMatcher) { + return allOf( + featureValueOf("query", queryMatcher, SearchSourceBuilder::query), + featureValueOf("fetchSource", fetchSourceMatcher, SearchSourceBuilder::fetchSource)); + } + + /** + * Asserting instanceOf and continue other chained matchers of subclass requires explicity cast + */ + @SuppressWarnings("unchecked") + private Matcher boolQuery(Matcher matcher) { + return (Matcher) 
allOf(instanceOf(BoolQueryBuilder.class), matcher); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + private final Matcher nestedQuery(Matcher... matchers) { + return (Matcher) + both(is(Matchers.instanceOf(NestedQueryBuilder.class))) + .and(allOf(matchers)); + } + + @SafeVarargs + private final FeatureMatcher> filter( + Matcher... matchers) { + return hasClauses("filter", BoolQueryBuilder::filter, matchers); + } + + @SafeVarargs + private final FeatureMatcher> must( + Matcher... matchers) { + return hasClauses("must", BoolQueryBuilder::must, matchers); + } + + @SafeVarargs + private final FeatureMatcher> mustNot( + Matcher... matchers) { + return hasClauses("must_not", BoolQueryBuilder::mustNot, matchers); + } + + @SafeVarargs + private final FeatureMatcher> should( + Matcher... matchers) { + return hasClauses("should", BoolQueryBuilder::should, matchers); + } + + /** Hide contains() assertion to simplify */ + @SafeVarargs + private final FeatureMatcher> hasClauses( + String name, + Function> func, + Matcher... matchers) { + return new FeatureMatcher>( + contains(matchers), name, name) { + @Override + protected List featureValueOf(BoolQueryBuilder query) { + return func.apply(query); + } + }; + } + + private Matcher path(String expected) { + return HasFieldWithValue.hasFieldWithValue("path", "path", is(equalTo(expected))); + } + + /** Skip intermediate property along the path. Hide arrayContaining assertion to simplify. */ + private FeatureMatcher innerHits(String... expected) { + return featureValueOf( + "innerHits", + arrayContaining(expected), + (nestedQuery -> nestedQuery.innerHit().getFetchSourceContext().includes())); + } + + @SuppressWarnings("unchecked") + private Matcher fetchSource(String... 
expected) { + if (expected.length == 0) { + return anyOf( + is(nullValue()), + featureValueOf("includes", is(nullValue()), FetchSourceContext::includes), + featureValueOf("includes", is(emptyArray()), FetchSourceContext::includes)); } - - @Test - public void multipleDifferentNestedFields() { - assertThat( - query("SELECT region, nested(employees.firstname), nested(manager.name) " + - " FROM team " + - " WHERE nested(employees.age) = 30 AND nested(manager.age) = 50"), - source( - boolQuery( - filter( - boolQuery( - must( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ), - nestedQuery( - path("manager"), - innerHits("manager.name") - ) - ) - ) - ) - ) - ) - ), - fetchSource("region") - ) - ); - } - - - @Test - public void leftJoinWithSelectAll() { - assertThat( - query("SELECT * FROM team AS t LEFT JOIN t.projects AS p "), - source( - boolQuery( - filter( - boolQuery( - should( - boolQuery( - mustNot( - nestedQuery( - path("projects") - ) - ) - ), - nestedQuery( - path("projects"), - innerHits("projects.*") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void leftJoinWithSpecificFields() { - assertThat( - query("SELECT t.name, p.name, p.started_year FROM team AS t LEFT JOIN t.projects AS p "), - source( - boolQuery( - filter( - boolQuery( - should( - boolQuery( - mustNot( - nestedQuery( - path("projects") - ) - ) - ), - nestedQuery( - path("projects"), - innerHits("projects.name", "projects.started_year") - ) - ) - ) - ) - ), - fetchSource("name") - ) - ); - } - - private Matcher source(Matcher queryMatcher) { - return featureValueOf("query", queryMatcher, SearchSourceBuilder::query); - } - - private Matcher source(Matcher queryMatcher, - Matcher fetchSourceMatcher) { - return allOf( - featureValueOf("query", queryMatcher, SearchSourceBuilder::query), - featureValueOf("fetchSource", fetchSourceMatcher, SearchSourceBuilder::fetchSource) - ); - } - - /** Asserting instanceOf and continue other chained matchers of 
subclass requires explicity cast */ - @SuppressWarnings("unchecked") - private Matcher boolQuery(Matcher matcher) { - return (Matcher) allOf(instanceOf(BoolQueryBuilder.class), matcher); + return featureValueOf( + "includes", contains(expected), fetchSource -> Arrays.asList(fetchSource.includes())); + } + + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { + return new FeatureMatcher(subMatcher, name, name) { + @Override + protected U featureValueOf(T actual) { + return getter.apply(actual); + } + }; + } + + private SearchSourceBuilder query(String sql) { + SQLQueryExpr expr = parseSql(sql); + if (sql.contains("nested")) { + return translate(expr).source(); } - @SafeVarargs - @SuppressWarnings("unchecked") - private final Matcher nestedQuery(Matcher... matchers) { - return (Matcher) both(is(Matchers.instanceOf(NestedQueryBuilder.class))). - and(allOf(matchers)); + expr = rewrite(expr); + return translate(expr).source(); + } + + private SearchRequest translate(SQLQueryExpr expr) { + try { + Client mockClient = Mockito.mock(Client.class); + SearchRequestBuilder request = new SearchRequestBuilder(mockClient, SearchAction.INSTANCE); + Select select = new SqlParser().parseSelect(expr); + + DefaultQueryAction action = new DefaultQueryAction(mockClient, select); + action.initialize(request); + action.setFields(select.getFields()); + + if (select.getWhere() != null) { + request.setQuery(QueryMaker.explain(select.getWhere(), select.isQuery)); + } + new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); + return request.request(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); } + } - @SafeVarargs - private final FeatureMatcher> filter(Matcher... 
matchers) { - return hasClauses("filter", BoolQueryBuilder::filter, matchers); + private SQLQueryExpr parseSql(String sql) { + ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal sql: " + sql); } + return (SQLQueryExpr) expr; + } - @SafeVarargs - private final FeatureMatcher> must(Matcher... matchers) { - return hasClauses("must", BoolQueryBuilder::must, matchers); - } - - @SafeVarargs - private final FeatureMatcher> mustNot(Matcher... matchers) { - return hasClauses("must_not", BoolQueryBuilder::mustNot, matchers); - } - - @SafeVarargs - private final FeatureMatcher> should(Matcher... matchers) { - return hasClauses("should", BoolQueryBuilder::should, matchers); - } - - /** Hide contains() assertion to simplify */ - @SafeVarargs - private final FeatureMatcher> hasClauses(String name, - Function> func, - Matcher... matchers) { - return new FeatureMatcher>(contains(matchers), name, name) { - @Override - protected List featureValueOf(BoolQueryBuilder query) { - return func.apply(query); - } - }; - } - - private Matcher path(String expected) { - return HasFieldWithValue.hasFieldWithValue("path", "path", is(equalTo(expected))); - } - - /** Skip intermediate property along the path. Hide arrayContaining assertion to simplify. */ - private FeatureMatcher innerHits(String... expected) { - return featureValueOf("innerHits", - arrayContaining(expected), - (nestedQuery -> nestedQuery.innerHit().getFetchSourceContext().includes())); - } - - @SuppressWarnings("unchecked") - private Matcher fetchSource(String... 
expected) { - if (expected.length == 0) { - return anyOf(is(nullValue()), - featureValueOf("includes", is(nullValue()), FetchSourceContext::includes), - featureValueOf("includes", is(emptyArray()), FetchSourceContext::includes)); - } - return featureValueOf("includes", contains(expected), fetchSource -> Arrays.asList(fetchSource.includes())); - } - - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, Function getter) { - return new FeatureMatcher(subMatcher, name, name) { - @Override - protected U featureValueOf(T actual) { - return getter.apply(actual); - } - }; - } - - private SearchSourceBuilder query(String sql) { - SQLQueryExpr expr = parseSql(sql); - if (sql.contains("nested")) { - return translate(expr).source(); - } - - expr = rewrite(expr); - return translate(expr).source(); - } - - private SearchRequest translate(SQLQueryExpr expr) { - try { - Client mockClient = Mockito.mock(Client.class); - SearchRequestBuilder request = new SearchRequestBuilder(mockClient, SearchAction.INSTANCE); - Select select = new SqlParser().parseSelect(expr); - - DefaultQueryAction action = new DefaultQueryAction(mockClient, select); - action.initialize(request); - action.setFields(select.getFields()); - - if (select.getWhere() != null) { - request.setQuery(QueryMaker.explain(select.getWhere(), select.isQuery)); - } - new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); - return request.request(); - } - catch (SqlParseException e) { - throw new ParserException("Illegal sql expr: " + expr.toString()); - } - } - - private SQLQueryExpr parseSql(String sql) { - ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal sql: " + sql); - } - return (SQLQueryExpr) expr; - } - - private SQLQueryExpr rewrite(SQLQueryExpr expr) { - expr.accept(new NestedFieldRewriter()); - return expr; - } + private SQLQueryExpr 
rewrite(SQLQueryExpr expr) { + expr.accept(new NestedFieldRewriter()); + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java index 58a6f7e244..2593f25379 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static java.util.stream.IntStream.range; @@ -29,630 +28,608 @@ public class NestedFieldRewriterTest { - @Test - public void regression() { - noImpact("SELECT * FROM team"); - noImpact("SELECT region FROM team/test, employees/test"); - noImpact("SELECT manager.name FROM team WHERE region = 'US' ORDER BY COUNT(*)"); - noImpact("SELECT COUNT(*) FROM team GROUP BY region"); - } - - @Test - public void selectWithoutFrom() { - // Expect no exception thrown - query("SELECT now()"); - } - - @Test - public void selectAll() { - same( - query("SELECT * FROM team t, t.employees"), - query("SELECT *, nested(employees.*, 'employees') FROM team") - ); - } - - @Test - public void selectAllWithGroupBy() { - same( - query("SELECT * FROM team t, t.employees e GROUP BY e.firstname"), - query("SELECT * FROM team GROUP BY nested(employees.firstname, 'employees')") - ); - } - - @Test - public void selectAllWithCondition() { - same( - query("SELECT * FROM team t, t.employees e WHERE e.age = 26"), - query("SELECT *, nested(employees.*, 'employees') FROM team WHERE nested(employees.age, 'employees') = 26") - ); - } - - @Test - public void singleCondition() { - same( - query("SELECT region FROM team t, t.employees e WHERE e.age = 26"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26") - ); - } - - @Test - public void mixedWithObjectType() { - same( - query("SELECT 
region FROM team t, t.employees e WHERE e.age > 30 OR manager.age = 50"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') > 30 OR manager.age = 50") - ); - } - - @Test - public void noAlias() { - same( - query("SELECT region FROM team t, t.employees WHERE employees.age = 26"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26") - ); - } - - @Test(expected = AssertionError.class) - public void multipleRegularTables() { - same( - query("SELECT region FROM team t, t.employees e, company c WHERE e.age = 26"), - query("SELECT region FROM team, company WHERE nested(employees.age) = 26") - ); - } - - @Test - public void eraseParentAlias() { - same( - query("SELECT t.age FROM team t, t.employees e WHERE t.region = 'US' AND age > 26"), - query("SELECT age FROM team WHERE region = 'US' AND age > 26") - ); - noImpact("SELECT t.age FROM team t WHERE t.region = 'US'"); - } - - @Test - public void select() { - same( - query("SELECT e.age FROM team t, t.employees e"), - query("SELECT nested(employees.age, 'employees' ) FROM team") - ); - } - - @Test - public void aggregationInSelect() { - same( - query("SELECT AVG(e.age) FROM team t, t.employees e"), - query("SELECT AVG(nested(employees.age, 'employees')) FROM team") - ); - } - - @Test - public void multipleAggregationsInSelect() { - same( - query("SELECT COUNT(*), AVG(e.age) FROM team t, t.employees e"), - query("SELECT COUNT(*), AVG(nested(employees.age, 'employees')) FROM team") - ); - } - - @Test - public void groupBy() { - same( - query("SELECT e.firstname, COUNT(*) FROM team t, t.employees e GROUP BY e.firstname"), - query("SELECT nested(employees.firstname, 'employees'), COUNT(*) FROM team GROUP BY nested(employees.firstname, 'employees')") - ); - } - - @Test - public void multipleFieldsInGroupBy() { - same( - query("SELECT COUNT(*) FROM team t, t.employees e GROUP BY t.manager, e.age"), - query("SELECT COUNT(*) FROM team GROUP BY manager, nested(employees.age, 
'employees')") - ); - } - - @Test - public void orderBy() { - same( - query("SELECT region FROM team t, t.employees e ORDER BY e.age"), - query("SELECT region FROM team ORDER BY nested(employees.age)") - ); - } - - @Test - public void multipleConditions() { - same( - query("SELECT region " + - "FROM team t, t.manager m, t.employees e " + - "WHERE t.department = 'IT' AND " + - " (e.age = 26 OR (e.firstname = 'John' AND e.lastname = 'Smith')) AND " + - " t.region = 'US' AND " + - " (m.name = 'Alice' AND m.age = 50)"), - query("SELECT region " + - "FROM team " + - "WHERE department = 'IT' AND " + - " nested(\"employees\", employees.age = 26 OR (employees.firstname = 'John' AND employees.lastname = 'Smith')) AND " + - " region = 'US' AND " + - " nested(\"manager\", manager.name = 'Alice' AND manager.age = 50)") - ); - } - - @Test - public void multipleFieldsInFrom() { - same( - query("SELECT region FROM team/test t, t.manager m, t.employees e WHERE m.age = 30 AND e.age = 26"), - query("SELECT region FROM team/test WHERE nested(manager.age, 'manager') = 30 " + - "AND nested(employees.age, 'employees') = 26") - ); - } - - @Test - public void unionAll() { - // NLPchina doesn't support UNION (intersection) - same( - query("SELECT region FROM team t, t.employees e WHERE e.age = 26 " + - "UNION ALL " + - "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 " + - "UNION ALL " + - "SELECT region FROM team WHERE nested(employees.firstname, 'employees') = 'John'") - ); - } - - @Test - public void minus() { - same( - query("SELECT region FROM team t, t.employees e WHERE e.age = 26 " + - "MINUS " + - "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 " + - "MINUS " + - "SELECT region FROM team WHERE nested(employees.firstname, 'employees') = 'John'") - ); - } - - public void join() { - 
// TODO - } - - @Test - public void subQuery() { - // Subquery only support IN and TERMS - same( - query("SELECT region FROM team t, t.employees e " + - " WHERE e.age IN " + - " (SELECT t1.manager.age FROM team t1, t1.employees e1 WHERE e1.age > 0)"), - query("SELECT region FROM team " + - " WHERE nested(employees.age, 'employees') IN " + - " (SELECT manager.age FROM team WHERE nested(employees.age, 'employees') > 0)") - ); - } - - @Test - public void subQueryWitSameAlias() { - // Inner alias e shadow outer alias e of nested field - same( - query("SELECT name FROM team t, t.employees e " + - " WHERE e.age IN " + - " (SELECT e.age FROM team e, e.manager m WHERE e.age > 0 OR m.name = 'Alice')"), - query("SELECT name FROM team " + - " WHERE nested(employees.age, 'employees') IN " + - " (SELECT age FROM team WHERE age > 0 OR nested(manager.name, 'manager') = 'Alice')") - ); - } - - @Test - public void isNotNull() { - same( - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE p IS NOT MISSING"), - query("SELECT name " + - "FROM employee " + - "WHERE nested(projects, 'projects') IS NOT MISSING") - ); - } - - @Test - public void isNotNullAndCondition() { - same( - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE p IS NOT MISSING AND p.name LIKE 'security'"), - query("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects IS NOT MISSING AND projects.name LIKE 'security')") - ); - } - - @Test - public void multiCondition() { - same( - query("SELECT e.name FROM employee as e, e.projects as p WHERE p.year = 2016 and p.name LIKE 'security'"), - query("SELECT name FROM employee WHERE nested('projects', projects.year = 2016 AND projects.name LIKE 'security')") - ); - } - - @Test - public void nestedAndParentCondition() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE nested(projects, 'projects') IS NOT MISSING AND name LIKE 'security'"), - query("SELECT e.name " + - "FROM employee e, 
e.projects p " + - "WHERE p IS NOT MISSING AND e.name LIKE 'security'") - ); - } - - @Test - public void aggWithWhereOnParent() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - - } - - @Test - public void aggWithWhereOnNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnParentOrNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnParentAndNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnNestedAndNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS 
p " + - "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnNestedOrNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.started_year > 1990 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnParent() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - - } - - @Test - public void aggInHavingWithWhereOnNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnParentOrNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') 
LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnParentAndNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnNestedAndNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnNestedOrNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.started_year > 1990 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void notIsNotNull() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE not (nested(projects, 'projects') IS NOT MISSING)"), - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE not (p IS NOT MISSING)") - ); - } - - @Test - public void notIsNotNullAndCondition() { - same( - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE not (p IS NOT MISSING AND p.name LIKE 
'security')"), - query("SELECT name " + - "FROM employee " + - "WHERE not nested('projects', projects IS NOT MISSING AND projects.name LIKE 'security')") - ); - } - - @Test - public void notMultiCondition() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE not nested('projects', projects.year = 2016 AND projects.name LIKE 'security')"), - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE not (p.year = 2016 and p.name LIKE 'security')") - ); - } - - @Test - public void notNestedAndParentCondition() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE (not nested(projects, 'projects') IS NOT MISSING) AND name LIKE 'security'"), - query("SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE not (p IS NOT MISSING) AND e.name LIKE 'security'") - ); - } - - private void noImpact(String sql) { - same(parse(sql), rewrite(parse(sql))); - } - - /** - * The intention for this assert method is: - * - * 1) MySqlSelectQueryBlock.equals() doesn't call super.equals(). - * But select items, from, where and group by are all held by parent class SQLSelectQueryBlock. - * - * 2) SQLSelectGroupByClause doesn't implement equals() at all.. - * MySqlSelectGroupByExpr compares identity of expression.. 
- * - * 3) MySqlUnionQuery doesn't implement equals() at all - */ - private void same(SQLQueryExpr actual, SQLQueryExpr expected) { - assertEquals(expected.getClass(), actual.getClass()); - - SQLSelect expectedQuery = expected.getSubQuery(); - SQLSelect actualQuery = actual.getSubQuery(); - assertEquals(expectedQuery.getOrderBy(), actualQuery.getOrderBy()); - assertQuery(expectedQuery, actualQuery); - } - - private void assertQuery(SQLSelect expected, SQLSelect actual) { - SQLSelectQuery expectedQuery = expected.getQuery(); - SQLSelectQuery actualQuery = actual.getQuery(); - if (actualQuery instanceof SQLSelectQueryBlock) { - assertQueryBlock( - (SQLSelectQueryBlock) expectedQuery, - (SQLSelectQueryBlock) actualQuery - ); - } - else if (actualQuery instanceof SQLUnionQuery) { - assertQueryBlock( - (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getLeft(), - (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getLeft() - ); - assertQueryBlock( - (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getRight(), - (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getRight() - ); - assertEquals( - ((SQLUnionQuery) expectedQuery).getOperator(), - ((SQLUnionQuery) actualQuery).getOperator() - ); - } - else { - throw new IllegalStateException("Unsupported test SQL"); - } - } - - private void assertQueryBlock(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { - assertEquals("SELECT", expected.getSelectList(), actual.getSelectList()); - assertEquals("INTO", expected.getInto(), actual.getInto()); - assertEquals("WHERE", expected.getWhere(), actual.getWhere()); - if (actual.getWhere() instanceof SQLInSubQueryExpr) { - assertQuery( - ((SQLInSubQueryExpr) expected.getWhere()).getSubQuery(), - ((SQLInSubQueryExpr) actual.getWhere()).getSubQuery() - ); - } - assertEquals("PARENTHESIZED", expected.isParenthesized(), actual.isParenthesized()); - assertEquals("DISTION", expected.getDistionOption(), actual.getDistionOption()); - assertFrom(expected, actual); - if 
(!(expected.getGroupBy() == null && actual.getGroupBy() == null)) { - assertGroupBy(expected.getGroupBy(), actual.getGroupBy()); - } - } - - private void assertFrom(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { - // Only 2 tables JOIN at most is supported - if (expected.getFrom() instanceof SQLExprTableSource) { - assertTable(expected.getFrom(), actual.getFrom()); - } else { - assertEquals(actual.getFrom().getClass(), SQLJoinTableSource.class); - assertTable( - ((SQLJoinTableSource) expected.getFrom()).getLeft(), - ((SQLJoinTableSource) actual.getFrom()).getLeft() - ); - assertTable( - ((SQLJoinTableSource) expected.getFrom()).getRight(), - ((SQLJoinTableSource) actual.getFrom()).getRight() - ); - assertEquals( - ((SQLJoinTableSource) expected.getFrom()).getJoinType(), - ((SQLJoinTableSource) actual.getFrom()).getJoinType() - ); - } - } - - private void assertGroupBy(SQLSelectGroupByClause expected, SQLSelectGroupByClause actual) { - assertEquals("HAVING", expected.getHaving(), actual.getHaving()); - - List expectedGroupby = expected.getItems(); - List actualGroupby = actual.getItems(); - assertEquals(expectedGroupby.size(), actualGroupby.size()); - range(0, expectedGroupby.size()). - forEach(i -> assertEquals( - ((MySqlSelectGroupByExpr) expectedGroupby.get(i)).getExpr(), - ((MySqlSelectGroupByExpr) actualGroupby.get(i)).getExpr()) - ); - } - - private void assertTable(SQLTableSource expect, SQLTableSource actual) { - assertEquals(SQLExprTableSource.class, expect.getClass()); - assertEquals(SQLExprTableSource.class, actual.getClass()); - assertEquals(((SQLExprTableSource) expect).getExpr(), ((SQLExprTableSource) actual).getExpr()); - assertEquals(expect.getAlias(), actual.getAlias()); - } - - /** - * Walk through extra rewrite logic if NOT found "nested" in SQL query statement. - * Otherwise return as before so that original logic be compared with result of rewrite. 
- * - * @param sql Test sql - * @return Node parsed out of sql - */ - private SQLQueryExpr query(String sql) { - SQLQueryExpr expr = SqlParserUtils.parse(sql); - if (sql.contains("nested")) { - return expr; - } - return rewrite(expr); - } - - private SQLQueryExpr rewrite(SQLQueryExpr expr) { - expr.accept(new NestedFieldRewriter()); - return expr; - } - + @Test + public void regression() { + noImpact("SELECT * FROM team"); + noImpact("SELECT region FROM team/test, employees/test"); + noImpact("SELECT manager.name FROM team WHERE region = 'US' ORDER BY COUNT(*)"); + noImpact("SELECT COUNT(*) FROM team GROUP BY region"); + } + + @Test + public void selectWithoutFrom() { + // Expect no exception thrown + query("SELECT now()"); + } + + @Test + public void selectAll() { + same( + query("SELECT * FROM team t, t.employees"), + query("SELECT *, nested(employees.*, 'employees') FROM team")); + } + + @Test + public void selectAllWithGroupBy() { + same( + query("SELECT * FROM team t, t.employees e GROUP BY e.firstname"), + query("SELECT * FROM team GROUP BY nested(employees.firstname, 'employees')")); + } + + @Test + public void selectAllWithCondition() { + same( + query("SELECT * FROM team t, t.employees e WHERE e.age = 26"), + query( + "SELECT *, nested(employees.*, 'employees') FROM team WHERE nested(employees.age," + + " 'employees') = 26")); + } + + @Test + public void singleCondition() { + same( + query("SELECT region FROM team t, t.employees e WHERE e.age = 26"), + query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26")); + } + + @Test + public void mixedWithObjectType() { + same( + query("SELECT region FROM team t, t.employees e WHERE e.age > 30 OR manager.age = 50"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') > 30 OR manager.age =" + + " 50")); + } + + @Test + public void noAlias() { + same( + query("SELECT region FROM team t, t.employees WHERE employees.age = 26"), + query("SELECT region FROM team WHERE 
nested(employees.age, 'employees') = 26")); + } + + @Test(expected = AssertionError.class) + public void multipleRegularTables() { + same( + query("SELECT region FROM team t, t.employees e, company c WHERE e.age = 26"), + query("SELECT region FROM team, company WHERE nested(employees.age) = 26")); + } + + @Test + public void eraseParentAlias() { + same( + query("SELECT t.age FROM team t, t.employees e WHERE t.region = 'US' AND age > 26"), + query("SELECT age FROM team WHERE region = 'US' AND age > 26")); + noImpact("SELECT t.age FROM team t WHERE t.region = 'US'"); + } + + @Test + public void select() { + same( + query("SELECT e.age FROM team t, t.employees e"), + query("SELECT nested(employees.age, 'employees' ) FROM team")); + } + + @Test + public void aggregationInSelect() { + same( + query("SELECT AVG(e.age) FROM team t, t.employees e"), + query("SELECT AVG(nested(employees.age, 'employees')) FROM team")); + } + + @Test + public void multipleAggregationsInSelect() { + same( + query("SELECT COUNT(*), AVG(e.age) FROM team t, t.employees e"), + query("SELECT COUNT(*), AVG(nested(employees.age, 'employees')) FROM team")); + } + + @Test + public void groupBy() { + same( + query("SELECT e.firstname, COUNT(*) FROM team t, t.employees e GROUP BY e.firstname"), + query( + "SELECT nested(employees.firstname, 'employees'), COUNT(*) FROM team GROUP BY" + + " nested(employees.firstname, 'employees')")); + } + + @Test + public void multipleFieldsInGroupBy() { + same( + query("SELECT COUNT(*) FROM team t, t.employees e GROUP BY t.manager, e.age"), + query("SELECT COUNT(*) FROM team GROUP BY manager, nested(employees.age, 'employees')")); + } + + @Test + public void orderBy() { + same( + query("SELECT region FROM team t, t.employees e ORDER BY e.age"), + query("SELECT region FROM team ORDER BY nested(employees.age)")); + } + + @Test + public void multipleConditions() { + same( + query( + "SELECT region " + + "FROM team t, t.manager m, t.employees e " + + "WHERE t.department = 
'IT' AND " + + " (e.age = 26 OR (e.firstname = 'John' AND e.lastname = 'Smith')) AND " + + " t.region = 'US' AND " + + " (m.name = 'Alice' AND m.age = 50)"), + query( + "SELECT region FROM team WHERE department = 'IT' AND nested(\"employees\"," + + " employees.age = 26 OR (employees.firstname = 'John' AND employees.lastname =" + + " 'Smith')) AND region = 'US' AND nested(\"manager\", manager.name =" + + " 'Alice' AND manager.age = 50)")); + } + + @Test + public void multipleFieldsInFrom() { + same( + query( + "SELECT region FROM team/test t, t.manager m, t.employees e WHERE m.age = 30 AND e.age" + + " = 26"), + query( + "SELECT region FROM team/test WHERE nested(manager.age, 'manager') = 30 " + + "AND nested(employees.age, 'employees') = 26")); + } + + @Test + public void unionAll() { + // NLPchina doesn't support UNION (intersection) + same( + query( + "SELECT region FROM team t, t.employees e WHERE e.age = 26 " + + "UNION ALL " + + "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 UNION ALL SELECT" + + " region FROM team WHERE nested(employees.firstname, 'employees') = 'John'")); + } + + @Test + public void minus() { + same( + query( + "SELECT region FROM team t, t.employees e WHERE e.age = 26 " + + "MINUS " + + "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 MINUS SELECT" + + " region FROM team WHERE nested(employees.firstname, 'employees') = 'John'")); + } + + public void join() { + // TODO + } + + @Test + public void subQuery() { + // Subquery only support IN and TERMS + same( + query( + "SELECT region FROM team t, t.employees e " + + " WHERE e.age IN " + + " (SELECT t1.manager.age FROM team t1, t1.employees e1 WHERE e1.age > 0)"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') IN (SELECT" + + " manager.age FROM team WHERE 
nested(employees.age, 'employees') > 0)")); + } + + @Test + public void subQueryWitSameAlias() { + // Inner alias e shadow outer alias e of nested field + same( + query( + "SELECT name FROM team t, t.employees e WHERE e.age IN (SELECT e.age FROM team e," + + " e.manager m WHERE e.age > 0 OR m.name = 'Alice')"), + query( + "SELECT name FROM team WHERE nested(employees.age, 'employees') IN (SELECT age" + + " FROM team WHERE age > 0 OR nested(manager.name, 'manager') = 'Alice')")); + } + + @Test + public void isNotNull() { + same( + query("SELECT e.name " + "FROM employee as e, e.projects as p " + "WHERE p IS NOT MISSING"), + query( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects, 'projects') IS NOT MISSING")); + } + + @Test + public void isNotNullAndCondition() { + same( + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE p IS NOT MISSING AND p.name LIKE 'security'"), + query( + "SELECT name FROM employee WHERE nested('projects', projects IS NOT MISSING AND" + + " projects.name LIKE 'security')")); + } + + @Test + public void multiCondition() { + same( + query( + "SELECT e.name FROM employee as e, e.projects as p WHERE p.year = 2016 and p.name LIKE" + + " 'security'"), + query( + "SELECT name FROM employee WHERE nested('projects', projects.year = 2016 AND" + + " projects.name LIKE 'security')")); + } + + @Test + public void nestedAndParentCondition() { + same( + query( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects, 'projects') IS NOT MISSING AND name LIKE 'security'"), + query( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND e.name LIKE 'security'")); + } + + @Test + public void aggWithWhereOnParent() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + 
+ "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING c > 1")); + } + + @Test + public void aggWithWhereOnNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING c > 1")); + } + + @Test + public void aggWithWhereOnParentOrNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1")); + } + + @Test + public void aggWithWhereOnParentAndNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1")); + } + + @Test + public void aggWithWhereOnNestedAndNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 1990 AND projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1")); + } + + @Test + public void aggWithWhereOnNestedOrNested() { + same( + query( + 
"SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 1990 OR projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1")); + } + + @Test + public void aggInHavingWithWhereOnParent() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnParentOrNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE name LIKE '%smith%' OR nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnParentAndNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE name LIKE '%smith%' AND 
nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnNestedAndNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 1990 AND" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnNestedOrNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 1990 OR" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void notIsNotNull() { + same( + query( + "SELECT name " + + "FROM employee " + + "WHERE not (nested(projects, 'projects') IS NOT MISSING)"), + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE not (p IS NOT MISSING)")); + } + + @Test + public void notIsNotNullAndCondition() { + same( + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE not (p IS NOT MISSING AND p.name LIKE 'security')"), + query( + "SELECT name FROM employee WHERE not nested('projects', projects IS NOT MISSING AND" + + " projects.name LIKE 'security')")); + } + + @Test + public void notMultiCondition() { + same( + query( + "SELECT name FROM employee WHERE not nested('projects', projects.year = 2016 AND" + + " projects.name LIKE 'security')"), + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE not (p.year = 2016 and p.name LIKE 
'security')")); + } + + @Test + public void notNestedAndParentCondition() { + same( + query( + "SELECT name FROM employee WHERE (not nested(projects, 'projects') IS NOT MISSING) AND" + + " name LIKE 'security'"), + query( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE not (p IS NOT MISSING) AND e.name LIKE 'security'")); + } + + private void noImpact(String sql) { + same(parse(sql), rewrite(parse(sql))); + } + + /** + * The intention for this assert method is: + *
    + *
  1. MySqlSelectQueryBlock.equals() doesn't call super.equals(). But select items, from, where + * and group by are all held by parent class SQLSelectQueryBlock. + * + *
  2. SQLSelectGroupByClause doesn't implement equals() at all.. MySqlSelectGroupByExpr + * compares identity of expression.. + * + *
  3. MySqlUnionQuery doesn't implement equals() at all + *
+ */ + private void same(SQLQueryExpr actual, SQLQueryExpr expected) { + assertEquals(expected.getClass(), actual.getClass()); + + SQLSelect expectedQuery = expected.getSubQuery(); + SQLSelect actualQuery = actual.getSubQuery(); + assertEquals(expectedQuery.getOrderBy(), actualQuery.getOrderBy()); + assertQuery(expectedQuery, actualQuery); + } + + private void assertQuery(SQLSelect expected, SQLSelect actual) { + SQLSelectQuery expectedQuery = expected.getQuery(); + SQLSelectQuery actualQuery = actual.getQuery(); + if (actualQuery instanceof SQLSelectQueryBlock) { + assertQueryBlock((SQLSelectQueryBlock) expectedQuery, (SQLSelectQueryBlock) actualQuery); + } else if (actualQuery instanceof SQLUnionQuery) { + assertQueryBlock( + (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getLeft(), + (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getLeft()); + assertQueryBlock( + (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getRight(), + (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getRight()); + assertEquals( + ((SQLUnionQuery) expectedQuery).getOperator(), + ((SQLUnionQuery) actualQuery).getOperator()); + } else { + throw new IllegalStateException("Unsupported test SQL"); + } + } + + private void assertQueryBlock(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { + assertEquals("SELECT", expected.getSelectList(), actual.getSelectList()); + assertEquals("INTO", expected.getInto(), actual.getInto()); + assertEquals("WHERE", expected.getWhere(), actual.getWhere()); + if (actual.getWhere() instanceof SQLInSubQueryExpr) { + assertQuery( + ((SQLInSubQueryExpr) expected.getWhere()).getSubQuery(), + ((SQLInSubQueryExpr) actual.getWhere()).getSubQuery()); + } + assertEquals("PARENTHESIZED", expected.isParenthesized(), actual.isParenthesized()); + assertEquals("DISTION", expected.getDistionOption(), actual.getDistionOption()); + assertFrom(expected, actual); + if (!(expected.getGroupBy() == null && actual.getGroupBy() == null)) { + 
assertGroupBy(expected.getGroupBy(), actual.getGroupBy()); + } + } + + private void assertFrom(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { + // Only 2 tables JOIN at most is supported + if (expected.getFrom() instanceof SQLExprTableSource) { + assertTable(expected.getFrom(), actual.getFrom()); + } else { + assertEquals(actual.getFrom().getClass(), SQLJoinTableSource.class); + assertTable( + ((SQLJoinTableSource) expected.getFrom()).getLeft(), + ((SQLJoinTableSource) actual.getFrom()).getLeft()); + assertTable( + ((SQLJoinTableSource) expected.getFrom()).getRight(), + ((SQLJoinTableSource) actual.getFrom()).getRight()); + assertEquals( + ((SQLJoinTableSource) expected.getFrom()).getJoinType(), + ((SQLJoinTableSource) actual.getFrom()).getJoinType()); + } + } + + private void assertGroupBy(SQLSelectGroupByClause expected, SQLSelectGroupByClause actual) { + assertEquals("HAVING", expected.getHaving(), actual.getHaving()); + + List expectedGroupby = expected.getItems(); + List actualGroupby = actual.getItems(); + assertEquals(expectedGroupby.size(), actualGroupby.size()); + range(0, expectedGroupby.size()) + .forEach( + i -> + assertEquals( + ((MySqlSelectGroupByExpr) expectedGroupby.get(i)).getExpr(), + ((MySqlSelectGroupByExpr) actualGroupby.get(i)).getExpr())); + } + + private void assertTable(SQLTableSource expect, SQLTableSource actual) { + assertEquals(SQLExprTableSource.class, expect.getClass()); + assertEquals(SQLExprTableSource.class, actual.getClass()); + assertEquals(((SQLExprTableSource) expect).getExpr(), ((SQLExprTableSource) actual).getExpr()); + assertEquals(expect.getAlias(), actual.getAlias()); + } + + /** + * Walk through extra rewrite logic if NOT found "nested" in SQL query statement. Otherwise return + * as before so that original logic be compared with result of rewrite. 
+ * + * @param sql Test sql + * @return Node parsed out of sql + */ + private SQLQueryExpr query(String sql) { + SQLQueryExpr expr = SqlParserUtils.parse(sql); + if (sql.contains("nested")) { + return expr; + } + return rewrite(expr); + } + + private SQLQueryExpr rewrite(SQLQueryExpr expr) { + expr.accept(new NestedFieldRewriter()); + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java index 2a654774d4..2dd5cc16ac 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.mockito.Matchers.any; @@ -27,39 +26,42 @@ public class OpenSearchClientTest { - @Mock - protected Client client; + @Mock protected Client client; - @Before - public void init() { - MockitoAnnotations.initMocks(this); - ActionFuture mockFuture = mock(ActionFuture.class); - when(client.multiSearch(any())).thenReturn(mockFuture); + @Before + public void init() { + MockitoAnnotations.initMocks(this); + ActionFuture mockFuture = mock(ActionFuture.class); + when(client.multiSearch(any())).thenReturn(mockFuture); - MultiSearchResponse response = mock(MultiSearchResponse.class); - when(mockFuture.actionGet()).thenReturn(response); + MultiSearchResponse response = mock(MultiSearchResponse.class); + when(mockFuture.actionGet()).thenReturn(response); - MultiSearchResponse.Item item0 = new MultiSearchResponse.Item(mock(SearchResponse.class), null); - MultiSearchResponse.Item item1 = new MultiSearchResponse.Item(mock(SearchResponse.class), new Exception()); - MultiSearchResponse.Item[] itemsRetry0 = new MultiSearchResponse.Item[]{item0, item1}; - MultiSearchResponse.Item[] itemsRetry1 = new MultiSearchResponse.Item[]{item0}; - 
when(response.getResponses()).thenAnswer(new Answer() { - private int callCnt; + MultiSearchResponse.Item item0 = new MultiSearchResponse.Item(mock(SearchResponse.class), null); + MultiSearchResponse.Item item1 = + new MultiSearchResponse.Item(mock(SearchResponse.class), new Exception()); + MultiSearchResponse.Item[] itemsRetry0 = new MultiSearchResponse.Item[] {item0, item1}; + MultiSearchResponse.Item[] itemsRetry1 = new MultiSearchResponse.Item[] {item0}; + when(response.getResponses()) + .thenAnswer( + new Answer() { + private int callCnt; - @Override - public MultiSearchResponse.Item[] answer(InvocationOnMock invocation) { + @Override + public MultiSearchResponse.Item[] answer(InvocationOnMock invocation) { return callCnt++ == 0 ? itemsRetry0 : itemsRetry1; - } - }); - } - - @Test - public void multiSearchRetryOneTime() { - OpenSearchClient openSearchClient = new OpenSearchClient(client); - MultiSearchResponse.Item[] res = openSearchClient.multiSearch(new MultiSearchRequest().add(new SearchRequest()).add(new SearchRequest())); - Assert.assertEquals(res.length, 2); - Assert.assertFalse(res[0].isFailure()); - Assert.assertFalse(res[1].isFailure()); - } + } + }); + } + @Test + public void multiSearchRetryOneTime() { + OpenSearchClient openSearchClient = new OpenSearchClient(client); + MultiSearchResponse.Item[] res = + openSearchClient.multiSearch( + new MultiSearchRequest().add(new SearchRequest()).add(new SearchRequest())); + Assert.assertEquals(res.length, 2); + Assert.assertFalse(res[0].isFailure()); + Assert.assertFalse(res[1].isFailure()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java index 0b714ed41c..8a31c530e3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import java.util.ArrayList; @@ -15,64 +14,68 @@ public class PreparedStatementRequestTest { - @Test - public void testSubstitute() { - String sqlTemplate = "select * from table_name where number_param > ? and string_param = 'Amazon.com' " + - "and test_str = '''test escape? \\'' and state in (?,?) and null_param = ? and double_param = ? " + - "and question_mark = '?'"; - List params = new ArrayList<>(); - params.add(new PreparedStatementRequest.PreparedStatementParameter(10)); - params.add(new PreparedStatementRequest.StringParameter("WA")); - params.add(new PreparedStatementRequest.StringParameter("")); - params.add(new PreparedStatementRequest.NullParameter()); - params.add(new PreparedStatementRequest.PreparedStatementParameter(2.0)); - PreparedStatementRequest psr = new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); - String generatedSql = psr.getSql(); - - String expectedSql = "select * from table_name where number_param > 10 and string_param = 'Amazon.com' " + - "and test_str = '''test escape? 
\\'' and state in ('WA','') and null_param = null " + - "and double_param = 2.0 and question_mark = '?'"; - Assert.assertEquals(expectedSql, generatedSql); - } - - @Test - public void testStringParameter() { - PreparedStatementRequest.StringParameter param; - param = new PreparedStatementRequest.StringParameter("test string"); - Assert.assertEquals("'test string'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test ' single ' quote '"); - Assert.assertEquals("'test \\' single \\' quote \\''", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test line \n break \n char"); - Assert.assertEquals("'test line \\n break \\n char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test carriage \r return \r char"); - Assert.assertEquals("'test carriage \\r return \\r char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test \\ backslash \\ char"); - Assert.assertEquals("'test \\\\ backslash \\\\ char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test single ' quote ' char"); - Assert.assertEquals("'test single \\' quote \\' char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test double \" quote \" char"); - Assert.assertEquals("'test double \\\" quote \\\" char'", param.getSqlSubstitutionValue()); - } - - @Test(expected = IllegalStateException.class) - public void testSubstitute_parameterNumberNotMatch() { - String sqlTemplate = "select * from table_name where param1 = ? 
and param2 = ?"; - List params = new ArrayList<>(); - params.add(new PreparedStatementRequest.StringParameter("value")); - - PreparedStatementRequest psr = new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); - } - - @Test - public void testSubstitute_nullSql() { - PreparedStatementRequest psr = new PreparedStatementRequest(null, new JSONObject(), null); - - Assert.assertNull(psr.getSql()); - } + @Test + public void testSubstitute() { + String sqlTemplate = + "select * from table_name where number_param > ? and string_param = 'Amazon.com' and" + + " test_str = '''test escape? \\'' and state in (?,?) and null_param = ? and" + + " double_param = ? and question_mark = '?'"; + List params = new ArrayList<>(); + params.add(new PreparedStatementRequest.PreparedStatementParameter(10)); + params.add(new PreparedStatementRequest.StringParameter("WA")); + params.add(new PreparedStatementRequest.StringParameter("")); + params.add(new PreparedStatementRequest.NullParameter()); + params.add(new PreparedStatementRequest.PreparedStatementParameter(2.0)); + PreparedStatementRequest psr = + new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); + String generatedSql = psr.getSql(); + + String expectedSql = + "select * from table_name where number_param > 10 and string_param = 'Amazon.com' " + + "and test_str = '''test escape? 
\\'' and state in ('WA','') and null_param = null " + + "and double_param = 2.0 and question_mark = '?'"; + Assert.assertEquals(expectedSql, generatedSql); + } + + @Test + public void testStringParameter() { + PreparedStatementRequest.StringParameter param; + param = new PreparedStatementRequest.StringParameter("test string"); + Assert.assertEquals("'test string'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test ' single ' quote '"); + Assert.assertEquals("'test \\' single \\' quote \\''", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test line \n break \n char"); + Assert.assertEquals("'test line \\n break \\n char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test carriage \r return \r char"); + Assert.assertEquals("'test carriage \\r return \\r char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test \\ backslash \\ char"); + Assert.assertEquals("'test \\\\ backslash \\\\ char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test single ' quote ' char"); + Assert.assertEquals("'test single \\' quote \\' char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test double \" quote \" char"); + Assert.assertEquals("'test double \\\" quote \\\" char'", param.getSqlSubstitutionValue()); + } + + @Test(expected = IllegalStateException.class) + public void testSubstitute_parameterNumberNotMatch() { + String sqlTemplate = "select * from table_name where param1 = ? 
and param2 = ?"; + List params = new ArrayList<>(); + params.add(new PreparedStatementRequest.StringParameter("value")); + + PreparedStatementRequest psr = + new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); + } + + @Test + public void testSubstitute_nullSql() { + PreparedStatementRequest psr = new PreparedStatementRequest(null, new JSONObject(), null); + + Assert.assertNull(psr.getSql()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java index 0ebf89e296..b5a82f6737 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -37,272 +36,178 @@ public class QueryFunctionsTest { - private static final String SELECT_ALL = "SELECT *"; - private static final String FROM_ACCOUNTS = "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + "/account"; - private static final String FROM_NESTED = "FROM " + TestsConstants.TEST_INDEX_NESTED_TYPE + "/nestedType"; - private static final String FROM_PHRASE = "FROM " + TestsConstants.TEST_INDEX_PHRASE + "/phrase"; - - @Test - public void query() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE QUERY('CA')" - ), - contains( - queryStringQuery("CA") - ) - ); - } - - @Test - public void matchQueryRegularField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE MATCH_QUERY(firstname, 'Ayers')" - ), - contains( - matchQuery("firstname", "Ayers") - ) - ); - } - - @Test - public void matchQueryNestedField() { - assertThat( - query( - FROM_NESTED, - "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')" - ), - contains( - nestedQuery("comment", matchQuery("comment.data", "aa"), ScoreMode.None) - ) - ); - } - - @Test - public void 
scoreQuery() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)" - ), - contains( - constantScoreQuery( - matchQuery("firstname", "Ayers") - ).boost(10) - ) - ); - } - - @Test - public void scoreQueryWithNestedField() { - assertThat( - query( - FROM_NESTED, - "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)" - ), - contains( - constantScoreQuery( - nestedQuery("comment", matchQuery("comment.data", "ab"), ScoreMode.None) - ).boost(10) - ) - ); - } - - @Test - public void wildcardQueryRegularField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE WILDCARD_QUERY(city.keyword, 'B*')" - ), - contains( - wildcardQuery("city.keyword", "B*") - ) - ); - } - - @Test - public void wildcardQueryNestedField() { - assertThat( - query( - FROM_NESTED, - "WHERE WILDCARD_QUERY(nested(comment.data), 'a*')" - ), - contains( - nestedQuery("comment", wildcardQuery("comment.data", "a*"), ScoreMode.None) - ) - ); - } - - @Test - public void matchPhraseQueryDefault() { - assertThat( - query( - FROM_PHRASE, - "WHERE MATCH_PHRASE(phrase, 'brown fox')" - ), - contains( - matchPhraseQuery("phrase", "brown fox") - ) - ); - } - - @Test - public void matchPhraseQueryWithSlop() { - assertThat( - query( - FROM_PHRASE, - "WHERE MATCH_PHRASE(phrase, 'brown fox', slop=2)" - ), - contains( - matchPhraseQuery("phrase", "brown fox").slop(2) - ) - ); - } - - @Test - public void multiMatchQuerySingleField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE MULTI_MATCH(query='Ayers', fields='firstname')" - ), - contains( - multiMatchQuery("Ayers").field("firstname") - ) - ); - } - - @Test - public void multiMatchQueryWildcardField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE MULTI_MATCH(query='Ay', fields='*name', type='phrase_prefix')" - ), - contains( - multiMatchQuery("Ay"). - field("*name"). 
- type(MultiMatchQueryBuilder.Type.PHRASE_PREFIX) - ) - ); - } - - @Test - public void numberLiteralInSelectField() { - String query = "SELECT 2 AS number FROM bank WHERE age > 20"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "def assign" - ) - ); - } - - @Test - public void ifFunctionWithConditionStatement() { - String query = "SELECT IF(age > 35, 'elastic', 'search') AS Ages FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "boolean cond = doc['age'].value > 35;" - ) - ); - } - - @Test - public void ifFunctionWithEquationConditionStatement() { - String query = "SELECT IF(age = 35, 'elastic', 'search') AS Ages FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "boolean cond = doc['age'].value == 35;" - ) - ); - } - - @Test - public void ifFunctionWithConstantConditionStatement() { - String query = "SELECT IF(1 = 2, 'elastic', 'search') FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "boolean cond = 1 == 2;" - ) - ); - } - - @Test - public void ifNull() { - String query = "SELECT IFNULL(lastname, 'Unknown') FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].size()==0" - ) - ); - } - - @Test - public void isNullWithMathExpr() { - String query = "SELECT ISNULL(1+1) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "catch(ArithmeticException e)" - ) 
- ); - - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void emptyQueryShouldThrowSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), ""); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), "\n"); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException2() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), "\r\n"); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void queryWithoutSpaceShouldSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), "SELE"); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void spacesOnlyQueryShouldThrowSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), " "); - } - - private String query(String from, String... 
statements) { - return explain(SELECT_ALL + " " + from + " " + String.join(" ", statements)); - } - - private String query(String sql) { - return explain(sql); - } - - private Matcher contains(AbstractQueryBuilder queryBuilder) { - return containsString(Strings.toString(XContentType.JSON, queryBuilder, false, false)); - } + private static final String SELECT_ALL = "SELECT *"; + private static final String FROM_ACCOUNTS = + "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + "/account"; + private static final String FROM_NESTED = + "FROM " + TestsConstants.TEST_INDEX_NESTED_TYPE + "/nestedType"; + private static final String FROM_PHRASE = "FROM " + TestsConstants.TEST_INDEX_PHRASE + "/phrase"; + + @Test + public void query() { + assertThat(query(FROM_ACCOUNTS, "WHERE QUERY('CA')"), contains(queryStringQuery("CA"))); + } + + @Test + public void matchQueryRegularField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE MATCH_QUERY(firstname, 'Ayers')"), + contains(matchQuery("firstname", "Ayers"))); + } + + @Test + public void matchQueryNestedField() { + assertThat( + query(FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')"), + contains(nestedQuery("comment", matchQuery("comment.data", "aa"), ScoreMode.None))); + } + + @Test + public void scoreQuery() { + assertThat( + query(FROM_ACCOUNTS, "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)"), + contains(constantScoreQuery(matchQuery("firstname", "Ayers")).boost(10))); + } + + @Test + public void scoreQueryWithNestedField() { + assertThat( + query(FROM_NESTED, "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)"), + contains( + constantScoreQuery( + nestedQuery("comment", matchQuery("comment.data", "ab"), ScoreMode.None)) + .boost(10))); + } + + @Test + public void wildcardQueryRegularField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE WILDCARD_QUERY(city.keyword, 'B*')"), + contains(wildcardQuery("city.keyword", "B*"))); + } + + @Test + public void wildcardQueryNestedField() { + assertThat( + query(FROM_NESTED, 
"WHERE WILDCARD_QUERY(nested(comment.data), 'a*')"), + contains(nestedQuery("comment", wildcardQuery("comment.data", "a*"), ScoreMode.None))); + } + + @Test + public void matchPhraseQueryDefault() { + assertThat( + query(FROM_PHRASE, "WHERE MATCH_PHRASE(phrase, 'brown fox')"), + contains(matchPhraseQuery("phrase", "brown fox"))); + } + + @Test + public void matchPhraseQueryWithSlop() { + assertThat( + query(FROM_PHRASE, "WHERE MATCH_PHRASE(phrase, 'brown fox', slop=2)"), + contains(matchPhraseQuery("phrase", "brown fox").slop(2))); + } + + @Test + public void multiMatchQuerySingleField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE MULTI_MATCH(query='Ayers', fields='firstname')"), + contains(multiMatchQuery("Ayers").field("firstname"))); + } + + @Test + public void multiMatchQueryWildcardField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE MULTI_MATCH(query='Ay', fields='*name', type='phrase_prefix')"), + contains( + multiMatchQuery("Ay").field("*name").type(MultiMatchQueryBuilder.Type.PHRASE_PREFIX))); + } + + @Test + public void numberLiteralInSelectField() { + String query = "SELECT 2 AS number FROM bank WHERE age > 20"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "def assign")); + } + + @Test + public void ifFunctionWithConditionStatement() { + String query = "SELECT IF(age > 35, 'elastic', 'search') AS Ages FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "boolean cond = doc['age'].value > 35;")); + } + + @Test + public void ifFunctionWithEquationConditionStatement() { + String query = "SELECT IF(age = 35, 'elastic', 'search') AS Ages FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "boolean cond = doc['age'].value == 
35;")); + } + + @Test + public void ifFunctionWithConstantConditionStatement() { + String query = "SELECT IF(1 = 2, 'elastic', 'search') FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "boolean cond = 1 == 2;")); + } + + @Test + public void ifNull() { + String query = "SELECT IFNULL(lastname, 'Unknown') FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "doc['lastname'].size()==0")); + } + + @Test + public void isNullWithMathExpr() { + String query = "SELECT ISNULL(1+1) FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "catch(ArithmeticException e)")); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void emptyQueryShouldThrowSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), ""); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), "\n"); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException2() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), "\r\n"); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void queryWithoutSpaceShouldSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, 
SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), "SELE"); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void spacesOnlyQueryShouldThrowSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), " "); + } + + private String query(String from, String... statements) { + return explain(SELECT_ALL + " " + from + " " + String.join(" ", statements)); + } + + private String query(String sql) { + return explain(sql); + } + + private Matcher contains(AbstractQueryBuilder queryBuilder) { + return containsString(Strings.toString(XContentType.JSON, queryBuilder, false, false)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java index f8607ca889..faefa6d2c1 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.hamcrest.MatcherAssert.assertThat; @@ -25,36 +24,40 @@ import org.junit.Test; public class RefExpressionTest extends ExpressionTest { - @Test - public void refIntegerValueShouldPass() { - assertEquals(Integer.valueOf(1), getIntegerValue(ref("intValue").valueOf(bindingTuple()))); - } - - @Test - public void refDoubleValueShouldPass() { - assertEquals(Double.valueOf(2d), getDoubleValue(ref("doubleValue").valueOf(bindingTuple()))); - } - - @Test - public void refStringValueShouldPass() { - assertEquals("string", getStringValue(ref("stringValue").valueOf(bindingTuple()))); - } - - @Test - public void refBooleanValueShouldPass() { - 
assertEquals(true, getBooleanValue(ref("booleanValue").valueOf(bindingTuple()))); - } - - @Test - public void refTupleValueShouldPass() { - assertThat(getTupleValue(ref("tupleValue").valueOf(bindingTuple())), - allOf(hasEntry("intValue", integerValue(1)), hasEntry("doubleValue", doubleValue(2d)), - hasEntry("stringValue", stringValue("string")))); - } - - @Test - public void refCollectValueShouldPass() { - assertThat(getCollectionValue(ref("collectValue").valueOf(bindingTuple())), - contains(integerValue(1), integerValue(2), integerValue(3))); - } + @Test + public void refIntegerValueShouldPass() { + assertEquals(Integer.valueOf(1), getIntegerValue(ref("intValue").valueOf(bindingTuple()))); + } + + @Test + public void refDoubleValueShouldPass() { + assertEquals(Double.valueOf(2d), getDoubleValue(ref("doubleValue").valueOf(bindingTuple()))); + } + + @Test + public void refStringValueShouldPass() { + assertEquals("string", getStringValue(ref("stringValue").valueOf(bindingTuple()))); + } + + @Test + public void refBooleanValueShouldPass() { + assertEquals(true, getBooleanValue(ref("booleanValue").valueOf(bindingTuple()))); + } + + @Test + public void refTupleValueShouldPass() { + assertThat( + getTupleValue(ref("tupleValue").valueOf(bindingTuple())), + allOf( + hasEntry("intValue", integerValue(1)), + hasEntry("doubleValue", doubleValue(2d)), + hasEntry("stringValue", stringValue("string")))); + } + + @Test + public void refCollectValueShouldPass() { + assertThat( + getCollectionValue(ref("collectValue").valueOf(bindingTuple())), + contains(integerValue(1), integerValue(2), integerValue(3))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java index f2c2c25fab..d76241056f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -15,22 +14,21 @@ public class NumericMetricTest { - @Test - public void increment() { - NumericMetric metric = new NumericMetric("test", new BasicCounter()); - for (int i=0; i<5; ++i) { - metric.increment(); - } - - assertThat(metric.getValue(), equalTo(5L)); + @Test + public void increment() { + NumericMetric metric = new NumericMetric("test", new BasicCounter()); + for (int i = 0; i < 5; ++i) { + metric.increment(); } - @Test - public void add() { - NumericMetric metric = new NumericMetric("test", new BasicCounter()); - metric.increment(5); + assertThat(metric.getValue(), equalTo(5L)); + } - assertThat(metric.getValue(), equalTo(5L)); - } + @Test + public void add() { + NumericMetric metric = new NumericMetric("test", new BasicCounter()); + metric.increment(5); + assertThat(metric.getValue(), equalTo(5L)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java index a1651aad6b..0ad333a6e2 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -20,61 +19,58 @@ @RunWith(MockitoJUnitRunner.class) public class RollingCounterTest { - @Mock - Clock clock; + @Mock Clock clock; - @Test - public void increment() { - RollingCounter counter = new RollingCounter(3, 1, clock); - for (int i=0; i<5; ++i) { - counter.increment(); - } + @Test + public void increment() { + RollingCounter 
counter = new RollingCounter(3, 1, clock); + for (int i = 0; i < 5; ++i) { + counter.increment(); + } - assertThat(counter.getValue(), equalTo(0L)); + assertThat(counter.getValue(), equalTo(0L)); - when(clock.millis()).thenReturn(1000L); // 1 second passed - assertThat(counter.getValue(), equalTo(5L)); + when(clock.millis()).thenReturn(1000L); // 1 second passed + assertThat(counter.getValue(), equalTo(5L)); - counter.increment(); - counter.increment(); + counter.increment(); + counter.increment(); - when(clock.millis()).thenReturn(2000L); // 1 second passed - assertThat(counter.getValue(), lessThanOrEqualTo(3L)); + when(clock.millis()).thenReturn(2000L); // 1 second passed + assertThat(counter.getValue(), lessThanOrEqualTo(3L)); - when(clock.millis()).thenReturn(3000L); // 1 second passed - assertThat(counter.getValue(), equalTo(0L)); + when(clock.millis()).thenReturn(3000L); // 1 second passed + assertThat(counter.getValue(), equalTo(0L)); + } - } + @Test + public void add() { + RollingCounter counter = new RollingCounter(3, 1, clock); - @Test - public void add() { - RollingCounter counter = new RollingCounter(3, 1, clock); + counter.add(6); + assertThat(counter.getValue(), equalTo(0L)); - counter.add(6); - assertThat(counter.getValue(), equalTo(0L)); + when(clock.millis()).thenReturn(1000L); // 1 second passed + assertThat(counter.getValue(), equalTo(6L)); - when(clock.millis()).thenReturn(1000L); // 1 second passed - assertThat(counter.getValue(), equalTo(6L)); + counter.add(4); + when(clock.millis()).thenReturn(2000L); // 1 second passed + assertThat(counter.getValue(), equalTo(4L)); - counter.add(4); - when(clock.millis()).thenReturn(2000L); // 1 second passed - assertThat(counter.getValue(), equalTo(4L)); + when(clock.millis()).thenReturn(3000L); // 1 second passed + assertThat(counter.getValue(), equalTo(0L)); + } - when(clock.millis()).thenReturn(3000L); // 1 second passed - assertThat(counter.getValue(), equalTo(0L)); - } + @Test + public void trim() { + 
RollingCounter counter = new RollingCounter(2, 1, clock); - @Test - public void trim() { - RollingCounter counter = new RollingCounter(2, 1, clock); - - for (int i=1; i<6; ++i) { - counter.increment(); - assertThat(counter.size(), equalTo(i)); - when(clock.millis()).thenReturn(i * 1000L); // i seconds passed - } - counter.increment(); - assertThat(counter.size(), lessThanOrEqualTo(3)); + for (int i = 1; i < 6; ++i) { + counter.increment(); + assertThat(counter.size(), equalTo(i)); + when(clock.millis()).thenReturn(i * 1000L); // i seconds passed } - + counter.increment(); + assertThat(counter.size(), lessThanOrEqualTo(3)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java index 0b7c7f6740..3443c2decd 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.junit.Assert.assertFalse; @@ -15,60 +14,51 @@ import org.opensearch.sql.legacy.util.SqlParserUtils; public class OpenSearchActionFactoryTest { - @Test - public void josnOutputRequestShouldNotMigrateToQueryPlan() { - String sql = "SELECT age, MAX(balance) " + - "FROM account " + - "GROUP BY age"; + @Test + public void josnOutputRequestShouldNotMigrateToQueryPlan() { + String sql = "SELECT age, MAX(balance) FROM account GROUP BY age"; - assertFalse( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JSON)); - } + assertFalse( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JSON)); + } - @Test - public void nestQueryShouldNotMigrateToQueryPlan() { - String sql = "SELECT age, nested(balance) " + - "FROM 
account " + - "GROUP BY age"; + @Test + public void nestQueryShouldNotMigrateToQueryPlan() { + String sql = "SELECT age, nested(balance) FROM account GROUP BY age"; - assertFalse( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertFalse( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void nonAggregationQueryShouldNotMigrateToQueryPlan() { - String sql = "SELECT age " + - "FROM account "; + @Test + public void nonAggregationQueryShouldNotMigrateToQueryPlan() { + String sql = "SELECT age FROM account "; - assertFalse( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertFalse( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void aggregationQueryWithoutGroupByShouldMigrateToQueryPlan() { - String sql = "SELECT age, COUNT(balance) " + - "FROM account "; + @Test + public void aggregationQueryWithoutGroupByShouldMigrateToQueryPlan() { + String sql = "SELECT age, COUNT(balance) FROM account "; - assertTrue( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertTrue( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void aggregationQueryWithExpressionByShouldMigrateToQueryPlan() { - String sql = "SELECT age, MAX(balance) - MIN(balance) " + - "FROM account "; + @Test + public void aggregationQueryWithExpressionByShouldMigrateToQueryPlan() { + String sql = "SELECT age, MAX(balance) - MIN(balance) FROM account "; - assertTrue( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertTrue( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void queryOnlyHasGroupByShouldMigrateToQueryPlan() { - String sql = "SELECT 
CAST(age AS DOUBLE) as alias " + - "FROM account " + - "GROUP BY alias"; + @Test + public void queryOnlyHasGroupByShouldMigrateToQueryPlan() { + String sql = "SELECT CAST(age AS DOUBLE) as alias FROM account GROUP BY alias"; - assertTrue( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertTrue( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java index 545710e343..0c77550a2f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.hamcrest.MatcherAssert.assertThat; @@ -24,221 +23,153 @@ import org.opensearch.search.SearchHits; /** - * Batch prefetch testing. Test against different combination of algorithm block size and scroll page size. + * Batch prefetch testing. Test against different combination of algorithm block size and scroll + * page size. */ @SuppressWarnings("unchecked") @RunWith(Parameterized.class) public class QueryPlannerBatchTest extends QueryPlannerTest { - private static final String TEST_SQL1 = - "SELECT " + - " /*! JOIN_CIRCUIT_BREAK_LIMIT(100) */ " + - " /*! JOIN_ALGORITHM_BLOCK_SIZE(%d) */ " + - " /*! 
JOIN_SCROLL_PAGE_SIZE(%d) */ " + - " e.lastname AS name, d.id AS id, d.name AS dep "; - - private static final String TEST_SQL2_JOIN1 = - "FROM department d " + - " %s employee e "; - - private static final String TEST_SQL2_JOIN2 = - "FROM employee e " + - " %s department d "; - - private static final String TEST_SQL3 = - "ON d.id = e.departmentId " + - " WHERE e.age <= 50"; - - private SearchHit[] employees = { - employee(1, "People 1", "A"), - employee(2, "People 2", "A"), - employee(3, "People 3", "A"), - employee(4, "People 4", "B"), - employee(5, "People 5", "B"), - employee(6, "People 6", "C"), - employee(7, "People 7", "D"), - employee(8, "People 8", "D"), - employee(9, "People 9", "E"), - employee(10, "People 10", "F") - }; - - private SearchHit[] departments = { - department(1, "A", "AWS"), - department(2, "C", "Capital One"), - department(3, "D", "Dell"), - department(4, "F", "Facebook"), - department(5, "G", "Google"), - department(6, "M", "Microsoft"), - department(7, "U", "Uber"), - }; - - private Matcher[] matched = { - hit( - kv("name", "People 1"), - kv("id", "A"), - kv("dep", "AWS") - ), - hit( - kv("name", "People 2"), - kv("id", "A"), - kv("dep", "AWS") - ), - hit( - kv("name", "People 3"), - kv("id", "A"), - kv("dep", "AWS") - ), - hit( - kv("name", "People 6"), - kv("id", "C"), - kv("dep", "Capital One") - ), - hit( - kv("name", "People 7"), - kv("id", "D"), - kv("dep", "Dell") - ), - hit( - kv("name", "People 8"), - kv("id", "D"), - kv("dep", "Dell") - ), - hit( - kv("name", "People 10"), - kv("id", "F"), - kv("dep", "Facebook") - ) - }; - - private Matcher[] mismatched1 = { - hit( - kv("name", null), - kv("id", "G"), - kv("dep", "Google") - ), - hit( - kv("name", null), - kv("id", "M"), - kv("dep", "Microsoft") - ), - hit( - kv("name", null), - kv("id", "U"), - kv("dep", "Uber") - ) - }; - - private Matcher[] mismatched2 = { - hit( - kv("name", "People 4"), - kv("id", null), - kv("dep", null) - ), - hit( - kv("name", "People 5"), - kv("id", 
null), - kv("dep", null) - ), - hit( - kv("name", "People 9"), - kv("id", null), - kv("dep", null) - ) - }; - - private Matcher expectedInnerJoinResult = hits(matched); - - /** Department left join Employee */ - private Matcher expectedLeftOuterJoinResult1 = hits(concat(matched, mismatched1)); - - /** Employee left join Department */ - private Matcher expectedLeftOuterJoinResult2 = hits(concat(matched, mismatched2)); - - /** Parameterized test cases */ - private final int blockSize; - private final int pageSize; - - public QueryPlannerBatchTest(int blockSize, int pageSize) { - this.blockSize = blockSize; - this.pageSize = pageSize; - } - - @Parameters - public static Collection data() { - List params = new ArrayList<>(); - for (int blockSize = 1; blockSize <= 11; blockSize++) { - for (int pageSize = 1; pageSize <= 11; pageSize++) { - params.add(new Object[]{ blockSize, pageSize }); - } - } - return params; - } - - @Test - public void departmentInnerJoinEmployee() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, - blockSize, pageSize, "INNER JOIN"), - departments(pageSize, departments), - employees(pageSize, employees) - ), - expectedInnerJoinResult - ); - } - - @Test - public void employeeInnerJoinDepartment() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, - blockSize, pageSize, "INNER JOIN"), - employees(pageSize, employees), - departments(pageSize, departments) - ), - expectedInnerJoinResult - ); - } - - @Test - public void departmentLeftJoinEmployee() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, - blockSize, pageSize, "LEFT JOIN"), - departments(pageSize, departments), - employees(pageSize, employees) - ), - expectedLeftOuterJoinResult1 - ); - } - - @Test - public void employeeLeftJoinDepartment() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, - blockSize, pageSize, "LEFT JOIN"), - employees(pageSize, employees), 
- departments(pageSize, departments) - ), - expectedLeftOuterJoinResult2 - ); - } - - private static Matcher[] concat(Matcher[] one, Matcher[] other) { - return concat(one, other, Matcher.class); - } - - /** Copy from OpenSearch ArrayUtils */ - private static T[] concat(T[] one, T[] other, Class clazz) { - T[] target = (T[]) Array.newInstance(clazz, one.length + other.length); - System.arraycopy(one, 0, target, 0, one.length); - System.arraycopy(other, 0, target, one.length, other.length); - return target; + private static final String TEST_SQL1 = + "SELECT " + + " /*! JOIN_CIRCUIT_BREAK_LIMIT(100) */ " + + " /*! JOIN_ALGORITHM_BLOCK_SIZE(%d) */ " + + " /*! JOIN_SCROLL_PAGE_SIZE(%d) */ " + + " e.lastname AS name, d.id AS id, d.name AS dep "; + + private static final String TEST_SQL2_JOIN1 = "FROM department d " + " %s employee e "; + + private static final String TEST_SQL2_JOIN2 = "FROM employee e " + " %s department d "; + + private static final String TEST_SQL3 = "ON d.id = e.departmentId " + " WHERE e.age <= 50"; + + private SearchHit[] employees = { + employee(1, "People 1", "A"), + employee(2, "People 2", "A"), + employee(3, "People 3", "A"), + employee(4, "People 4", "B"), + employee(5, "People 5", "B"), + employee(6, "People 6", "C"), + employee(7, "People 7", "D"), + employee(8, "People 8", "D"), + employee(9, "People 9", "E"), + employee(10, "People 10", "F") + }; + + private SearchHit[] departments = { + department(1, "A", "AWS"), + department(2, "C", "Capital One"), + department(3, "D", "Dell"), + department(4, "F", "Facebook"), + department(5, "G", "Google"), + department(6, "M", "Microsoft"), + department(7, "U", "Uber"), + }; + + private Matcher[] matched = { + hit(kv("name", "People 1"), kv("id", "A"), kv("dep", "AWS")), + hit(kv("name", "People 2"), kv("id", "A"), kv("dep", "AWS")), + hit(kv("name", "People 3"), kv("id", "A"), kv("dep", "AWS")), + hit(kv("name", "People 6"), kv("id", "C"), kv("dep", "Capital One")), + hit(kv("name", "People 7"), 
kv("id", "D"), kv("dep", "Dell")), + hit(kv("name", "People 8"), kv("id", "D"), kv("dep", "Dell")), + hit(kv("name", "People 10"), kv("id", "F"), kv("dep", "Facebook")) + }; + + private Matcher[] mismatched1 = { + hit(kv("name", null), kv("id", "G"), kv("dep", "Google")), + hit(kv("name", null), kv("id", "M"), kv("dep", "Microsoft")), + hit(kv("name", null), kv("id", "U"), kv("dep", "Uber")) + }; + + private Matcher[] mismatched2 = { + hit(kv("name", "People 4"), kv("id", null), kv("dep", null)), + hit(kv("name", "People 5"), kv("id", null), kv("dep", null)), + hit(kv("name", "People 9"), kv("id", null), kv("dep", null)) + }; + + private Matcher expectedInnerJoinResult = hits(matched); + + /** Department left join Employee */ + private Matcher expectedLeftOuterJoinResult1 = hits(concat(matched, mismatched1)); + + /** Employee left join Department */ + private Matcher expectedLeftOuterJoinResult2 = hits(concat(matched, mismatched2)); + + /** Parameterized test cases */ + private final int blockSize; + + private final int pageSize; + + public QueryPlannerBatchTest(int blockSize, int pageSize) { + this.blockSize = blockSize; + this.pageSize = pageSize; + } + + @Parameters + public static Collection data() { + List params = new ArrayList<>(); + for (int blockSize = 1; blockSize <= 11; blockSize++) { + for (int pageSize = 1; pageSize <= 11; pageSize++) { + params.add(new Object[] {blockSize, pageSize}); + } } + return params; + } + + @Test + public void departmentInnerJoinEmployee() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, blockSize, pageSize, "INNER JOIN"), + departments(pageSize, departments), + employees(pageSize, employees)), + expectedInnerJoinResult); + } + + @Test + public void employeeInnerJoinDepartment() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, blockSize, pageSize, "INNER JOIN"), + employees(pageSize, employees), + departments(pageSize, departments)), + 
expectedInnerJoinResult); + } + + @Test + public void departmentLeftJoinEmployee() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, blockSize, pageSize, "LEFT JOIN"), + departments(pageSize, departments), + employees(pageSize, employees)), + expectedLeftOuterJoinResult1); + } + + @Test + public void employeeLeftJoinDepartment() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, blockSize, pageSize, "LEFT JOIN"), + employees(pageSize, employees), + departments(pageSize, departments)), + expectedLeftOuterJoinResult2); + } + + private static Matcher[] concat(Matcher[] one, Matcher[] other) { + return concat(one, other, Matcher.class); + } + + /** Copy from OpenSearch ArrayUtils */ + private static T[] concat(T[] one, T[] other, Class clazz) { + T[] target = (T[]) Array.newInstance(clazz, one.length + other.length); + System.arraycopy(one, 0, target, 0, one.length); + System.arraycopy(other, 0, target, one.length, other.length); + return target; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java index 07a84683ce..81d6d718b9 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,291 +22,252 @@ import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; import org.opensearch.sql.legacy.query.planner.core.Config; -/** - * Hint & Configuring Ability Test Cases - */ +/** Hint & Configuring Ability Test Cases */ public class QueryPlannerConfigTest extends QueryPlannerTest { - private static final Matcher 
DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER = totalAndTableLimit(200, 0, 0); - - @Test - public void algorithmBlockSizeHint() { - assertThat( - parseHint("! JOIN_ALGORITHM_BLOCK_SIZE(100000)"), - hint( - hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), - hintValues(100000) - ) - ); - } - - @Test - public void algorithmUseLegacy() { - assertThat( - parseHint("! JOIN_ALGORITHM_USE_LEGACY"), - hint( - hintType(HintType.JOIN_ALGORITHM_USE_LEGACY), - hintValues() - ) - ); - } - - @Test - public void algorithmBlockSizeHintWithSpaces() { - assertThat( - parseHint("! JOIN_ALGORITHM_BLOCK_SIZE ( 200000 ) "), - hint( - hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), - hintValues(200000) - ) - ); - } - - @Test - public void scrollPageSizeHint() { - assertThat( - parseHint("! JOIN_SCROLL_PAGE_SIZE(1000) "), - hint( - hintType(HintType.JOIN_SCROLL_PAGE_SIZE), - hintValues(1000) - ) - ); - } - - @Test - public void scrollPageSizeHintWithTwoSizes() { - assertThat( - parseHint("! JOIN_SCROLL_PAGE_SIZE(1000, 2000) "), - hint( - hintType(HintType.JOIN_SCROLL_PAGE_SIZE), - hintValues(1000, 2000) - ) - ); - } - - @Test - public void circuitBreakLimitHint() { - assertThat( - parseHint("! JOIN_CIRCUIT_BREAK_LIMIT(80)"), - hint( - hintType(HintType.JOIN_CIRCUIT_BREAK_LIMIT), - hintValues(80) - ) - ); - } - - @Test - public void backOffRetryIntervalsHint() { - assertThat( - parseHint("! JOIN_BACK_OFF_RETRY_INTERVALS(1, 5)"), - hint( - hintType(HintType.JOIN_BACK_OFF_RETRY_INTERVALS), - hintValues(1, 5) - ) - ); - } - - @Test - public void timeOutHint() { - assertThat( - parseHint("! JOIN_TIME_OUT(120)"), - hint( - hintType(HintType.JOIN_TIME_OUT), - hintValues(120) - ) - ); - } - - @Test - public void blockSizeConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! 
JOIN_ALGORITHM_BLOCK_SIZE(200000) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(200000), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void scrollPageSizeConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(50, 20), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void circuitBreakLimitConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_CIRCUIT_BREAK_LIMIT(60) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(60), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void backOffRetryIntervalsConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! 
JOIN_BACK_OFF_RETRY_INTERVALS(1, 3, 5, 10) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(new double[]{1, 3, 5, 10}), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void totalAndTableLimitConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_TABLES_LIMIT(10, 20) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId LIMIT 50"), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - totalAndTableLimit(50, 10, 20), - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void timeOutConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_TIME_OUT(120) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId"), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(120) - ) - ); - } - - @Test - public void multipleConfigCombined() { - assertThat(queryPlannerConfig( - "SELECT " + - " /*! JOIN_ALGORITHM_BLOCK_SIZE(100) */ " + - " /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + - " /*! 
JOIN_CIRCUIT_BREAK_LIMIT(10) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(100), - scrollPageSize(50, 20), - circuitBreakLimit(10), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - private Hint parseHint(String hintStr) { - try { - return HintFactory.getHintFromString(hintStr); - } - catch (SqlParseException e) { - throw new IllegalArgumentException(e); - } - } - - private Config queryPlannerConfig(String sql) { - HashJoinQueryPlanRequestBuilder request = ((HashJoinQueryPlanRequestBuilder) createRequestBuilder(sql)); - request.plan(); - return request.getConfig(); + private static final Matcher DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER = + totalAndTableLimit(200, 0, 0); + + @Test + public void algorithmBlockSizeHint() { + assertThat( + parseHint("! JOIN_ALGORITHM_BLOCK_SIZE(100000)"), + hint(hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), hintValues(100000))); + } + + @Test + public void algorithmUseLegacy() { + assertThat( + parseHint("! JOIN_ALGORITHM_USE_LEGACY"), + hint(hintType(HintType.JOIN_ALGORITHM_USE_LEGACY), hintValues())); + } + + @Test + public void algorithmBlockSizeHintWithSpaces() { + assertThat( + parseHint("! JOIN_ALGORITHM_BLOCK_SIZE ( 200000 ) "), + hint(hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), hintValues(200000))); + } + + @Test + public void scrollPageSizeHint() { + assertThat( + parseHint("! JOIN_SCROLL_PAGE_SIZE(1000) "), + hint(hintType(HintType.JOIN_SCROLL_PAGE_SIZE), hintValues(1000))); + } + + @Test + public void scrollPageSizeHintWithTwoSizes() { + assertThat( + parseHint("! JOIN_SCROLL_PAGE_SIZE(1000, 2000) "), + hint(hintType(HintType.JOIN_SCROLL_PAGE_SIZE), hintValues(1000, 2000))); + } + + @Test + public void circuitBreakLimitHint() { + assertThat( + parseHint("! 
JOIN_CIRCUIT_BREAK_LIMIT(80)"), + hint(hintType(HintType.JOIN_CIRCUIT_BREAK_LIMIT), hintValues(80))); + } + + @Test + public void backOffRetryIntervalsHint() { + assertThat( + parseHint("! JOIN_BACK_OFF_RETRY_INTERVALS(1, 5)"), + hint(hintType(HintType.JOIN_BACK_OFF_RETRY_INTERVALS), hintValues(1, 5))); + } + + @Test + public void timeOutHint() { + assertThat( + parseHint("! JOIN_TIME_OUT(120)"), hint(hintType(HintType.JOIN_TIME_OUT), hintValues(120))); + } + + @Test + public void blockSizeConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_ALGORITHM_BLOCK_SIZE(200000) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(200000), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void scrollPageSizeConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(50, 20), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void circuitBreakLimitConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! 
JOIN_CIRCUIT_BREAK_LIMIT(60) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(60), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void backOffRetryIntervalsConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_BACK_OFF_RETRY_INTERVALS(1, 3, 5, 10) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(new double[] {1, 3, 5, 10}), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void totalAndTableLimitConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_TABLES_LIMIT(10, 20) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId LIMIT 50"), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + totalAndTableLimit(50, 10, 20), + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void timeOutConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! 
JOIN_TIME_OUT(120) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId"), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(120))); + } + + @Test + public void multipleConfigCombined() { + assertThat( + queryPlannerConfig( + "SELECT " + + " /*! JOIN_ALGORITHM_BLOCK_SIZE(100) */ " + + " /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + + " /*! JOIN_CIRCUIT_BREAK_LIMIT(10) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(100), + scrollPageSize(50, 20), + circuitBreakLimit(10), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + private Hint parseHint(String hintStr) { + try { + return HintFactory.getHintFromString(hintStr); + } catch (SqlParseException e) { + throw new IllegalArgumentException(e); } - - private Matcher hint(Matcher typeMatcher, Matcher valuesMatcher) { - return both( - featureValueOf("HintType", typeMatcher, Hint::getType) - ).and( - featureValueOf("HintValue", valuesMatcher, Hint::getParams) - ); - } - - private Matcher hintType(HintType type) { - return is(type); - } - - private Matcher hintValues(Object... 
values) { - if (values.length == 0) { - return emptyArray(); - } - return arrayContaining(values); - } - - private Matcher config(Matcher blockSizeMatcher, - Matcher scrollPageSizeMatcher, - Matcher circuitBreakLimitMatcher, - Matcher backOffRetryIntervalsMatcher, - Matcher totalAndTableLimitMatcher, - Matcher timeOutMatcher) { - return allOf( - featureValueOf("Block size", blockSizeMatcher, (cfg -> cfg.blockSize().size())), - featureValueOf("Scroll page size", scrollPageSizeMatcher, Config::scrollPageSize), - featureValueOf("Circuit break limit", circuitBreakLimitMatcher, Config::circuitBreakLimit), - featureValueOf("Back off retry intervals", backOffRetryIntervalsMatcher, Config::backOffRetryIntervals), - featureValueOf("Total and table limit", totalAndTableLimitMatcher, - (cfg -> new Integer[]{cfg.totalLimit(), cfg.tableLimit1(), cfg.tableLimit2()})), - featureValueOf("Time out", timeOutMatcher, Config::timeout) - ); + } + + private Config queryPlannerConfig(String sql) { + HashJoinQueryPlanRequestBuilder request = + ((HashJoinQueryPlanRequestBuilder) createRequestBuilder(sql)); + request.plan(); + return request.getConfig(); + } + + private Matcher hint(Matcher typeMatcher, Matcher valuesMatcher) { + return both(featureValueOf("HintType", typeMatcher, Hint::getType)) + .and(featureValueOf("HintValue", valuesMatcher, Hint::getParams)); + } + + private Matcher hintType(HintType type) { + return is(type); + } + + private Matcher hintValues(Object... 
values) { + if (values.length == 0) { + return emptyArray(); } - - private Matcher blockSize(int size) { - return is(size); - } - - @SuppressWarnings("unchecked") - private Matcher scrollPageSize(int size1, int size2) { - return arrayContaining(is(size1), is(size2)); - } - - private Matcher circuitBreakLimit(int limit) { - return is(limit); - } - - private Matcher backOffRetryIntervals(double[] intervals) { - return is(intervals); - } - - @SuppressWarnings("unchecked") - private static Matcher totalAndTableLimit(int totalLimit, int tableLimit1, int tableLimit2) { - return arrayContaining(is(totalLimit), is(tableLimit1), is(tableLimit2)); - } - - private static Matcher timeOut(int timeout) { - return is(timeout); - } - + return arrayContaining(values); + } + + private Matcher config( + Matcher blockSizeMatcher, + Matcher scrollPageSizeMatcher, + Matcher circuitBreakLimitMatcher, + Matcher backOffRetryIntervalsMatcher, + Matcher totalAndTableLimitMatcher, + Matcher timeOutMatcher) { + return allOf( + featureValueOf("Block size", blockSizeMatcher, (cfg -> cfg.blockSize().size())), + featureValueOf("Scroll page size", scrollPageSizeMatcher, Config::scrollPageSize), + featureValueOf("Circuit break limit", circuitBreakLimitMatcher, Config::circuitBreakLimit), + featureValueOf( + "Back off retry intervals", + backOffRetryIntervalsMatcher, + Config::backOffRetryIntervals), + featureValueOf( + "Total and table limit", + totalAndTableLimitMatcher, + (cfg -> new Integer[] {cfg.totalLimit(), cfg.tableLimit1(), cfg.tableLimit2()})), + featureValueOf("Time out", timeOutMatcher, Config::timeout)); + } + + private Matcher blockSize(int size) { + return is(size); + } + + @SuppressWarnings("unchecked") + private Matcher scrollPageSize(int size1, int size2) { + return arrayContaining(is(size1), is(size2)); + } + + private Matcher circuitBreakLimit(int limit) { + return is(limit); + } + + private Matcher backOffRetryIntervals(double[] intervals) { + return is(intervals); + } + + 
@SuppressWarnings("unchecked") + private static Matcher totalAndTableLimit( + int totalLimit, int tableLimit1, int tableLimit2) { + return arrayContaining(is(totalLimit), is(tableLimit1), is(tableLimit2)); + } + + private static Matcher timeOut(int timeout) { + return is(timeout); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java index 55ea8c390b..dc8e094e2d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.opensearch.sql.legacy.util.MatcherUtils.hit; @@ -14,767 +13,420 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.util.MatcherUtils; -/** - * Query planner execution unit test - */ +/** Query planner execution unit test */ public class QueryPlannerExecuteTest extends QueryPlannerTest { - @Test - public void simpleJoin() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleJoinWithSelectAll() { - MatcherAssert.assertThat( - query( - "SELECT * FROM employee e " + - " JOIN department d ON d.id = e.departmentId ", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS") - ) - 
), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Alice"), - MatcherUtils.kv("e.departmentId", "1") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Hank"), - MatcherUtils.kv("e.departmentId", "1") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithSelectAllFromOneTable() { - MatcherAssert.assertThat( - query( - "SELECT e.lastname, d.* FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId ", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1"), - employee(3, "Allen", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("e.lastname", "Alice"), - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1") - ), - hit( - MatcherUtils.kv("e.lastname", "Hank"), - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1") - ), - hit( - MatcherUtils.kv("e.lastname", "Allen") - /* - * Not easy to figure out all column names for d.* without reading metadata - * or look into other rows from d. But in the extreme case, d could be empty table - * which requires metadata read anyway. 
- */ - //kv("d.name", null), - //kv("d.id", null) - ) - ) - ); - } - - @Test - public void simpleJoinWithSelectAllFromBothTables() { - MatcherAssert.assertThat( - query( - "SELECT e.*, d.* FROM employee e " + - " JOIN department d ON d.id = e.departmentId ", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Alice"), - MatcherUtils.kv("e.departmentId", "1") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Hank"), - MatcherUtils.kv("e.departmentId", "1") - ) - ) - ); - } - - @Test - public void simpleJoinWithoutMatch() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits() - ); - } - - @Test - public void simpleJoinWithSomeMatches() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleJoinWithAllMatches() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1"), - employee(3, "Mike", "2") - ), - departments( - department(1, "1", "AWS"), - 
department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ), - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Mike") - ) - ) - ); - } - - @Test - public void simpleJoinWithNull() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", null), - employee(3, "Mike", "2") - ), - departments( - department(1, "1", "AWS"), - department(2, null, "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleJoinWithColumnNameConflict() { - // Add a same column 'name' as in department on purpose - SearchHit alice = employee(1, "Alice", "1"); - alice.getSourceAsMap().put("name", "Alice Alice"); - SearchHit hank = employee(2, "Hank", "2"); - hank.getSourceAsMap().put("name", "Hank Hank"); - - MatcherAssert.assertThat( - query( - "SELECT d.name, e.name FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - alice, hank - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.name", "Alice Alice") - ) - ) - ); - } - - @Test - public void simpleJoinWithAliasInSelect() { - MatcherAssert.assertThat( - query( - "SELECT d.name AS dname, e.lastname AS ename FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("dname", 
"Retail"), - MatcherUtils.kv("ename", "Alice") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithoutMatchInLeft() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithSomeMismatchesInLeft() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "2") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithSomeMismatchesInRight() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleQueryWithTotalLimit() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId LIMIT 
1", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "2") - ), - departments( - department(1, "1", "AWS"), - department(1, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleQueryWithTableLimit() { - MatcherAssert.assertThat( - query( - "SELECT /*! JOIN_TABLES_LIMIT(1, 5) */ d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS"), - department(1, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleQueryWithOrderBy() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId ORDER BY e.lastname", - employees( - employee(1, "Hank", "1"), - employee(2, "Alice", "2"), - employee(3, "Allen", "1"), - employee(4, "Ellis", "2"), - employee(5, "Frank", "2") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - MatcherUtils.hitsInOrder( - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Allen") - ), - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Ellis") - ), - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Frank") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - /** Doesn't support muliple columns from both tables (order is missing) */ - @Test - public void simpleQueryWithLeftJoinAndOrderByMultipleColumnsFromOneTableInDesc() { - MatcherAssert.assertThat( - query( - "SELECT d.id AS id, e.lastname AS lastname FROM employee e " + - " LEFT JOIN department d ON d.id = 
e.departmentId " + - " ORDER BY e.departmentId, e.lastname DESC", - employees( - employee(1, "Hank", "1"), - employee(2, "Alice", "2"), - employee(3, "Allen", "1"), - employee(4, "Ellis", "2"), - employee(5, "Gary", "3"), - employee(5, "Frank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - MatcherUtils.hitsInOrder( - hit( - MatcherUtils.kv("id", null), - MatcherUtils.kv("lastname", "Gary") - ), - hit( - MatcherUtils.kv("id", null), - MatcherUtils.kv("lastname", "Frank") - ), - hit( - MatcherUtils.kv("id", "2"), - MatcherUtils.kv("lastname", "Ellis") - ), - hit( - MatcherUtils.kv("id", "2"), - MatcherUtils.kv("lastname", "Alice") - ), - hit( - MatcherUtils.kv("id", "1"), - MatcherUtils.kv("lastname", "Hank") - ), - hit( - MatcherUtils.kv("id", "1"), - MatcherUtils.kv("lastname", "Allen") - ) - ) - ); - } - - @Test - public void simpleCrossJoin() { - MatcherAssert.assertThat( - query( - "SELECT d.name AS dname, e.lastname AS ename FROM employee e JOIN department d", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("dname", "AWS"), - MatcherUtils.kv("ename", "Alice") - ), - hit( - MatcherUtils.kv("dname", "AWS"), - MatcherUtils.kv("ename", "Hank") - ), - hit( - MatcherUtils.kv("dname", "Retail"), - MatcherUtils.kv("ename", "Alice") - ), - hit( - MatcherUtils.kv("dname", "Retail"), - MatcherUtils.kv("ename", "Hank") - ) - ) - ); - } - - @Test - public void simpleQueryWithTermsFilterOptimization() { - MatcherAssert.assertThat( - query( - "SELECT /*! 
HASH_WITH_TERMS_FILTER*/ " + // Be careful that no space between ...FILTER and */ - " e.lastname, d.id FROM employee e " + - " JOIN department d ON d.id = e.departmentId AND d.name = e.lastname", - employees( - employee(1, "Johnson", "1"), - employee(2, "Allen", "4"), - employee(3, "Ellis", "2"), - employee(4, "Dell", "1"), - employee(5, "Dell", "4") - ), - departments( - department(1, "1", "Johnson"), - department(1, "4", "Dell") - ) - ), - hits( - hit( - MatcherUtils.kv("e.lastname", "Johnson"), - MatcherUtils.kv("d.id", "1") - ), - hit( - MatcherUtils.kv("e.lastname", "Dell"), - MatcherUtils.kv("d.id", "4") - ) - ) - ); - } - - @Test - public void complexJoinWithMultipleConditions() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname, d.id " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId AND d.name = e.lastname" + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Dell", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "Dell") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell"), - MatcherUtils.kv("d.id", "1") - ) - ) - ); - } - - @Test - public void complexJoinWithOrConditions() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Alice", "1"), - employee(2, "Dell", "2"), - employee(3, "Hank", "3") - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell") - ) - ) - ); - } - - @Test - public void complexJoinWithOrConditionsDuplicate() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.departmentId " + - " FROM employee e " + - " JOIN department d " + - " ON 
d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Dell", "1") // Match both condition but should only show once in result - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.departmentId", "1") - ) - ) - ); - } - - @Test - public void complexJoinWithOrConditionsAndTermsFilterOptimization() { - MatcherAssert.assertThat( - query( - "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + - " d.name, e.lastname " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Alice", "1"), - employee(2, "Dell", "2"), - employee(3, "Hank", "3") - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell") - ) - ) - ); - } - - @Test - public void complexLeftJoinWithOrConditions() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname " + - " FROM employee e " + - " LEFT JOIN department d " + - " ON d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Alice", "1"), - employee(2, "Dell", "2"), - employee(3, "Hank", "3") - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell") - ), - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void complexJoinWithTableLimitHint() { - MatcherAssert.assertThat( - query( - "SELECT " + - " /*! 
JOIN_TABLES_LIMIT(2, 1)*/" + - " d.name, e.lastname " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId", - employees( - employee(1, "Alice", "1"), // Only this and the second row will be pulled out - employee(2, "Dell", "4"), - employee(3, "Hank", "1") - ), - departments( - department(1, "1", "Dell"), // Only this row will be pulled out - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - + @Test + public void simpleJoin() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void simpleJoinWithSelectAll() { + MatcherAssert.assertThat( + query( + "SELECT * FROM employee e " + " JOIN department d ON d.id = e.departmentId ", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"))), + hits( + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Alice"), + MatcherUtils.kv("e.departmentId", "1")), + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Hank"), + MatcherUtils.kv("e.departmentId", "1")))); + } + + @Test + public void simpleLeftJoinWithSelectAllFromOneTable() { + MatcherAssert.assertThat( + query( + "SELECT e.lastname, d.* FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId ", + employees( + employee(1, "Alice", "1"), employee(2, "Hank", "1"), employee(3, "Allen", "3")), + departments(department(1, "1", "AWS"), department(2, "2", 
"Retail"))), + hits( + hit( + MatcherUtils.kv("e.lastname", "Alice"), + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1")), + hit( + MatcherUtils.kv("e.lastname", "Hank"), + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1")), + hit( + MatcherUtils.kv("e.lastname", "Allen") + /* + * Not easy to figure out all column names for d.* without reading metadata + * or look into other rows from d. But in the extreme case, d could be empty table + * which requires metadata read anyway. + */ + // kv("d.name", null), + // kv("d.id", null) + ))); + } + + @Test + public void simpleJoinWithSelectAllFromBothTables() { + MatcherAssert.assertThat( + query( + "SELECT e.*, d.* FROM employee e " + " JOIN department d ON d.id = e.departmentId ", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"))), + hits( + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Alice"), + MatcherUtils.kv("e.departmentId", "1")), + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Hank"), + MatcherUtils.kv("e.departmentId", "1")))); + } + + @Test + public void simpleJoinWithoutMatch() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"))), + hits()); + } + + @Test + public void simpleJoinWithSomeMatches() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "Retail"), 
MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleJoinWithAllMatches() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees( + employee(1, "Alice", "1"), employee(2, "Hank", "1"), employee(3, "Mike", "2")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")), + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Mike")))); + } + + @Test + public void simpleJoinWithNull() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees( + employee(1, "Alice", "1"), employee(2, "Hank", null), employee(3, "Mike", "2")), + departments(department(1, "1", "AWS"), department(2, null, "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleJoinWithColumnNameConflict() { + // Add a same column 'name' as in department on purpose + SearchHit alice = employee(1, "Alice", "1"); + alice.getSourceAsMap().put("name", "Alice Alice"); + SearchHit hank = employee(2, "Hank", "2"); + hank.getSourceAsMap().put("name", "Hank Hank"); + + MatcherAssert.assertThat( + query( + "SELECT d.name, e.name FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(alice, hank), + departments(department(1, "1", "AWS"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.name", "Alice Alice")))); + } + + @Test + public void simpleJoinWithAliasInSelect() { + MatcherAssert.assertThat( + query( + "SELECT d.name AS dname, e.lastname AS ename FROM 
employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits(hit(MatcherUtils.kv("dname", "Retail"), MatcherUtils.kv("ename", "Alice")))); + } + + @Test + public void simpleLeftJoinWithoutMatchInLeft() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void simpleLeftJoinWithSomeMismatchesInLeft() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "2")), + departments(department(1, "1", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void simpleLeftJoinWithSomeMismatchesInRight() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void 
simpleQueryWithTotalLimit() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId" + + " LIMIT 1", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "2")), + departments(department(1, "1", "AWS"), department(1, "2", "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleQueryWithTableLimit() { + MatcherAssert.assertThat( + query( + "SELECT /*! JOIN_TABLES_LIMIT(1, 5) */ d.name, e.lastname FROM employee e JOIN" + + " department d ON d.id = e.departmentId", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"), department(1, "2", "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleQueryWithOrderBy() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId" + + " ORDER BY e.lastname", + employees( + employee(1, "Hank", "1"), + employee(2, "Alice", "2"), + employee(3, "Allen", "1"), + employee(4, "Ellis", "2"), + employee(5, "Frank", "2")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + MatcherUtils.hitsInOrder( + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Allen")), + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Ellis")), + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Frank")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")))); + } + + /** Doesn't support muliple columns from both tables (order is missing) */ + @Test + public void simpleQueryWithLeftJoinAndOrderByMultipleColumnsFromOneTableInDesc() { + MatcherAssert.assertThat( + query( + "SELECT d.id AS id, e.lastname AS lastname FROM 
employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " ORDER BY e.departmentId, e.lastname DESC", + employees( + employee(1, "Hank", "1"), + employee(2, "Alice", "2"), + employee(3, "Allen", "1"), + employee(4, "Ellis", "2"), + employee(5, "Gary", "3"), + employee(5, "Frank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + MatcherUtils.hitsInOrder( + hit(MatcherUtils.kv("id", null), MatcherUtils.kv("lastname", "Gary")), + hit(MatcherUtils.kv("id", null), MatcherUtils.kv("lastname", "Frank")), + hit(MatcherUtils.kv("id", "2"), MatcherUtils.kv("lastname", "Ellis")), + hit(MatcherUtils.kv("id", "2"), MatcherUtils.kv("lastname", "Alice")), + hit(MatcherUtils.kv("id", "1"), MatcherUtils.kv("lastname", "Hank")), + hit(MatcherUtils.kv("id", "1"), MatcherUtils.kv("lastname", "Allen")))); + } + + @Test + public void simpleCrossJoin() { + MatcherAssert.assertThat( + query( + "SELECT d.name AS dname, e.lastname AS ename FROM employee e JOIN department d", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits( + hit(MatcherUtils.kv("dname", "AWS"), MatcherUtils.kv("ename", "Alice")), + hit(MatcherUtils.kv("dname", "AWS"), MatcherUtils.kv("ename", "Hank")), + hit(MatcherUtils.kv("dname", "Retail"), MatcherUtils.kv("ename", "Alice")), + hit(MatcherUtils.kv("dname", "Retail"), MatcherUtils.kv("ename", "Hank")))); + } + + @Test + public void simpleQueryWithTermsFilterOptimization() { + MatcherAssert.assertThat( + query( + "SELECT /*! 
HASH_WITH_TERMS_FILTER*/ " + + // Be careful that no space between ...FILTER and */ + " e.lastname, d.id FROM employee e " + + " JOIN department d ON d.id = e.departmentId AND d.name = e.lastname", + employees( + employee(1, "Johnson", "1"), + employee(2, "Allen", "4"), + employee(3, "Ellis", "2"), + employee(4, "Dell", "1"), + employee(5, "Dell", "4")), + departments(department(1, "1", "Johnson"), department(1, "4", "Dell"))), + hits( + hit(MatcherUtils.kv("e.lastname", "Johnson"), MatcherUtils.kv("d.id", "1")), + hit(MatcherUtils.kv("e.lastname", "Dell"), MatcherUtils.kv("d.id", "4")))); + } + + @Test + public void complexJoinWithMultipleConditions() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname, d.id " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId AND d.name = e.lastname" + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Dell", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "Dell"))), + hits( + hit( + MatcherUtils.kv("d.name", "Dell"), + MatcherUtils.kv("e.lastname", "Dell"), + MatcherUtils.kv("d.id", "1")))); + } + + @Test + public void complexJoinWithOrConditions() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Alice", "1"), employee(2, "Dell", "2"), employee(3, "Hank", "3")), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Dell")))); + } + + @Test + public void complexJoinWithOrConditionsDuplicate() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.departmentId " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Dell", "1") // Match both 
condition but should only show once in result + ), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits(hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.departmentId", "1")))); + } + + @Test + public void complexJoinWithOrConditionsAndTermsFilterOptimization() { + MatcherAssert.assertThat( + query( + "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + + " d.name, e.lastname " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Alice", "1"), employee(2, "Dell", "2"), employee(3, "Hank", "3")), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Dell")))); + } + + @Test + public void complexLeftJoinWithOrConditions() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname " + + " FROM employee e " + + " LEFT JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Alice", "1"), employee(2, "Dell", "2"), employee(3, "Hank", "3")), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Dell")), + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void complexJoinWithTableLimitHint() { + MatcherAssert.assertThat( + query( + "SELECT " + + " /*! 
JOIN_TABLES_LIMIT(2, 1)*/" + + " d.name, e.lastname " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId", + employees( + employee(1, "Alice", "1"), // Only this and the second row will be pulled out + employee(2, "Dell", "4"), + employee(3, "Hank", "1")), + departments( + department(1, "1", "Dell"), // Only this row will be pulled out + department(2, "4", "AWS"))), + hits(hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java index 2c92c91666..7f495935ca 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java @@ -3,45 +3,41 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import org.junit.Test; import org.opensearch.sql.legacy.query.planner.core.QueryPlanner; -/** - * Query planner explanation unit test - */ +/** Query planner explanation unit test */ public class QueryPlannerExplainTest extends QueryPlannerTest { - @Test - public void explainInJson() { - QueryPlanner planner = plan( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30" - ); - planner.explain(); - } - - @Test - public void explainInJsonWithComplicatedOn() { - QueryPlanner planner = plan( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId AND d.location = e.region " + - " WHERE d.region = 'US' AND e.age > 30" - ); - planner.explain(); - } - - @Test - public void explainInJsonWithDuplicateColumnsPushedDown() { - QueryPlanner planner = plan( - "SELECT d.id, e.departmentId FROM employee e " + - " JOIN department d ON 
d.id = e.departmentId AND d.location = e.region " + - " WHERE d.region = 'US' AND e.age > 30" - ); - planner.explain(); - } - + @Test + public void explainInJson() { + QueryPlanner planner = + plan( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30"); + planner.explain(); + } + + @Test + public void explainInJsonWithComplicatedOn() { + QueryPlanner planner = + plan( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId AND d.location = e.region " + + " WHERE d.region = 'US' AND e.age > 30"); + planner.explain(); + } + + @Test + public void explainInJsonWithDuplicateColumnsPushedDown() { + QueryPlanner planner = + plan( + "SELECT d.id, e.departmentId FROM employee e " + + " JOIN department d ON d.id = e.departmentId AND d.location = e.region " + + " WHERE d.region = 'US' AND e.age > 30"); + planner.explain(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java index 66ce2411f4..9b1d307ebc 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.mockito.Mockito.doAnswer; @@ -18,109 +17,95 @@ import org.opensearch.sql.legacy.query.planner.resource.Stats; import org.opensearch.sql.legacy.query.planner.resource.Stats.MemStats; -/** - * Circuit breaker component test - */ +/** Circuit breaker component test */ @Ignore public class QueryPlannerMonitorTest extends QueryPlannerTest { - /** Configure back off strategy 1s, 1s and 1s - retry 4 times at most */ - private static final String TEST_SQL1 = - "SELECT /*! 
JOIN_BACK_OFF_RETRY_INTERVALS(1, 1, 1) */ " + - " /*! JOIN_CIRCUIT_BREAK_LIMIT("; - - private static final String TEST_SQL2 = - ") */ d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30"; - - private static final long[] PERCENT_USAGE_15 = freeAndTotalMem(85, 100); - private static final long[] PERCENT_USAGE_24 = freeAndTotalMem(76, 100); - private static final long[] PERCENT_USAGE_50 = freeAndTotalMem(50, 100); - - @Spy - private Stats stats = new Stats(client); - - @Test - public void reachedLimitAndRecoverAt1stAttempt() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_24); - queryWithLimit(25); // TODO: assert if final result set is correct after recovery - } - - @Test - public void reachedLimitAndRecoverAt2ndAttempt() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); - queryWithLimit(25); - } - - @Test - public void reachedLimitAndRecoverAt3rdAttempt() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); - queryWithLimit(25); - } - - @Test(expected = IllegalStateException.class) - public void reachedLimitAndFailFinally() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50); - queryWithLimit(25); - } - - @Test(expected = IllegalStateException.class) - public void reachedLimitAndRejectNewRequest() { - mockMemUsage(PERCENT_USAGE_50); - queryWithLimit(25); - } - - @Test(expected = IllegalStateException.class) - public void timeOut() { - query( - "SELECT /*! JOIN_TIME_OUT(0) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId", - employees( - employee(1, "Dell", "1") - ), - departments( - department(1, "1", "Dell") - ) - ); - } - - private void mockMemUsage(long[]... 
memUsages) { - doAnswer(new Answer() { - private int callCnt = -1; - - @Override - public MemStats answer(InvocationOnMock invocation) { + /** Configure back off strategy 1s, 1s and 1s - retry 4 times at most */ + private static final String TEST_SQL1 = + "SELECT /*! JOIN_BACK_OFF_RETRY_INTERVALS(1, 1, 1) */ " + " /*! JOIN_CIRCUIT_BREAK_LIMIT("; + + private static final String TEST_SQL2 = + ") */ d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30"; + + private static final long[] PERCENT_USAGE_15 = freeAndTotalMem(85, 100); + private static final long[] PERCENT_USAGE_24 = freeAndTotalMem(76, 100); + private static final long[] PERCENT_USAGE_50 = freeAndTotalMem(50, 100); + + @Spy private Stats stats = new Stats(client); + + @Test + public void reachedLimitAndRecoverAt1stAttempt() { + mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_24); + queryWithLimit(25); // TODO: assert if final result set is correct after recovery + } + + @Test + public void reachedLimitAndRecoverAt2ndAttempt() { + mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); + queryWithLimit(25); + } + + @Test + public void reachedLimitAndRecoverAt3rdAttempt() { + mockMemUsage( + PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); + queryWithLimit(25); + } + + @Test(expected = IllegalStateException.class) + public void reachedLimitAndFailFinally() { + mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50); + queryWithLimit(25); + } + + @Test(expected = IllegalStateException.class) + public void reachedLimitAndRejectNewRequest() { + mockMemUsage(PERCENT_USAGE_50); + queryWithLimit(25); + } + + @Test(expected = IllegalStateException.class) + public void timeOut() { + query( + "SELECT /*! 
JOIN_TIME_OUT(0) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId", + employees(employee(1, "Dell", "1")), + departments(department(1, "1", "Dell"))); + } + + private void mockMemUsage(long[]... memUsages) { + doAnswer( + new Answer() { + private int callCnt = -1; + + @Override + public MemStats answer(InvocationOnMock invocation) { callCnt = Math.min(callCnt + 1, memUsages.length - 1); - return new MemStats( - memUsages[callCnt][0], memUsages[callCnt][1] - ); - } - }).when(stats).collectMemStats(); - } - - private static long[] freeAndTotalMem(long free, long total) { - return new long[]{ free, total }; - } - - private SearchHits queryWithLimit(int limit) { - return query( - TEST_SQL1 + limit + TEST_SQL2, - employees( - employee(1, "Dell", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "Dell") - ) - ); - } - - @Override - protected QueryPlanner plan(String sql) { - QueryPlanner planner = super.plan(sql); - planner.setStats(stats); - return planner; - } - + return new MemStats(memUsages[callCnt][0], memUsages[callCnt][1]); + } + }) + .when(stats) + .collectMemStats(); + } + + private static long[] freeAndTotalMem(long free, long total) { + return new long[] {free, total}; + } + + private SearchHits queryWithLimit(int limit) { + return query( + TEST_SQL1 + limit + TEST_SQL2, + employees(employee(1, "Dell", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "Dell"))); + } + + @Override + protected QueryPlanner plan(String sql) { + QueryPlanner planner = super.plan(sql); + planner.setStats(stats); + return planner; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java index 66380c108d..4cda101ae4 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static java.util.Collections.emptyList; @@ -58,246 +57,240 @@ import org.opensearch.sql.legacy.request.SqlRequest; import org.opensearch.sql.opensearch.setting.OpenSearchSettings; -/** - * Test base class for all query planner tests. - */ +/** Test base class for all query planner tests. */ @Ignore public abstract class QueryPlannerTest { - @Mock - protected Client client; + @Mock protected Client client; - @Mock - private SearchResponse response1; - private static final String SCROLL_ID1 = "1"; + @Mock private SearchResponse response1; + private static final String SCROLL_ID1 = "1"; - @Mock - private SearchResponse response2; - private static final String SCROLL_ID2 = "2"; + @Mock private SearchResponse response2; + private static final String SCROLL_ID2 = "2"; - @Mock - private ClusterSettings clusterSettings; + @Mock private ClusterSettings clusterSettings; - /* - @BeforeClass - public static void initLogger() { - ConfigurationBuilder builder = newConfigurationBuilder(); - AppenderComponentBuilder appender = builder.newAppender("stdout", "Console"); + /* + @BeforeClass + public static void initLogger() { + ConfigurationBuilder builder = newConfigurationBuilder(); + AppenderComponentBuilder appender = builder.newAppender("stdout", "Console"); - LayoutComponentBuilder standard = builder.newLayout("PatternLayout"); - standard.addAttribute("pattern", "%d [%t] %-5level: %msg%n%throwable"); - appender.add(standard); + LayoutComponentBuilder standard = builder.newLayout("PatternLayout"); + standard.addAttribute("pattern", "%d [%t] %-5level: %msg%n%throwable"); + appender.add(standard); - RootLoggerComponentBuilder rootLogger = builder.newRootLogger(Level.ERROR); - rootLogger.add(builder.newAppenderRef("stdout")); + RootLoggerComponentBuilder rootLogger = 
builder.newRootLogger(Level.ERROR); + rootLogger.add(builder.newAppenderRef("stdout")); - LoggerComponentBuilder logger = builder.newLogger("org.nlpcn.es4sql.query.planner", Level.TRACE); - logger.add(builder.newAppenderRef("stdout")); - //logger.addAttribute("additivity", false); + LoggerComponentBuilder logger = builder.newLogger("org.nlpcn.es4sql.query.planner", Level.TRACE); + logger.add(builder.newAppenderRef("stdout")); + //logger.addAttribute("additivity", false); - builder.add(logger); + builder.add(logger); - Configurator.initialize(builder.build()); - } - */ + Configurator.initialize(builder.build()); + } + */ - @Before - public void init() { - MockitoAnnotations.initMocks(this); - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); + @Before + public void init() { + MockitoAnnotations.initMocks(this); + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); - // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard to mock. - // In this case, default value in Setting will be returned all the time. - doReturn(emptyList()).when(settings).getSettings(); - LocalClusterState.state().setPluginSettings(settings); + // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard + // to mock. + // In this case, default value in Setting will be returned all the time. 
+ doReturn(emptyList()).when(settings).getSettings(); + LocalClusterState.state().setPluginSettings(settings); - ActionFuture mockFuture = mock(ActionFuture.class); - when(client.execute(any(), any())).thenReturn(mockFuture); + ActionFuture mockFuture = mock(ActionFuture.class); + when(client.execute(any(), any())).thenReturn(mockFuture); - // Differentiate response for Scroll-1/2 by call count and scroll ID. - when(mockFuture.actionGet()).thenAnswer(new Answer() { - private int callCnt; + // Differentiate response for Scroll-1/2 by call count and scroll ID. + when(mockFuture.actionGet()) + .thenAnswer( + new Answer() { + private int callCnt; - @Override - public SearchResponse answer(InvocationOnMock invocation) { + @Override + public SearchResponse answer(InvocationOnMock invocation) { /* * This works based on assumption that first call comes from Scroll-1, all the following calls come from Scroll-2. * Because Scroll-1 only open scroll once and must be ahead of Scroll-2 which opens multiple times later. */ return callCnt++ == 0 ? 
response1 : response2; - } - }); - - doReturn(SCROLL_ID1).when(response1).getScrollId(); - doReturn(SCROLL_ID2).when(response2).getScrollId(); - - // Avoid NPE in empty SearchResponse - doReturn(0).when(response1).getFailedShards(); - doReturn(0).when(response2).getFailedShards(); - doReturn(false).when(response1).isTimedOut(); - doReturn(false).when(response2).isTimedOut(); - - returnMockResponse(SCROLL_ID1, response1); - returnMockResponse(SCROLL_ID2, response2); - - Metrics.getInstance().registerDefaultMetrics(); - } - - private void returnMockResponse(String scrollId, SearchResponse response) { - SearchScrollRequestBuilder mockReqBuilder = mock(SearchScrollRequestBuilder.class); - when(client.prepareSearchScroll(scrollId)).thenReturn(mockReqBuilder); - when(mockReqBuilder.setScroll(any(TimeValue.class))).thenReturn(mockReqBuilder); - when(mockReqBuilder.get()).thenReturn(response); - } - - protected SearchHits query(String sql, MockSearchHits mockHits1, MockSearchHits mockHits2) { - doAnswer(mockHits1).when(response1).getHits(); - doAnswer(mockHits2).when(response2).getHits(); - - try (MockedStatic backOffRetryStrategyMocked = - Mockito.mockStatic(BackOffRetryStrategy.class)) { - backOffRetryStrategyMocked.when(BackOffRetryStrategy::isHealthy).thenReturn(true); + } + }); - ClearScrollRequestBuilder mockReqBuilder = mock(ClearScrollRequestBuilder.class); - when(client.prepareClearScroll()).thenReturn(mockReqBuilder); - when(mockReqBuilder.addScrollId(any())).thenReturn(mockReqBuilder); - when(mockReqBuilder.get()).thenAnswer(new Answer() { + doReturn(SCROLL_ID1).when(response1).getScrollId(); + doReturn(SCROLL_ID2).when(response2).getScrollId(); + + // Avoid NPE in empty SearchResponse + doReturn(0).when(response1).getFailedShards(); + doReturn(0).when(response2).getFailedShards(); + doReturn(false).when(response1).isTimedOut(); + doReturn(false).when(response2).isTimedOut(); + + returnMockResponse(SCROLL_ID1, response1); + returnMockResponse(SCROLL_ID2, 
response2); + + Metrics.getInstance().registerDefaultMetrics(); + } + + private void returnMockResponse(String scrollId, SearchResponse response) { + SearchScrollRequestBuilder mockReqBuilder = mock(SearchScrollRequestBuilder.class); + when(client.prepareSearchScroll(scrollId)).thenReturn(mockReqBuilder); + when(mockReqBuilder.setScroll(any(TimeValue.class))).thenReturn(mockReqBuilder); + when(mockReqBuilder.get()).thenReturn(response); + } + + protected SearchHits query(String sql, MockSearchHits mockHits1, MockSearchHits mockHits2) { + doAnswer(mockHits1).when(response1).getHits(); + doAnswer(mockHits2).when(response2).getHits(); + + try (MockedStatic backOffRetryStrategyMocked = + Mockito.mockStatic(BackOffRetryStrategy.class)) { + backOffRetryStrategyMocked.when(BackOffRetryStrategy::isHealthy).thenReturn(true); + + ClearScrollRequestBuilder mockReqBuilder = mock(ClearScrollRequestBuilder.class); + when(client.prepareClearScroll()).thenReturn(mockReqBuilder); + when(mockReqBuilder.addScrollId(any())).thenReturn(mockReqBuilder); + when(mockReqBuilder.get()) + .thenAnswer( + new Answer() { @Override public ClearScrollResponse answer(InvocationOnMock invocation) throws Throwable { - mockHits2.reset(); - return new ClearScrollResponse(true, 0); + mockHits2.reset(); + return new ClearScrollResponse(true, 0); } - }); + }); - List hits = plan(sql).execute(); - return new SearchHits(hits.toArray(new SearchHit[0]), new TotalHits(hits.size(), Relation.EQUAL_TO), 0); - } + List hits = plan(sql).execute(); + return new SearchHits( + hits.toArray(new SearchHit[0]), new TotalHits(hits.size(), Relation.EQUAL_TO), 0); } + } - protected QueryPlanner plan(String sql) { - SqlElasticRequestBuilder request = createRequestBuilder(sql); - if (request instanceof HashJoinQueryPlanRequestBuilder) { - return ((HashJoinQueryPlanRequestBuilder) request).plan(); - } - throw new IllegalStateException("Not a JOIN query: " + sql); + protected QueryPlanner plan(String sql) { + 
SqlElasticRequestBuilder request = createRequestBuilder(sql); + if (request instanceof HashJoinQueryPlanRequestBuilder) { + return ((HashJoinQueryPlanRequestBuilder) request).plan(); } - - protected SqlElasticRequestBuilder createRequestBuilder(String sql) { - try { - SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); - JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); // Ignore handleSubquery() - QueryAction queryAction = OpenSearchJoinQueryActionFactory - .createJoinAction(client, joinSelect); - queryAction.setSqlRequest(new SqlRequest(sql, null)); - return queryAction.explain(); - } - catch (SqlParseException e) { - throw new IllegalStateException("Invalid query: " + sql, e); - } + throw new IllegalStateException("Not a JOIN query: " + sql); + } + + protected SqlElasticRequestBuilder createRequestBuilder(String sql) { + try { + SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); + JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); // Ignore handleSubquery() + QueryAction queryAction = + OpenSearchJoinQueryActionFactory.createJoinAction(client, joinSelect); + queryAction.setSqlRequest(new SqlRequest(sql, null)); + return queryAction.explain(); + } catch (SqlParseException e) { + throw new IllegalStateException("Invalid query: " + sql, e); } + } - private SQLExpr toSqlExpr(String sql) { - SQLExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); + private SQLExpr toSqlExpr(String sql) { + SQLExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("illegal sql expr : " + sql); - } - return expr; + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("illegal sql expr : " + sql); } + return expr; + } - /** - * Mock SearchHits and slice and return in batch. 
- */ - protected static class MockSearchHits implements Answer { - - private final SearchHit[] allHits; - - private final int batchSize; //TODO: should be inferred from mock object dynamically - - private int callCnt; - - MockSearchHits(SearchHit[] allHits, int batchSize) { - this.allHits = allHits; - this.batchSize = batchSize; - } - - @Override - public SearchHits answer(InvocationOnMock invocation) { - SearchHit[] curBatch; - if (isNoMoreBatch()) { - curBatch = new SearchHit[0]; - } else { - curBatch = currentBatch(); - callCnt++; - } - return new SearchHits(curBatch, new TotalHits(allHits.length, Relation.EQUAL_TO), 0); - } - - private boolean isNoMoreBatch() { - return callCnt > allHits.length / batchSize; - } - - private SearchHit[] currentBatch() { - return Arrays.copyOfRange(allHits, startIndex(), endIndex()); - } - - private int startIndex() { - return callCnt * batchSize; - } - - private int endIndex() { - return Math.min(startIndex() + batchSize, allHits.length); - } - - private void reset() { - callCnt = 0; - } - } + /** Mock SearchHits and slice and return in batch. */ + protected static class MockSearchHits implements Answer { + + private final SearchHit[] allHits; - protected MockSearchHits employees(SearchHit... mockHits) { - return employees(5, mockHits); + private final int batchSize; // TODO: should be inferred from mock object dynamically + + private int callCnt; + + MockSearchHits(SearchHit[] allHits, int batchSize) { + this.allHits = allHits; + this.batchSize = batchSize; } - protected MockSearchHits employees(int pageSize, SearchHit... mockHits) { - return new MockSearchHits(mockHits, pageSize); + @Override + public SearchHits answer(InvocationOnMock invocation) { + SearchHit[] curBatch; + if (isNoMoreBatch()) { + curBatch = new SearchHit[0]; + } else { + curBatch = currentBatch(); + callCnt++; + } + return new SearchHits(curBatch, new TotalHits(allHits.length, Relation.EQUAL_TO), 0); } - protected MockSearchHits departments(SearchHit... 
mockHits) { - return departments(5, mockHits); + private boolean isNoMoreBatch() { + return callCnt > allHits.length / batchSize; } - protected MockSearchHits departments(int pageSize, SearchHit... mockHits) { - return new MockSearchHits(mockHits, pageSize); + private SearchHit[] currentBatch() { + return Arrays.copyOfRange(allHits, startIndex(), endIndex()); } - protected SearchHit employee(int docId, String lastname, String departmentId) { - SearchHit hit = new SearchHit(docId); - if (lastname == null) { - hit.sourceRef(new BytesArray("{\"departmentId\":\"" + departmentId + "\"}")); - } - else if (departmentId == null) { - hit.sourceRef(new BytesArray("{\"lastname\":\"" + lastname + "\"}")); - } - else { - hit.sourceRef(new BytesArray("{\"lastname\":\"" + lastname + "\",\"departmentId\":\"" + departmentId + "\"}")); - } - return hit; + private int startIndex() { + return callCnt * batchSize; } - protected SearchHit department(int docId, String id, String name) { - SearchHit hit = new SearchHit(docId); - if (id == null) { - hit.sourceRef(new BytesArray("{\"name\":\"" + name + "\"}")); - } - else if (name == null) { - hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\"}")); - } - else { - hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\",\"name\":\"" + name + "\"}")); - } - return hit; + private int endIndex() { + return Math.min(startIndex() + batchSize, allHits.length); } + private void reset() { + callCnt = 0; + } + } + + protected MockSearchHits employees(SearchHit... mockHits) { + return employees(5, mockHits); + } + + protected MockSearchHits employees(int pageSize, SearchHit... mockHits) { + return new MockSearchHits(mockHits, pageSize); + } + + protected MockSearchHits departments(SearchHit... mockHits) { + return departments(5, mockHits); + } + + protected MockSearchHits departments(int pageSize, SearchHit... 
mockHits) { + return new MockSearchHits(mockHits, pageSize); + } + + protected SearchHit employee(int docId, String lastname, String departmentId) { + SearchHit hit = new SearchHit(docId); + if (lastname == null) { + hit.sourceRef(new BytesArray("{\"departmentId\":\"" + departmentId + "\"}")); + } else if (departmentId == null) { + hit.sourceRef(new BytesArray("{\"lastname\":\"" + lastname + "\"}")); + } else { + hit.sourceRef( + new BytesArray( + "{\"lastname\":\"" + lastname + "\",\"departmentId\":\"" + departmentId + "\"}")); + } + return hit; + } + + protected SearchHit department(int docId, String id, String name) { + SearchHit hit = new SearchHit(docId); + if (id == null) { + hit.sourceRef(new BytesArray("{\"name\":\"" + name + "\"}")); + } else if (name == null) { + hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\"}")); + } else { + hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\",\"name\":\"" + name + "\"}")); + } + return hit; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java index 589dab8905..cca5f745ee 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -29,305 +28,318 @@ @RunWith(MockitoJUnitRunner.class) public class SearchAggregationResponseHelperTest { - /** - * SELECT MAX(age) as max - * FROM accounts - */ - @Test - public void noBucketOneMetricShouldPass() { - String json = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; - List> result = 
SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("max", 40d)))); - } + /** SELECT MAX(age) as max FROM accounts */ + @Test + public void noBucketOneMetricShouldPass() { + String json = "{\n \"max#max\": {\n \"value\": 40\n }\n}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat(result, contains(allOf(hasEntry("max", 40d)))); + } - /** - * SELECT MAX(age) as max, MIN(age) as min - * FROM accounts - */ - @Test - public void noBucketMultipleMetricShouldPass() { - String json = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " },\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("max", 40d), hasEntry("min", 20d)))); - } + /** SELECT MAX(age) as max, MIN(age) as min FROM accounts */ + @Test + public void noBucketMultipleMetricShouldPass() { + String json = + "{\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " },\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat(result, contains(allOf(hasEntry("max", 40d), hasEntry("min", 20d)))); + } - /** - * SELECT gender, MAX(age) as max, MIN(age) as min - * FROM accounts - * GROUP BY gender - */ - @Test - public void oneBucketMultipleMetricShouldPass() { - String json = "{\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 507,\n" - + " \"min#min\": {\n" - + " \"value\": 10\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 493,\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " 
]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("gender", (Object) "m"), hasEntry("min", 10d), hasEntry("max", 20d)), - allOf(hasEntry("gender", (Object) "f"), hasEntry("min", 20d), - hasEntry("max", 40d)))); - } + /** SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender */ + @Test + public void oneBucketMultipleMetricShouldPass() { + String json = + "{\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 507,\n" + + " \"min#min\": {\n" + + " \"value\": 10\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 493,\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + contains( + allOf(hasEntry("gender", (Object) "m"), hasEntry("min", 10d), hasEntry("max", 20d)), + allOf(hasEntry("gender", (Object) "f"), hasEntry("min", 20d), hasEntry("max", 40d)))); + } - /** - * SELECT gender, state, MAX(age) as max, MIN(age) as min - * FROM accounts - * GROUP BY gender, state - */ - @Test - public void multipleBucketMultipleMetricShouldPass() { - String json = "{\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"sterms#state\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"MD\",\n" - + " \"min#min\": {\n" - + " \"value\": 22\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 39\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"ID\",\n" - + " \"min#min\": {\n" - + " \"value\": 23\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + " },\n" - + " {\n" 
- + " \"key\": \"f\",\n" - + " \"sterms#state\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"TX\",\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 38\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"MI\",\n" - + " \"min#min\": {\n" - + " \"value\": 22\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains( - allOf(hasEntry("gender", (Object) "m"), hasEntry("state", (Object) "MD"), hasEntry("min", 22d), - hasEntry("max", 39d)), - allOf(hasEntry("gender", (Object) "m"), hasEntry("state", (Object) "ID"), hasEntry("min", 23d), - hasEntry("max", 40d)), - allOf(hasEntry("gender", (Object) "f"), hasEntry("state", (Object) "TX"), hasEntry("min", 20d), - hasEntry("max", 38d)), - allOf(hasEntry("gender", (Object) "f"), hasEntry("state", (Object) "MI"), hasEntry("min", 22d), - hasEntry("max", 40d)))); - } + /** SELECT gender, state, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender, state */ + @Test + public void multipleBucketMultipleMetricShouldPass() { + String json = + "{\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"sterms#state\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"MD\",\n" + + " \"min#min\": {\n" + + " \"value\": 22\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 39\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"ID\",\n" + + " \"min#min\": {\n" + + " \"value\": 23\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"sterms#state\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"TX\",\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " },\n" + + " \"max#max\": {\n" 
+ + " \"value\": 38\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"MI\",\n" + + " \"min#min\": {\n" + + " \"value\": 22\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + contains( + allOf( + hasEntry("gender", (Object) "m"), + hasEntry("state", (Object) "MD"), + hasEntry("min", 22d), + hasEntry("max", 39d)), + allOf( + hasEntry("gender", (Object) "m"), + hasEntry("state", (Object) "ID"), + hasEntry("min", 23d), + hasEntry("max", 40d)), + allOf( + hasEntry("gender", (Object) "f"), + hasEntry("state", (Object) "TX"), + hasEntry("min", 20d), + hasEntry("max", 38d)), + allOf( + hasEntry("gender", (Object) "f"), + hasEntry("state", (Object) "MI"), + hasEntry("min", 22d), + hasEntry("max", 40d)))); + } - /** - * SELECT age, gender FROM accounts GROUP BY age, gender - */ - @Test - public void multipleBucketWithoutMetricShouldPass() { - String json = "{\n" - + " \"lterms#age\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": 31,\n" - + " \"doc_count\": 61,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 35\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 26\n" - + " }\n" - + " ]\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": 39,\n" - + " \"doc_count\": 60,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 38\n" - + " },\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 22\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, containsInAnyOrder( - allOf(hasEntry("age", (Object) 31L), hasEntry("gender","m")), - allOf(hasEntry("age", (Object) 31L), 
hasEntry("gender","f")), - allOf(hasEntry("age", (Object) 39L), hasEntry("gender","m")), - allOf(hasEntry("age", (Object) 39L), hasEntry("gender","f")))); - } + /** SELECT age, gender FROM accounts GROUP BY age, gender */ + @Test + public void multipleBucketWithoutMetricShouldPass() { + String json = + "{\n" + + " \"lterms#age\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": 31,\n" + + " \"doc_count\": 61,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 35\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 26\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": 39,\n" + + " \"doc_count\": 60,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 38\n" + + " },\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 22\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + containsInAnyOrder( + allOf(hasEntry("age", (Object) 31L), hasEntry("gender", "m")), + allOf(hasEntry("age", (Object) 31L), hasEntry("gender", "f")), + allOf(hasEntry("age", (Object) 39L), hasEntry("gender", "m")), + allOf(hasEntry("age", (Object) 39L), hasEntry("gender", "f")))); + } - /** - * SELECT PERCENTILES(age) FROM accounts - */ - @Test - public void noBucketPercentilesShouldPass() { - String json = "{\n" - + " \"percentiles_bucket#age\": {\n" - + " \"values\": {\n" - + " \"1.0\": 20,\n" - + " \"5.0\": 21,\n" - + " \"25.0\": 25,\n" - + " \"50.0\": 30.90909090909091,\n" - + " \"75.0\": 35,\n" - + " \"95.0\": 39,\n" - + " \"99.0\": 40\n" - + " }\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("age_1.0", 20d)))); - } + /** SELECT PERCENTILES(age) FROM accounts */ + 
@Test + public void noBucketPercentilesShouldPass() { + String json = + "{\n" + + " \"percentiles_bucket#age\": {\n" + + " \"values\": {\n" + + " \"1.0\": 20,\n" + + " \"5.0\": 21,\n" + + " \"25.0\": 25,\n" + + " \"50.0\": 30.90909090909091,\n" + + " \"75.0\": 35,\n" + + " \"95.0\": 39,\n" + + " \"99.0\": 40\n" + + " }\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat(result, contains(allOf(hasEntry("age_1.0", 20d)))); + } - /** - * SELECT count(*) from online - * GROUP BY date_histogram('field'='insert_time','interval'='4d','alias'='days') - */ - @Test - public void populateShouldPass() { - String json = "{\n" - + " \"date_histogram#days\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key_as_string\": \"2014-08-14 00:00:00\",\n" - + " \"key\": 1407974400000,\n" - + " \"doc_count\": 477,\n" - + " \"value_count#COUNT_0\": {\n" - + " \"value\": 477\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, containsInAnyOrder( - allOf(hasEntry("days", (Object) "2014-08-14 00:00:00"), hasEntry("COUNT_0",477d)))); - } + /** + * SELECT count(*) from online GROUP BY + * date_histogram('field'='insert_time','interval'='4d','alias'='days') + */ + @Test + public void populateShouldPass() { + String json = + "{\n" + + " \"date_histogram#days\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key_as_string\": \"2014-08-14 00:00:00\",\n" + + " \"key\": 1407974400000,\n" + + " \"doc_count\": 477,\n" + + " \"value_count#COUNT_0\": {\n" + + " \"value\": 477\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + containsInAnyOrder( + allOf(hasEntry("days", (Object) "2014-08-14 00:00:00"), hasEntry("COUNT_0", 477d)))); + } - /** - * SELECT s - */ - @Test - public void 
populateSearchAggregationResponeShouldPass() { - String json = "{\n" - + " \"lterms#age\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": 31,\n" - + " \"doc_count\": 61,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 35\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 26\n" - + " }\n" - + " ]\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": 39,\n" - + " \"doc_count\": 60,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 38\n" - + " },\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 22\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List bindingTupleRows = - SearchAggregationResponseHelper.populateSearchAggregationResponse(AggregationUtils.fromJson(json)); - assertEquals(4, bindingTupleRows.size()); - assertThat(bindingTupleRows, containsInAnyOrder( - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "m"))), - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "f"))), - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "m"))), - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "f"))))); - } + /** SELECT s */ + @Test + public void populateSearchAggregationResponeShouldPass() { + String json = + "{\n" + + " \"lterms#age\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": 31,\n" + + " \"doc_count\": 61,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 35\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 26\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": 39,\n" + + " \"doc_count\": 60,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 38\n" + + " },\n" + + " {\n" + + " \"key\": \"m\",\n" + + " 
\"doc_count\": 22\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List bindingTupleRows = + SearchAggregationResponseHelper.populateSearchAggregationResponse( + AggregationUtils.fromJson(json)); + assertEquals(4, bindingTupleRows.size()); + assertThat( + bindingTupleRows, + containsInAnyOrder( + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "m"))), + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "f"))), + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "m"))), + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "f"))))); + } - private static Matcher bindingTupleRow(BindingTuple bindingTuple) { - return featureValueOf("BindingTuple", equalTo(bindingTuple), BindingTupleRow::data); - } + private static Matcher bindingTupleRow(BindingTuple bindingTuple) { + return featureValueOf("BindingTuple", equalTo(bindingTuple), BindingTupleRow::data); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java index 632cd2d7ea..9c13e1fc71 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter; import static org.mockito.Mockito.never; @@ -23,31 +22,29 @@ @RunWith(MockitoJUnitRunner.class) public class RewriteRuleExecutorTest { - @Mock - private RewriteRule rewriter; - @Mock - private SQLQueryExpr expr; + @Mock private RewriteRule rewriter; + @Mock private SQLQueryExpr expr; - private RewriteRuleExecutor ruleExecutor; + private RewriteRuleExecutor ruleExecutor; - @Before - public void setup() { - ruleExecutor = 
RewriteRuleExecutor.builder().withRule(rewriter).build(); - } + @Before + public void setup() { + ruleExecutor = RewriteRuleExecutor.builder().withRule(rewriter).build(); + } - @Test - public void optimize() throws SQLFeatureNotSupportedException { - when(rewriter.match(expr)).thenReturn(true); + @Test + public void optimize() throws SQLFeatureNotSupportedException { + when(rewriter.match(expr)).thenReturn(true); - ruleExecutor.executeOn(expr); - verify(rewriter, times(1)).rewrite(expr); - } + ruleExecutor.executeOn(expr); + verify(rewriter, times(1)).rewrite(expr); + } - @Test - public void noOptimize() throws SQLFeatureNotSupportedException { - when(rewriter.match(expr)).thenReturn(false); + @Test + public void noOptimize() throws SQLFeatureNotSupportedException { + when(rewriter.match(expr)).thenReturn(false); - ruleExecutor.executeOn(expr); - verify(rewriter, never()).rewrite(expr); - } + ruleExecutor.executeOn(expr); + verify(rewriter, never()).rewrite(expr); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java index 3f4f799d66..d27967e361 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.ordinal; import com.alibaba.druid.sql.SQLUtils; @@ -16,141 +15,139 @@ import org.opensearch.sql.legacy.rewriter.ordinal.OrdinalRewriterRule; import org.opensearch.sql.legacy.util.SqlParserUtils; -/** - * Test cases for ordinal aliases in GROUP BY and ORDER BY - */ - +/** Test cases for ordinal aliases in GROUP BY and ORDER BY */ public class OrdinalRewriterRuleTest { - @Rule - public ExpectedException exception = 
ExpectedException.none(); - - @Test - public void ordinalInGroupByShouldMatch() { - query("SELECT lastname FROM bank GROUP BY 1").shouldMatchRule(); - } - - @Test - public void ordinalInOrderByShouldMatch() { - query("SELECT lastname FROM bank ORDER BY 1").shouldMatchRule(); - } - - @Test - public void ordinalInGroupAndOrderByShouldMatch() { - query("SELECT lastname, age FROM bank GROUP BY 2, 1 ORDER BY 1").shouldMatchRule(); - } - - @Test - public void noOrdinalInGroupByShouldNotMatch() { - query("SELECT lastname FROM bank GROUP BY lastname").shouldNotMatchRule(); + @Rule public ExpectedException exception = ExpectedException.none(); + + @Test + public void ordinalInGroupByShouldMatch() { + query("SELECT lastname FROM bank GROUP BY 1").shouldMatchRule(); + } + + @Test + public void ordinalInOrderByShouldMatch() { + query("SELECT lastname FROM bank ORDER BY 1").shouldMatchRule(); + } + + @Test + public void ordinalInGroupAndOrderByShouldMatch() { + query("SELECT lastname, age FROM bank GROUP BY 2, 1 ORDER BY 1").shouldMatchRule(); + } + + @Test + public void noOrdinalInGroupByShouldNotMatch() { + query("SELECT lastname FROM bank GROUP BY lastname").shouldNotMatchRule(); + } + + @Test + public void noOrdinalInOrderByShouldNotMatch() { + query("SELECT lastname, age FROM bank ORDER BY age").shouldNotMatchRule(); + } + + @Test + public void noOrdinalInGroupAndOrderByShouldNotMatch() { + query("SELECT lastname, age FROM bank GROUP BY lastname, age ORDER BY age") + .shouldNotMatchRule(); + } + + @Test + public void simpleGroupByOrdinal() { + query("SELECT lastname FROM bank GROUP BY 1") + .shouldBeAfterRewrite("SELECT lastname FROM bank GROUP BY lastname"); + } + + @Test + public void multipleGroupByOrdinal() { + query("SELECT lastname, age FROM bank GROUP BY 1, 2 ") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY lastname, age"); + + query("SELECT lastname, age FROM bank GROUP BY 2, 1") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY 
age, lastname"); + + query("SELECT lastname, age, firstname FROM bank GROUP BY 2, firstname, 1") + .shouldBeAfterRewrite( + "SELECT lastname, age, firstname FROM bank GROUP BY age, firstname, lastname"); + + query("SELECT lastname, age, firstname FROM bank GROUP BY 2, something, 1") + .shouldBeAfterRewrite( + "SELECT lastname, age, firstname FROM bank GROUP BY age, something, lastname"); + } + + @Test + public void simpleOrderByOrdinal() { + query("SELECT lastname FROM bank ORDER BY 1") + .shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY lastname"); + } + + @Test + public void multipleOrderByOrdinal() { + query("SELECT lastname, age FROM bank ORDER BY 1, 2 ") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY lastname, age"); + + query("SELECT lastname, age FROM bank ORDER BY 2, 1") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY age, lastname"); + + query("SELECT lastname, age, firstname FROM bank ORDER BY 2, firstname, 1") + .shouldBeAfterRewrite( + "SELECT lastname, age, firstname FROM bank ORDER BY age, firstname, lastname"); + + query("SELECT lastname, age, firstname FROM bank ORDER BY 2, department, 1") + .shouldBeAfterRewrite( + "SELECT lastname, age, firstname FROM bank ORDER BY age, department, lastname"); + } + + // Tests invalid Ordinals, non-positive ordinal values are already validated by semantic analyzer + @Test + public void invalidGroupByOrdinalShouldThrowException() { + exception.expect(VerificationException.class); + exception.expectMessage("Invalid ordinal [3] specified in [GROUP BY 3]"); + query("SELECT lastname, MAX(lastname) FROM bank GROUP BY 3 ").rewrite(); + } + + @Test + public void invalidOrderByOrdinalShouldThrowException() { + exception.expect(VerificationException.class); + exception.expectMessage("Invalid ordinal [4] specified in [ORDER BY 4]"); + query("SELECT `lastname`, `age`, `firstname` FROM bank ORDER BY 4 IS NOT NULL").rewrite(); + } + + private QueryAssertion query(String sql) { + return 
new QueryAssertion(sql); + } + + private static class QueryAssertion { + + private OrdinalRewriterRule rule; + private SQLQueryExpr expr; + + QueryAssertion(String sql) { + this.expr = SqlParserUtils.parse(sql); + this.rule = new OrdinalRewriterRule(sql); } - @Test - public void noOrdinalInOrderByShouldNotMatch() { - query("SELECT lastname, age FROM bank ORDER BY age").shouldNotMatchRule(); + void shouldBeAfterRewrite(String expected) { + shouldMatchRule(); + rule.rewrite(expr); + Assert.assertEquals( + SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), SQLUtils.toMySqlString(expr)); } - @Test - public void noOrdinalInGroupAndOrderByShouldNotMatch() { - query("SELECT lastname, age FROM bank GROUP BY lastname, age ORDER BY age").shouldNotMatchRule(); + void shouldMatchRule() { + Assert.assertTrue(match()); } - @Test - public void simpleGroupByOrdinal() { - query("SELECT lastname FROM bank GROUP BY 1" - ).shouldBeAfterRewrite("SELECT lastname FROM bank GROUP BY lastname"); + void shouldNotMatchRule() { + Assert.assertFalse(match()); } - @Test - public void multipleGroupByOrdinal() { - query("SELECT lastname, age FROM bank GROUP BY 1, 2 " - ).shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY lastname, age"); - - query("SELECT lastname, age FROM bank GROUP BY 2, 1" - ).shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY age, lastname"); - - query("SELECT lastname, age, firstname FROM bank GROUP BY 2, firstname, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank GROUP BY age, firstname, lastname"); - - query("SELECT lastname, age, firstname FROM bank GROUP BY 2, something, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank GROUP BY age, something, lastname"); - + void rewrite() { + shouldMatchRule(); + rule.rewrite(expr); } - @Test - public void simpleOrderByOrdinal() { - query("SELECT lastname FROM bank ORDER BY 1" - ).shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY lastname"); - } - - @Test - 
public void multipleOrderByOrdinal() { - query("SELECT lastname, age FROM bank ORDER BY 1, 2 " - ).shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY lastname, age"); - - query("SELECT lastname, age FROM bank ORDER BY 2, 1" - ).shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY age, lastname"); - - query("SELECT lastname, age, firstname FROM bank ORDER BY 2, firstname, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank ORDER BY age, firstname, lastname"); - - query("SELECT lastname, age, firstname FROM bank ORDER BY 2, department, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank ORDER BY age, department, lastname"); - } - - // Tests invalid Ordinals, non-positive ordinal values are already validated by semantic analyzer - @Test - public void invalidGroupByOrdinalShouldThrowException() { - exception.expect(VerificationException.class); - exception.expectMessage("Invalid ordinal [3] specified in [GROUP BY 3]"); - query("SELECT lastname, MAX(lastname) FROM bank GROUP BY 3 ").rewrite(); - } - - @Test - public void invalidOrderByOrdinalShouldThrowException() { - exception.expect(VerificationException.class); - exception.expectMessage("Invalid ordinal [4] specified in [ORDER BY 4]"); - query("SELECT `lastname`, `age`, `firstname` FROM bank ORDER BY 4 IS NOT NULL").rewrite(); - } - - - private QueryAssertion query(String sql) { - return new QueryAssertion(sql); - } - private static class QueryAssertion { - - private OrdinalRewriterRule rule; - private SQLQueryExpr expr; - - QueryAssertion(String sql) { - this.expr = SqlParserUtils.parse(sql); - this.rule = new OrdinalRewriterRule(sql); - } - - void shouldBeAfterRewrite(String expected) { - shouldMatchRule(); - rule.rewrite(expr); - Assert.assertEquals( - SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), - SQLUtils.toMySqlString(expr) - ); - } - - void shouldMatchRule() { - Assert.assertTrue(match()); - } - - void shouldNotMatchRule() { - 
Assert.assertFalse(match()); - } - - void rewrite() { - shouldMatchRule(); - rule.rewrite(expr); - } - - private boolean match() { - return rule.match(expr); - } + private boolean match() { + return rule.match(expr); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java index a94b3e6112..3e20e8edf6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertFalse; @@ -16,42 +15,48 @@ import org.junit.Test; import org.opensearch.sql.legacy.rewriter.subquery.NestedQueryContext; - public class NestedQueryContextTest { - @Test - public void isNested() { - NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); - - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); - - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); - } - - @Test - public void isNestedJoin() { - NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); - joinTableSource.setLeft(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); - joinTableSource.setRight(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); - 
joinTableSource.setJoinType(JoinType.COMMA); - nestedQueryDetector.add(joinTableSource); - - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); - } - - @Test - public void notNested() { - NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("projects"), "p")); - - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); - } + @Test + public void isNested() { + NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); + + assertFalse( + nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); + assertTrue( + nestedQueryDetector.isNested( + new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); + + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); + assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); + } + + @Test + public void isNestedJoin() { + NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); + joinTableSource.setLeft(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); + joinTableSource.setRight(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); + joinTableSource.setJoinType(JoinType.COMMA); + nestedQueryDetector.add(joinTableSource); + + assertFalse( + 
nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); + assertTrue( + nestedQueryDetector.isNested( + new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); + assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); + } + + @Test + public void notNested() { + NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("projects"), "p")); + + assertFalse( + nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); + assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java index f876b14110..68ad891020 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,42 +18,45 @@ public class PrettyFormatterTest { - @Test - public void assertFormatterWithoutContentInside() throws IOException { - String noContentInput = "{ }"; - String expectedOutput = "{ }"; - String result = JsonPrettyFormatter.format(noContentInput); - assertThat(result, equalTo(expectedOutput)); - } - - @Test - public void assertFormatterOutputsPrettyJson() throws IOException { - String explainFormattedPrettyFilePath = TestUtils.getResourceFilePath( - "/src/test/resources/expectedOutput/explain_format_pretty.json"); - String explainFormattedPretty = Files.toString(new 
File(explainFormattedPrettyFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - String explainFormattedOnelineFilePath = TestUtils.getResourceFilePath( - "/src/test/resources/explain_format_oneline.json"); - String explainFormattedOneline = Files.toString(new File(explainFormattedOnelineFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - String result = JsonPrettyFormatter.format(explainFormattedOneline); - - assertThat(result, equalTo(explainFormattedPretty)); - } - - @Test(expected = IOException.class) - public void illegalInputOfNull() throws IOException { - JsonPrettyFormatter.format(""); - } - - @Test(expected = IOException.class) - public void illegalInputOfUnpairedBrace() throws IOException { - JsonPrettyFormatter.format("{\"key\" : \"value\""); - } - - @Test(expected = IOException.class) - public void illegalInputOfWrongBraces() throws IOException { - JsonPrettyFormatter.format("<\"key\" : \"value\">"); - } + @Test + public void assertFormatterWithoutContentInside() throws IOException { + String noContentInput = "{ }"; + String expectedOutput = "{ }"; + String result = JsonPrettyFormatter.format(noContentInput); + assertThat(result, equalTo(expectedOutput)); + } + + @Test + public void assertFormatterOutputsPrettyJson() throws IOException { + String explainFormattedPrettyFilePath = + TestUtils.getResourceFilePath( + "/src/test/resources/expectedOutput/explain_format_pretty.json"); + String explainFormattedPretty = + Files.toString(new File(explainFormattedPrettyFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String explainFormattedOnelineFilePath = + TestUtils.getResourceFilePath("/src/test/resources/explain_format_oneline.json"); + String explainFormattedOneline = + Files.toString(new File(explainFormattedOnelineFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + String result = JsonPrettyFormatter.format(explainFormattedOneline); + + assertThat(result, equalTo(explainFormattedPretty)); + } + + @Test(expected 
= IOException.class) + public void illegalInputOfNull() throws IOException { + JsonPrettyFormatter.format(""); + } + + @Test(expected = IOException.class) + public void illegalInputOfUnpairedBrace() throws IOException { + JsonPrettyFormatter.format("{\"key\" : \"value\""); + } + + @Test(expected = IOException.class) + public void illegalInputOfWrongBraces() throws IOException { + JsonPrettyFormatter.format("<\"key\" : \"value\">"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java index 55b78af0d7..5dbda8cb92 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.Matchers.equalTo; @@ -18,56 +17,57 @@ public class QueryContextTest { - private static final String REQUEST_ID_KEY = "request_id"; + private static final String REQUEST_ID_KEY = "request_id"; - @After - public void cleanUpContext() { + @After + public void cleanUpContext() { - ThreadContext.clearMap(); - } + ThreadContext.clearMap(); + } - @Test - public void addRequestId() { + @Test + public void addRequestId() { - Assert.assertNull(ThreadContext.get(REQUEST_ID_KEY)); - QueryContext.addRequestId(); - final String requestId = ThreadContext.get(REQUEST_ID_KEY); - Assert.assertNotNull(requestId); - } + Assert.assertNull(ThreadContext.get(REQUEST_ID_KEY)); + QueryContext.addRequestId(); + final String requestId = ThreadContext.get(REQUEST_ID_KEY); + Assert.assertNotNull(requestId); + } - @Test - public void addRequestId_alreadyExists() { + @Test + public void addRequestId_alreadyExists() { - QueryContext.addRequestId(); - final String requestId = ThreadContext.get(REQUEST_ID_KEY); - 
QueryContext.addRequestId(); - final String requestId2 = ThreadContext.get(REQUEST_ID_KEY); - Assert.assertThat(requestId2, not(equalTo(requestId))); - } + QueryContext.addRequestId(); + final String requestId = ThreadContext.get(REQUEST_ID_KEY); + QueryContext.addRequestId(); + final String requestId2 = ThreadContext.get(REQUEST_ID_KEY); + Assert.assertThat(requestId2, not(equalTo(requestId))); + } - @Test - public void getRequestId_doesNotExist() { - assertNotNull(QueryContext.getRequestId()); - } + @Test + public void getRequestId_doesNotExist() { + assertNotNull(QueryContext.getRequestId()); + } - @Test - public void getRequestId() { + @Test + public void getRequestId() { - final String test_request_id = "test_id_111"; - ThreadContext.put(REQUEST_ID_KEY, test_request_id); - final String requestId = QueryContext.getRequestId(); - Assert.assertThat(requestId, equalTo(test_request_id)); - } + final String test_request_id = "test_id_111"; + ThreadContext.put(REQUEST_ID_KEY, test_request_id); + final String requestId = QueryContext.getRequestId(); + Assert.assertThat(requestId, equalTo(test_request_id)); + } - @Test - public void withCurrentContext() throws InterruptedException { + @Test + public void withCurrentContext() throws InterruptedException { - Runnable task = () -> { - Assert.assertTrue(ThreadContext.containsKey("test11")); - Assert.assertTrue(ThreadContext.containsKey("test22")); + Runnable task = + () -> { + Assert.assertTrue(ThreadContext.containsKey("test11")); + Assert.assertTrue(ThreadContext.containsKey("test22")); }; - ThreadContext.put("test11", "value11"); - ThreadContext.put("test22", "value11"); - new Thread(QueryContext.withCurrentContext(task)).join(); - } + ThreadContext.put("test11", "value11"); + ThreadContext.put("test22", "value11"); + new Thread(QueryContext.withCurrentContext(task)).join(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java index ca95b547a9..073fec61e7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import org.junit.Assert; @@ -12,78 +11,84 @@ public class QueryDataAnonymizerTest { - @Test - public void queriesShouldHaveAnonymousFieldAndIndex() { - String query = "SELECT ABS(balance) FROM accounts WHERE age > 30 GROUP BY ABS(balance)"; - String expectedQuery = "( SELECT ABS(identifier) FROM table WHERE identifier > number GROUP BY ABS(identifier) )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesShouldHaveAnonymousFieldAndIndex() { + String query = "SELECT ABS(balance) FROM accounts WHERE age > 30 GROUP BY ABS(balance)"; + String expectedQuery = + "( SELECT ABS(identifier) FROM table WHERE identifier > number GROUP BY ABS(identifier) )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesShouldAnonymousNumbers() { - String query = "SELECT ABS(20), LOG(20.20) FROM accounts"; - String expectedQuery = "( SELECT ABS(number), LOG(number) FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesShouldAnonymousNumbers() { + String query = "SELECT ABS(20), LOG(20.20) FROM accounts"; + String expectedQuery = "( SELECT ABS(number), LOG(number) FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesShouldHaveAnonymousBooleanLiterals() { - String query = "SELECT TRUE FROM accounts"; - String expectedQuery = "( SELECT boolean_literal FROM table )"; - 
Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesShouldHaveAnonymousBooleanLiterals() { + String query = "SELECT TRUE FROM accounts"; + String expectedQuery = "( SELECT boolean_literal FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesShouldHaveAnonymousInputStrings() { - String query = "SELECT * FROM accounts WHERE name = 'Oliver'"; - String expectedQuery = "( SELECT * FROM table WHERE identifier = 'string_literal' )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesShouldHaveAnonymousInputStrings() { + String query = "SELECT * FROM accounts WHERE name = 'Oliver'"; + String expectedQuery = "( SELECT * FROM table WHERE identifier = 'string_literal' )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesWithAliasesShouldAnonymizeSensitiveData() { - String query = "SELECT balance AS b FROM accounts AS a"; - String expectedQuery = "( SELECT identifier AS b FROM table a )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithAliasesShouldAnonymizeSensitiveData() { + String query = "SELECT balance AS b FROM accounts AS a"; + String expectedQuery = "( SELECT identifier AS b FROM table a )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesWithFunctionsShouldAnonymizeSensitiveData() { - String query = "SELECT LTRIM(firstname) FROM accounts"; - String expectedQuery = "( SELECT LTRIM(identifier) FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithFunctionsShouldAnonymizeSensitiveData() { + String query = "SELECT LTRIM(firstname) FROM accounts"; + String expectedQuery = "( SELECT LTRIM(identifier) FROM table 
)"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesWithAggregatesShouldAnonymizeSensitiveData() { - String query = "SELECT MAX(price) - MIN(price) from tickets"; - String expectedQuery = "( SELECT MAX(identifier) - MIN(identifier) FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithAggregatesShouldAnonymizeSensitiveData() { + String query = "SELECT MAX(price) - MIN(price) from tickets"; + String expectedQuery = "( SELECT MAX(identifier) - MIN(identifier) FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesWithSubqueriesShouldAnonymizeSensitiveData() { - String query = "SELECT a.f, a.l, a.a FROM " + - "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; - String expectedQuery = "( SELECT identifier, identifier, identifier FROM (SELECT identifier AS f, " + - "identifier AS l, identifier AS a FROM table WHERE identifier > number ) a )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithSubqueriesShouldAnonymizeSensitiveData() { + String query = + "SELECT a.f, a.l, a.a FROM " + + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; + String expectedQuery = + "( SELECT identifier, identifier, identifier FROM (SELECT identifier AS f, " + + "identifier AS l, identifier AS a FROM table WHERE identifier > number ) a )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void joinQueriesShouldAnonymizeSensitiveData() { - String query = "SELECT a.account_number, a.firstname, a.lastname, e.id, e.name " + - "FROM accounts a JOIN employees e"; - String expectedQuery = "( SELECT identifier, identifier, identifier, identifier, identifier " + - "FROM table a JOIN table e )"; - 
Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void joinQueriesShouldAnonymizeSensitiveData() { + String query = + "SELECT a.account_number, a.firstname, a.lastname, e.id, e.name " + + "FROM accounts a JOIN employees e"; + String expectedQuery = + "( SELECT identifier, identifier, identifier, identifier, identifier " + + "FROM table a JOIN table e )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void unionQueriesShouldAnonymizeSensitiveData() { - String query = "SELECT name, age FROM accounts UNION SELECT name, age FROM employees"; - String expectedQuery = "( SELECT identifier, identifier FROM table " + - "UNION SELECT identifier, identifier FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void unionQueriesShouldAnonymizeSensitiveData() { + String query = "SELECT name, age FROM accounts UNION SELECT name, age FROM employees"; + String expectedQuery = + "( SELECT identifier, identifier FROM table " + + "UNION SELECT identifier, identifier FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java index cd915cf5e5..1a15e57c55 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java @@ -36,7 +36,7 @@ class OpenSearchAggregationResponseParserTest { /** SELECT MAX(age) as max FROM accounts. 
*/ @Test void no_bucket_one_metric_should_pass() { - String response = "{\n \"max#max\": {\n \"value\": 40\n }\n}"; + String response = "{\n" + " \"max#max\": {\n" + " \"value\": 40\n" + " }\n" + "}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); assertThat(parse(parser, response), contains(entry("max", 40d))); } @@ -140,7 +140,8 @@ void two_bucket_one_metric_should_pass() { @Test void unsupported_aggregation_should_fail() { - String response = "{\n \"date_histogram#date_histogram\": {\n \"value\": 40\n }\n}"; + String response = + "{\n" + " \"date_histogram#date_histogram\": {\n" + " \"value\": 40\n" + " }\n" + "}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); RuntimeException exception = assertThrows(RuntimeException.class, () -> parse(parser, response)); From 220150969209bc8eefbac44fa0fc2f3ebc461687 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Mon, 21 Aug 2023 15:26:30 -0700 Subject: [PATCH 37/42] [Spotless] Applying Google Code Format for legacy directory (pt 2/4) #20 (#1989) * Spotless apply for legacy pt 2 Signed-off-by: Mitchell Gale * spotless apply Signed-off-by: Mitchell Gale * Spotless apply for legacy pt 2 Signed-off-by: Mitchell Gale * spotless apply Signed-off-by: Mitchell Gale * updated type Signed-off-by: Mitchell Gale * removed spotless Signed-off-by: Mitchell Gale * add spotless Signed-off-by: Mitchell Gale * removed extra test Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale --- .../antlr/semantic/scope/Environment.java | 134 +- .../antlr/semantic/scope/Namespace.java | 29 +- .../semantic/types/operator/JoinOperator.java | 56 +- .../antlr/semantic/types/special/Generic.java | 148 +- .../visitor/GenericSqlParseTreeVisitor.java | 98 +- .../opensearch/sql/legacy/domain/Field.java | 262 ++- .../opensearch/sql/legacy/domain/From.java | 88 +- .../opensearch/sql/legacy/domain/Having.java | 308 +-- 
.../sql/legacy/domain/IndexStatement.java | 136 +- .../sql/legacy/domain/JoinSelect.java | 107 +- .../opensearch/sql/legacy/domain/KVValue.java | 35 +- .../sql/legacy/domain/MethodField.java | 151 +- .../sql/legacy/domain/hints/Hint.java | 29 +- .../sql/legacy/domain/hints/HintFactory.java | 398 ++-- .../sql/legacy/domain/hints/HintType.java | 45 +- .../legacy/esdomain/LocalClusterState.java | 322 ++-- .../legacy/esdomain/mapping/FieldMapping.java | 221 ++- .../esdomain/mapping/FieldMappings.java | 185 +- .../esdomain/mapping/IndexMappings.java | 76 +- .../sql/legacy/esdomain/mapping/Mappings.java | 53 +- .../ActionRequestRestExecutorFactory.java | 63 +- .../sql/legacy/executor/Format.java | 35 +- .../executor/GetIndexRequestRestListener.java | 126 +- .../legacy/executor/format/ErrorMessage.java | 85 +- .../executor/format/ErrorMessageFactory.java | 58 +- .../legacy/executor/join/ElasticUtils.java | 251 +-- .../join/HashJoinComparisonStructure.java | 84 +- .../join/HashJoinElasticExecutor.java | 665 +++---- .../executor/join/MetaSearchResult.java | 110 +- .../legacy/executor/multi/MinusExecutor.java | 789 ++++---- .../MinusOneFieldAndOptimizationResult.java | 30 +- .../multi/MultiRequestExecutorFactory.java | 28 +- .../legacy/expression/core/Expression.java | 19 +- .../expression/core/ExpressionFactory.java | 145 +- .../core/builder/ExpressionBuilder.java | 7 +- .../expression/model/ExprBooleanValue.java | 33 +- .../expression/model/ExprCollectionValue.java | 35 +- .../expression/model/ExprDoubleValue.java | 27 +- .../expression/model/ExprFloatValue.java | 27 +- .../expression/model/ExprIntegerValue.java | 27 +- .../expression/model/ExprLongValue.java | 27 +- .../expression/model/ExprMissingValue.java | 13 +- .../expression/model/ExprStringValue.java | 27 +- .../expression/model/ExprTupleValue.java | 32 +- .../legacy/expression/model/ExprValue.java | 39 +- .../expression/model/ExprValueFactory.java | 95 +- .../expression/model/ExprValueUtils.java | 95 +- 
.../sql/legacy/metrics/GaugeMetric.java | 28 +- .../opensearch/sql/legacy/metrics/Metric.java | 20 +- .../sql/legacy/metrics/MetricFactory.java | 43 +- .../sql/legacy/metrics/MetricName.java | 97 +- .../sql/legacy/metrics/Metrics.java | 92 +- .../legacy/parser/ElasticSqlExprParser.java | 1648 +++++++++-------- .../legacy/parser/ElasticSqlSelectParser.java | 509 +++-- .../sql/legacy/parser/FieldMaker.java | 680 +++---- .../sql/legacy/parser/HavingParser.java | 153 +- .../join/HashJoinElasticRequestBuilder.java | 45 +- .../legacy/query/join/JoinRequestBuilder.java | 179 +- .../sql/legacy/query/maker/Maker.java | 869 ++++----- .../legacy/query/multi/MultiQueryAction.java | 103 +- .../query/multi/MultiQueryRequestBuilder.java | 193 +- .../legacy/query/multi/MultiQuerySelect.java | 47 +- .../HashJoinQueryPlanRequestBuilder.java | 101 +- .../query/planner/core/ExecuteParams.java | 35 +- .../query/planner/explain/Explanation.java | 125 +- .../planner/explain/ExplanationFormat.java | 56 +- .../explain/JsonExplanationFormat.java | 154 +- .../planner/logical/LogicalOperator.java | 48 +- .../query/planner/logical/LogicalPlan.java | 287 ++- .../planner/logical/LogicalPlanVisitor.java | 172 +- .../query/planner/logical/node/Filter.java | 80 +- .../query/planner/logical/node/Group.java | 90 +- .../query/planner/logical/node/Join.java | 239 ++- .../physical/estimation/Estimation.java | 61 +- .../planner/physical/node/join/HashTable.java | 85 +- .../physical/node/join/HashTableGroup.java | 109 +- .../physical/node/join/JoinAlgorithm.java | 416 ++--- .../physical/node/join/ListHashTable.java | 69 +- .../resource/blocksize/AdaptiveBlockSize.java | 31 +- .../planner/resource/monitor/Monitor.java | 18 +- .../sql/legacy/rewriter/alias/Identifier.java | 49 +- .../legacy/rewriter/join/JoinRewriteRule.java | 281 +-- .../sql/legacy/rewriter/nestedfield/From.java | 146 +- .../rewriter/nestedfield/Identifier.java | 138 +- .../subquery/rewriter/InRewriter.java | 84 +- 
.../rewriter/NestedExistsRewriter.java | 107 +- .../rewriter/subquery/utils/FindSubQuery.java | 73 +- .../sql/legacy/utils/JsonPrettyFormatter.java | 45 +- .../antlr/semantic/scope/EnvironmentTest.java | 266 ++- .../antlr/semantic/types/GenericTypeTest.java | 54 +- .../esdomain/mapping/FieldMappingTest.java | 175 +- .../esdomain/mapping/FieldMappingsTest.java | 79 +- .../legacy/rewriter/alias/IdentifierTest.java | 65 +- .../unittest/ErrorMessageFactoryTest.java | 67 +- .../sql/legacy/unittest/FormatTest.java | 21 +- .../sql/legacy/unittest/HavingTest.java | 620 +++---- .../sql/legacy/unittest/JSONRequestTest.java | 806 ++++---- .../unittest/LocalClusterStateTest.java | 300 +-- .../legacy/unittest/MathFunctionsTest.java | 721 +++----- .../executor/join/ElasticUtilsTest.java | 51 +- .../expression/core/ExpressionTest.java | 61 +- .../expression/model/ExprValueUtilsTest.java | 76 +- .../unittest/metrics/GaugeMetricTest.java | 23 +- .../legacy/unittest/metrics/MetricsTest.java | 101 +- .../unittest/parser/FieldMakerTest.java | 69 +- .../subquery/ExistsSubQueryRewriterTest.java | 183 +- .../subquery/InSubqueryRewriterTest.java | 118 +- .../subquery/utils/FindSubQueryTest.java | 63 +- .../sql/legacy/util/HasFieldWithValue.java | 70 +- .../sql/legacy/util/MatcherUtils.java | 483 ++--- .../util/MultipleIndexClusterUtils.java | 322 ++-- 111 files changed, 9052 insertions(+), 9400 deletions(-) diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java index 11d25c3ce8..4f8a62d898 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import java.util.HashMap; @@ -11,84 +10,85 @@ import java.util.Optional; 
import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Environment for symbol and its attribute (type) in the current scope - */ +/** Environment for symbol and its attribute (type) in the current scope */ public class Environment { - private final Environment parent; - - private final SymbolTable symbolTable; + private final Environment parent; - public Environment(Environment parent) { - this.parent = parent; - this.symbolTable = new SymbolTable(); - } + private final SymbolTable symbolTable; - /** - * Define symbol with the type - * @param symbol symbol to define - * @param type type - */ - public void define(Symbol symbol, Type type) { - symbolTable.store(symbol, type); - } + public Environment(Environment parent) { + this.parent = parent; + this.symbolTable = new SymbolTable(); + } - /** - * Resolve symbol in the environment - * @param symbol symbol to look up - * @return type if exist - */ - public Optional resolve(Symbol symbol) { - Optional type = Optional.empty(); - for (Environment cur = this; cur != null; cur = cur.parent) { - type = cur.symbolTable.lookup(symbol); - if (type.isPresent()) { - break; - } - } - return type; - } + /** + * Define symbol with the type + * + * @param symbol symbol to define + * @param type type + */ + public void define(Symbol symbol, Type type) { + symbolTable.store(symbol, type); + } - /** - * Resolve symbol definitions by a prefix. 
- * @param prefix a prefix of symbol - * @return all symbols with types that starts with the prefix - */ - public Map resolveByPrefix(Symbol prefix) { - Map typeByName = new HashMap<>(); - for (Environment cur = this; cur != null; cur = cur.parent) { - typeByName.putAll(cur.symbolTable.lookupByPrefix(prefix)); - } - return typeByName; + /** + * Resolve symbol in the environment + * + * @param symbol symbol to look up + * @return type if exist + */ + public Optional resolve(Symbol symbol) { + Optional type = Optional.empty(); + for (Environment cur = this; cur != null; cur = cur.parent) { + type = cur.symbolTable.lookup(symbol); + if (type.isPresent()) { + break; + } } + return type; + } - /** - * Resolve all symbols in the namespace. - * @param namespace a namespace - * @return all symbols in the namespace - */ - public Map resolveAll(Namespace namespace) { - Map result = new HashMap<>(); - for (Environment cur = this; cur != null; cur = cur.parent) { - // putIfAbsent ensures inner most definition will be used (shadow outers) - cur.symbolTable.lookupAll(namespace).forEach(result::putIfAbsent); - } - return result; + /** + * Resolve symbol definitions by a prefix. + * + * @param prefix a prefix of symbol + * @return all symbols with types that starts with the prefix + */ + public Map resolveByPrefix(Symbol prefix) { + Map typeByName = new HashMap<>(); + for (Environment cur = this; cur != null; cur = cur.parent) { + typeByName.putAll(cur.symbolTable.lookupByPrefix(prefix)); } + return typeByName; + } - /** Current environment is root and no any symbol defined */ - public boolean isEmpty(Namespace namespace) { - for (Environment cur = this; cur != null; cur = cur.parent) { - if (!cur.symbolTable.isEmpty(namespace)) { - return false; - } - } - return true; + /** + * Resolve all symbols in the namespace. 
+ * + * @param namespace a namespace + * @return all symbols in the namespace + */ + public Map resolveAll(Namespace namespace) { + Map result = new HashMap<>(); + for (Environment cur = this; cur != null; cur = cur.parent) { + // putIfAbsent ensures inner most definition will be used (shadow outers) + cur.symbolTable.lookupAll(namespace).forEach(result::putIfAbsent); } + return result; + } - public Environment getParent() { - return parent; + /** Current environment is root and no any symbol defined */ + public boolean isEmpty(Namespace namespace) { + for (Environment cur = this; cur != null; cur = cur.parent) { + if (!cur.symbolTable.isEmpty(namespace)) { + return false; + } } + return true; + } + public Environment getParent() { + return parent; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java index b591de5783..c500809a70 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java @@ -3,27 +3,22 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; -/** - * Namespace of symbol to avoid naming conflict - */ +/** Namespace of symbol to avoid naming conflict */ public enum Namespace { + FIELD_NAME("Field"), + FUNCTION_NAME("Function"), + OPERATOR_NAME("Operator"); - FIELD_NAME("Field"), - FUNCTION_NAME("Function"), - OPERATOR_NAME("Operator"); - - private final String name; - - Namespace(String name) { - this.name = name; - } + private final String name; - @Override - public String toString() { - return name; - } + Namespace(String name) { + this.name = name; + } + @Override + public String toString() { + return name; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java index 75bc306cd9..02decab1ae 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.operator; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR; @@ -13,35 +12,32 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Join operator - */ +/** Join operator */ public enum JoinOperator implements Type { - JOIN; - - @Override - public String getName() { - return name(); - } - - @Override - public Type construct(List others) { - Optional isAnyNonIndexType = others.stream(). - filter(type -> !(type instanceof OpenSearchIndex)). 
- findAny(); - if (isAnyNonIndexType.isPresent()) { - return TYPE_ERROR; - } - return others.get(0); - } - - @Override - public String usage() { - return "Please join index with other index or its nested field."; - } - - @Override - public String toString() { - return "Operator [" + getName() + "]"; + JOIN; + + @Override + public String getName() { + return name(); + } + + @Override + public Type construct(List others) { + Optional isAnyNonIndexType = + others.stream().filter(type -> !(type instanceof OpenSearchIndex)).findAny(); + if (isAnyNonIndexType.isPresent()) { + return TYPE_ERROR; } + return others.get(0); + } + + @Override + public String usage() { + return "Please join index with other index or its nested field."; + } + + @Override + public String toString() { + return "Operator [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java index 7efdb55426..ad718a8256 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.special; import java.util.Arrays; @@ -12,79 +11,84 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Generic type for more precise type expression - */ +/** Generic type for more precise type expression */ public class Generic implements Type { - /** Generic type placeholder namespace */ - private enum Name { T } - - /** Construct function to find generic type in argument list with same name */ - public static final Function T = types -> findSameGenericType(Name.T, types); - - /** Generic type name */ - private final Name name; - - /** Actual type binding to 
current generic type */ - private final Type binding; - - public Generic(Name name, Type type) { - this.name = name; - this.binding = type; - } - - public static Type T(Type type) { - return new Generic(Name.T, type); - } - - /** - * Return a function for replacing generic type in argument list with binding type. - * Ex. after T instance found in argument list [T(NUMBER), STRING], create function to return actualTypes[0] - * - * @param func function for finding generic type in argument list (namely, function T above) - * @param actualArgTypes actual argument types - */ - public static Function specialize(Function func, - Type[] actualArgTypes) { - if (func != T) { - return func; - } - - Type genericType = func.apply(actualArgTypes); - int genericTypeIndex = Arrays.asList(actualArgTypes).indexOf(genericType); - return actualTypes -> actualTypes[genericTypeIndex]; + /** Generic type placeholder namespace */ + private enum Name { + T + } + + /** Construct function to find generic type in argument list with same name */ + public static final Function T = types -> findSameGenericType(Name.T, types); + + /** Generic type name */ + private final Name name; + + /** Actual type binding to current generic type */ + private final Type binding; + + public Generic(Name name, Type type) { + this.name = name; + this.binding = type; + } + + public static Type T(Type type) { + return new Generic(Name.T, type); + } + + /** + * Return a function for replacing generic type in argument list with binding type. Ex. after T + * instance found in argument list [T(NUMBER), STRING], create function to return actualTypes[0] + * + * @param func function for finding generic type in argument list (namely, function T above) + * @param actualArgTypes actual argument types + */ + public static Function specialize( + Function func, Type[] actualArgTypes) { + if (func != T) { + return func; } - /** Find placeholder in argument list, ex. 
in [T(NUMBER), STRING] -> T, return instance at first T */ - private static Type findSameGenericType(Name name, Type[] types) { - return Arrays.stream(types). - filter(type -> type instanceof Generic). - filter(type -> ((Generic) type).name == name). - findFirst(). - orElseThrow(() -> new IllegalStateException(StringUtils.format( - "Type definition is wrong. Could not unbind generic type [%s] in type list %s.", - name, types)) - ); - } - - @Override - public String getName() { - return this.name.name(); - } - - @Override - public boolean isCompatible(Type other) { - return binding.isCompatible(other); - } - - @Override - public Type construct(List others) { - return binding.construct(others); - } - - @Override - public String usage() { - return binding.usage() + " " + name; - } + Type genericType = func.apply(actualArgTypes); + int genericTypeIndex = Arrays.asList(actualArgTypes).indexOf(genericType); + return actualTypes -> actualTypes[genericTypeIndex]; + } + + /** + * Find placeholder in argument list, ex. in [T(NUMBER), STRING] -> T, return instance at first T + */ + private static Type findSameGenericType(Name name, Type[] types) { + return Arrays.stream(types) + .filter(type -> type instanceof Generic) + .filter(type -> ((Generic) type).name == name) + .findFirst() + .orElseThrow( + () -> + new IllegalStateException( + StringUtils.format( + "Type definition is wrong. 
Could not unbind generic type [%s] in type list" + + " %s.", + name, types))); + } + + @Override + public String getName() { + return this.name.name(); + } + + @Override + public boolean isCompatible(Type other) { + return binding.isCompatible(other); + } + + @Override + public Type construct(List others) { + return binding.construct(others); + } + + @Override + public String usage() { + return binding.usage() + " " + name; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java index 511f932a0f..bd78c1b03f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java @@ -3,78 +3,74 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import java.util.List; -/** - * Generic parse tree visitor without dependency on concrete parse tree class. - */ +/** Generic parse tree visitor without dependency on concrete parse tree class. 
*/ public interface GenericSqlParseTreeVisitor { - default void visitRoot() {} - - default void visitQuery() {} + default void visitRoot() {} - default void endVisitQuery() {} + default void visitQuery() {} - default T visitSelect(List items) { - return defaultValue(); - } + default void endVisitQuery() {} - default T visitSelectAllColumn() { - return defaultValue(); - } + default T visitSelect(List items) { + return defaultValue(); + } - default void visitAs(String alias, T type) {} + default T visitSelectAllColumn() { + return defaultValue(); + } - default T visitIndexName(String indexName) { - return defaultValue(); - } + default void visitAs(String alias, T type) {} - default T visitFieldName(String fieldName) { - return defaultValue(); - } + default T visitIndexName(String indexName) { + return defaultValue(); + } - default T visitFunctionName(String funcName) { - return defaultValue(); - } + default T visitFieldName(String fieldName) { + return defaultValue(); + } - default T visitOperator(String opName) { - return defaultValue(); - } + default T visitFunctionName(String funcName) { + return defaultValue(); + } - default T visitString(String text) { - return defaultValue(); - } + default T visitOperator(String opName) { + return defaultValue(); + } - default T visitInteger(String text) { - return defaultValue(); - } + default T visitString(String text) { + return defaultValue(); + } - default T visitFloat(String text) { - return defaultValue(); - } + default T visitInteger(String text) { + return defaultValue(); + } - default T visitBoolean(String text) { - return defaultValue(); - } + default T visitFloat(String text) { + return defaultValue(); + } - default T visitDate(String text) { - return defaultValue(); - } + default T visitBoolean(String text) { + return defaultValue(); + } - default T visitNull() { - return defaultValue(); - } + default T visitDate(String text) { + return defaultValue(); + } - default T visitConvertedType(String text) { - return 
defaultValue(); - } + default T visitNull() { + return defaultValue(); + } - default T defaultValue() { - return null; - } + default T visitConvertedType(String text) { + return defaultValue(); + } + default T defaultValue() { + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java index 1b6be05f20..09471fa2d7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -13,143 +12,136 @@ import org.opensearch.sql.legacy.parser.NestedType; /** - * - * * @author ansj */ public class Field implements Cloneable { - /** - * Constant for '*' field in SELECT - */ - public static final Field STAR = new Field("*", ""); - - protected String name; - protected SQLAggregateOption option; - private String alias; - private NestedType nested; - private ChildrenType children; - private SQLExpr expression; - - public Field(String name, String alias) { - this.name = name; - this.alias = alias; - this.nested = null; - this.children = null; - this.option = null; - } - - public Field(String name, String alias, NestedType nested, ChildrenType children) { - this.name = name; - this.alias = alias; - this.nested = nested; - this.children = children; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - public boolean isNested() { - return this.nested != null; - } - - public boolean isReverseNested() { - return this.nested != null && this.nested.isReverse(); - } - - public void setNested(NestedType nested) { - this.nested = nested; - } - - public String getNestedPath() { - 
if (this.nested == null) { - return null; - } - - return this.nested.path; - } - - public boolean isChildren() { - return this.children != null; - } - - public void setChildren(ChildrenType children) { - this.children = children; - } - - public String getChildType() { - if (this.children == null) { - return null; - } - return this.children.childType; - } - - public void setAggregationOption(SQLAggregateOption option) { - this.option = option; - } - - public SQLAggregateOption getOption() { - return option; - } - - @Override - public String toString() { - return this.name; - } + /** Constant for '*' field in SELECT */ + public static final Field STAR = new Field("*", ""); + + protected String name; + protected SQLAggregateOption option; + private String alias; + private NestedType nested; + private ChildrenType children; + private SQLExpr expression; + + public Field(String name, String alias) { + this.name = name; + this.alias = alias; + this.nested = null; + this.children = null; + this.option = null; + } + + public Field(String name, String alias, NestedType nested, ChildrenType children) { + this.name = name; + this.alias = alias; + this.nested = nested; + this.children = children; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public boolean isNested() { + return this.nested != null; + } + + public boolean isReverseNested() { + return this.nested != null && this.nested.isReverse(); + } + + public void setNested(NestedType nested) { + this.nested = nested; + } + + public String getNestedPath() { + if (this.nested == null) { + return null; + } + + return this.nested.path; + } + + public boolean isChildren() { + return this.children != null; + } + + public void setChildren(ChildrenType children) { + this.children = children; + } + + public String getChildType() { + if 
(this.children == null) { + return null; + } + return this.children.childType; + } + + public void setAggregationOption(SQLAggregateOption option) { + this.option = option; + } + + public SQLAggregateOption getOption() { + return option; + } + + @Override + public String toString() { + return this.name; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != this.getClass()) { + return false; + } + Field other = (Field) obj; + boolean namesAreEqual = + (other.getName() == null && this.name == null) || other.getName().equals(this.name); + if (!namesAreEqual) { + return false; + } + return (other.getAlias() == null && this.alias == null) || other.getAlias().equals(this.alias); + } + + @Override + public int hashCode() { // Bug: equals() is present but hashCode was missing + return Objects.hash(name, alias); + } + + @Override + protected Object clone() throws CloneNotSupportedException { + return new Field(new String(this.name), new String(this.alias)); + } + + /** Returns true if the field is script field. 
*/ + public boolean isScriptField() { + return false; + } + + public void setExpression(SQLExpr expression) { + this.expression = expression; + } - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj.getClass() != this.getClass()) { - return false; - } - Field other = (Field) obj; - boolean namesAreEqual = (other.getName() == null && this.name == null) - || other.getName().equals(this.name); - if (!namesAreEqual) { - return false; - } - return (other.getAlias() == null && this.alias == null) - || other.getAlias().equals(this.alias); - } - - @Override - public int hashCode() { // Bug: equals() is present but hashCode was missing - return Objects.hash(name, alias); - } - - @Override - protected Object clone() throws CloneNotSupportedException { - return new Field(new String(this.name), new String(this.alias)); - } - - /** - * Returns true if the field is script field. - */ - public boolean isScriptField() { - return false; - } - - public void setExpression(SQLExpr expression) { - this.expression = expression; - } - - public SQLExpr getExpression() { - return expression; - } + public SQLExpr getExpression() { + return expression; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java index 6455df727c..67ac7f0e3c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java @@ -3,55 +3,49 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; - -/** - * Represents the from clause. - * Contains index and type which the - * query refer to. - */ +/** Represents the from clause. Contains index and type which the query refer to. */ public class From { - private String index; - private String alias; - - /** - * Extract index and type from the 'from' string - * - * @param from The part after the FROM keyword. 
- */ - public From(String from) { - index = from; - } - - public From(String from, String alias) { - this(from); - this.alias = alias; - } - - public String getIndex() { - return index; - } - - public void setIndex(String index) { - this.index = index; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - @Override - public String toString() { - StringBuilder str = new StringBuilder(index); - if (alias != null) { - str.append(" AS ").append(alias); - } - return str.toString(); + private String index; + private String alias; + + /** + * Extract index and type from the 'from' string + * + * @param from The part after the FROM keyword. + */ + public From(String from) { + index = from; + } + + public From(String from, String alias) { + this(from); + this.alias = alias; + } + + public String getIndex() { + return index; + } + + public void setIndex(String index) { + this.index = index; + } + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + @Override + public String toString() { + StringBuilder str = new StringBuilder(index); + if (alias != null) { + str.append(" AS ").append(alias); } + return str.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java index 30cfba4c7a..7d0765580b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import static java.util.stream.Collectors.joining; @@ -29,100 +28,106 @@ * Domain object for HAVING clause in SQL which covers both the parsing and explain logic. *

* Responsibilities: - * 1. Parsing: parse conditions out during initialization - * 2. Explain: translate conditions to OpenSearch query DSL (Bucket Selector Aggregation) + *

    + *
  1. Parsing: parse conditions out during initialization + *
  2. Explain: translate conditions to OpenSearch query DSL (Bucket Selector Aggregation) + *
*/ public class Having { - private static final String BUCKET_SELECTOR_NAME = "bucket_filter"; - private static final String PARAMS = "params."; - private static final String AND = " && "; - private static final String OR = " || "; - - /** - * Conditions parsed out of HAVING clause - */ - private final List conditions; - - private HavingParser havingParser; - - public List getHavingFields() { - return havingParser.getHavingFields(); + private static final String BUCKET_SELECTOR_NAME = "bucket_filter"; + private static final String PARAMS = "params."; + private static final String AND = " && "; + private static final String OR = " || "; + + /** Conditions parsed out of HAVING clause */ + private final List conditions; + + private HavingParser havingParser; + + public List getHavingFields() { + return havingParser.getHavingFields(); + } + + /** + * Construct by HAVING expression + * + * @param havingExpr having expression + * @param parser where parser + * @throws SqlParseException exception thrown by where parser + */ + public Having(SQLExpr havingExpr, WhereParser parser) throws SqlParseException { + havingParser = new HavingParser(parser); + conditions = parseHavingExprToConditions(havingExpr, havingParser); + } + + public List getConditions() { + return conditions; + } + + /** + * Construct by GROUP BY expression with null check + * + * @param groupByExpr group by expression + * @param parser where parser + * @throws SqlParseException exception thrown by where parser + */ + public Having(SQLSelectGroupByClause groupByExpr, WhereParser parser) throws SqlParseException { + this(groupByExpr == null ? null : groupByExpr.getHaving(), parser); + } + + /** + * Add Bucket Selector Aggregation under group by aggregation with sibling of aggregation of + * fields in SELECT. OpenSearch makes sure that all sibling runs before bucket selector + * aggregation. 
+ * + * @param groupByAgg aggregation builder for GROUP BY clause + * @param fields fields in SELECT clause + * @throws SqlParseException exception thrown for unknown expression + */ + public void explain(AggregationBuilder groupByAgg, List fields) throws SqlParseException { + if (groupByAgg == null || conditions.isEmpty()) { + return; } - /** - * Construct by HAVING expression - * - * @param havingExpr having expression - * @param parser where parser - * @throws SqlParseException exception thrown by where parser - */ - public Having(SQLExpr havingExpr, WhereParser parser) throws SqlParseException { - havingParser = new HavingParser(parser); - conditions = parseHavingExprToConditions(havingExpr, havingParser); + // parsing the fields from SELECT and HAVING clause + groupByAgg.subAggregation( + bucketSelector( + BUCKET_SELECTOR_NAME, + contextForFieldsInSelect(Iterables.concat(fields, getHavingFields())), + explainConditions())); + } + + private List parseHavingExprToConditions(SQLExpr havingExpr, HavingParser parser) + throws SqlParseException { + if (havingExpr == null) { + return Collections.emptyList(); } - public List getConditions() { - return conditions; + Where where = Where.newInstance(); + parser.parseWhere(havingExpr, where); + return where.getWheres(); + } + + private Map contextForFieldsInSelect(Iterable fields) { + Map context = new HashMap<>(); + for (Field field : fields) { + if (field instanceof MethodField) { + // It's required to add to context even if alias in SELECT is exactly same name as that in + // script + context.put( + field.getAlias(), bucketsPath(field.getAlias(), ((MethodField) field).getParams())); + } } + return context; + } - /** - * Construct by GROUP BY expression with null check - * - * @param groupByExpr group by expression - * @param parser where parser - * @throws SqlParseException exception thrown by where parser - */ - public Having(SQLSelectGroupByClause groupByExpr, WhereParser parser) throws SqlParseException { - 
this(groupByExpr == null ? null : groupByExpr.getHaving(), parser); - } - - /** - * Add Bucket Selector Aggregation under group by aggregation with sibling of aggregation of fields in SELECT. - * OpenSearch makes sure that all sibling runs before bucket selector aggregation. - * - * @param groupByAgg aggregation builder for GROUP BY clause - * @param fields fields in SELECT clause - * @throws SqlParseException exception thrown for unknown expression - */ - public void explain(AggregationBuilder groupByAgg, List fields) throws SqlParseException { - if (groupByAgg == null || conditions.isEmpty()) { - return; - } - - // parsing the fields from SELECT and HAVING clause - groupByAgg.subAggregation(bucketSelector(BUCKET_SELECTOR_NAME, - contextForFieldsInSelect(Iterables.concat(fields, getHavingFields())), - explainConditions())); - } - - private List parseHavingExprToConditions(SQLExpr havingExpr, HavingParser parser) - throws SqlParseException { - if (havingExpr == null) { - return Collections.emptyList(); - } - - Where where = Where.newInstance(); - parser.parseWhere(havingExpr, where); - return where.getWheres(); - } - - private Map contextForFieldsInSelect(Iterable fields) { - Map context = new HashMap<>(); - for (Field field : fields) { - if (field instanceof MethodField) { - // It's required to add to context even if alias in SELECT is exactly same name as that in script - context.put(field.getAlias(), bucketsPath(field.getAlias(), ((MethodField) field).getParams())); - } - } - return context; - } - - private Script explainConditions() throws SqlParseException { - return new Script(doExplain(conditions)); - } + private Script explainConditions() throws SqlParseException { + return new Script(doExplain(conditions)); + } /** + *
      * Explain conditions recursively.
      * Example: HAVING c >= 2 OR NOT (a > 20 AND c <= 10 OR a < 1) OR a < 5
      * Object: Where(?:
@@ -138,7 +143,7 @@ private Script explainConditions() throws SqlParseException {
      * Note: a) Where(connector : condition expression).
      * b) Condition is a subclass of Where.
      * c) connector=? means it doesn't matter for first condition in the list
-     *
+     * 
* @param wheres conditions * @return painless script string * @throws SqlParseException unknown type of expression other than identifier and value @@ -148,75 +153,74 @@ private String doExplain(List wheres) throws SqlParseException { return ""; } - StringBuilder script = new StringBuilder(); - for (Where cond : wheres) { - if (script.length() > 0) { - script.append(cond.getConn() == Where.CONN.AND ? AND : OR); - } - - if (cond instanceof Condition) { - script.append(createScript((Condition) cond)); - } else { - script.append('('). - append(doExplain(cond.getWheres())). - append(')'); - } - } - return script.toString(); + StringBuilder script = new StringBuilder(); + for (Where cond : wheres) { + if (script.length() > 0) { + script.append(cond.getConn() == Where.CONN.AND ? AND : OR); + } + + if (cond instanceof Condition) { + script.append(createScript((Condition) cond)); + } else { + script.append('(').append(doExplain(cond.getWheres())).append(')'); + } } - - private String createScript(Condition cond) throws SqlParseException { - String name = cond.getName(); - Object value = cond.getValue(); - switch (cond.getOPERATOR()) { - case EQ: - case GT: - case LT: - case GTE: - case LTE: - case IS: - case ISN: - return expr(name, cond.getOpertatorSymbol(), value); - case N: - return expr(name, "!=", value); - case BETWEEN: { - Object[] values = (Object[]) value; - return expr(name, ">=", values[0]) + AND + expr(name, "<=", values[1]); - } - case NBETWEEN: { - Object[] values = (Object[]) value; - return expr(name, "<", values[0]) + OR + expr(name, ">", values[1]); - } - case IN: - return Arrays.stream((Object[]) value). - map(val -> expr(name, "==", val)). - collect(joining(OR)); - case NIN: - return Arrays.stream((Object[]) value). - map(val -> expr(name, "!=", val)). 
- collect(joining(AND)); - default: - throw new SqlParseException("Unsupported operation in HAVING clause: " + cond.getOPERATOR()); + return script.toString(); + } + + private String createScript(Condition cond) throws SqlParseException { + String name = cond.getName(); + Object value = cond.getValue(); + switch (cond.getOPERATOR()) { + case EQ: + case GT: + case LT: + case GTE: + case LTE: + case IS: + case ISN: + return expr(name, cond.getOpertatorSymbol(), value); + case N: + return expr(name, "!=", value); + case BETWEEN: + { + Object[] values = (Object[]) value; + return expr(name, ">=", values[0]) + AND + expr(name, "<=", values[1]); } - } - - private String expr(String name, String operator, Object value) { - return String.join(" ", PARAMS + name, operator, value.toString()); - } - - /** - * Build the buckets_path. - * If the field is nested field, using the bucket path. - * else using the alias. - */ - private String bucketsPath(String alias, List kvValueList) { - if (kvValueList.size() == 1) { - KVValue kvValue = kvValueList.get(0); - if (StringUtils.equals(kvValue.key, "nested") - && kvValue.value instanceof NestedType) { - return ((NestedType) kvValue.value).getBucketPath(); - } + case NBETWEEN: + { + Object[] values = (Object[]) value; + return expr(name, "<", values[0]) + OR + expr(name, ">", values[1]); } - return alias; + case IN: + return Arrays.stream((Object[]) value) + .map(val -> expr(name, "==", val)) + .collect(joining(OR)); + case NIN: + return Arrays.stream((Object[]) value) + .map(val -> expr(name, "!=", val)) + .collect(joining(AND)); + default: + throw new SqlParseException( + "Unsupported operation in HAVING clause: " + cond.getOPERATOR()); + } + } + + private String expr(String name, String operator, Object value) { + return String.join(" ", PARAMS + name, operator, value.toString()); + } + + /** + * Build the buckets_path. If the field is nested field, using the bucket path. else using the + * alias. 
+ */ + private String bucketsPath(String alias, List kvValueList) { + if (kvValueList.size() == 1) { + KVValue kvValue = kvValueList.get(0); + if (StringUtils.equals(kvValue.key, "nested") && kvValue.value instanceof NestedType) { + return ((NestedType) kvValue.value).getBucketPath(); + } } + return alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java index e97a482b40..2a5be5728c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java @@ -3,89 +3,87 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * Class used to differentiate SHOW and DESCRIBE statements - */ +/** Class used to differentiate SHOW and DESCRIBE statements */ public class IndexStatement implements QueryStatement { - private StatementType statementType; - private String query; - private String indexPattern; - private String columnPattern; - - public IndexStatement(StatementType statementType, String query) { - this.statementType = statementType; - this.query = query; - - parseQuery(); - } - - private void parseQuery() { - String[] statement = query.split(" "); - - int tokenLength = statement.length; - try { - for (int i = 1; i < tokenLength; i++) { - switch (statement[i].toUpperCase()) { - case "TABLES": - if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { - if (i + 2 < tokenLength) { - indexPattern = replaceWildcard(statement[i + 2]); - i += 2; - } - } - break; - case "COLUMNS": - if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { - if (i + 2 < tokenLength) { - columnPattern = replaceWildcard(statement[i + 2]); - i += 2; - } - } - break; - } + private StatementType statementType; + private String query; + private String indexPattern; + private String columnPattern; + + public 
IndexStatement(StatementType statementType, String query) { + this.statementType = statementType; + this.query = query; + + parseQuery(); + } + + private void parseQuery() { + String[] statement = query.split(" "); + + int tokenLength = statement.length; + try { + for (int i = 1; i < tokenLength; i++) { + switch (statement[i].toUpperCase()) { + case "TABLES": + if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { + if (i + 2 < tokenLength) { + indexPattern = replaceWildcard(statement[i + 2]); + i += 2; + } } - - if (indexPattern == null) { - throw new IllegalArgumentException(); + break; + case "COLUMNS": + if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { + if (i + 2 < tokenLength) { + columnPattern = replaceWildcard(statement[i + 2]); + i += 2; + } } - } catch (Exception e) { - throw new IllegalArgumentException("Expected syntax example: " + syntaxString(), e); + break; } - } + } - private String replaceWildcard(String str) { - return str.replace("%", ".*").replace("_", "."); + if (indexPattern == null) { + throw new IllegalArgumentException(); + } + } catch (Exception e) { + throw new IllegalArgumentException("Expected syntax example: " + syntaxString(), e); } + } - private String syntaxString() { - if (statementType.equals(StatementType.SHOW)) { - return "'SHOW TABLES LIKE

ExprTypeWidens to data types
INTEGERLONG, FLOAT, DOUBLE
'"; - } else { - return "'DESCRIBE TABLES LIKE
[COLUMNS LIKE ]'"; - } - } + private String replaceWildcard(String str) { + return str.replace("%", ".*").replace("_", "."); + } - public StatementType getStatementType() { - return statementType; + private String syntaxString() { + if (statementType.equals(StatementType.SHOW)) { + return "'SHOW TABLES LIKE
'"; + } else { + return "'DESCRIBE TABLES LIKE
[COLUMNS LIKE ]'"; } + } - public String getQuery() { - return query; - } + public StatementType getStatementType() { + return statementType; + } - public String getIndexPattern() { - return indexPattern; - } + public String getQuery() { + return query; + } - public String getColumnPattern() { - return columnPattern; - } + public String getIndexPattern() { + return indexPattern; + } - public enum StatementType { - SHOW, DESCRIBE - } + public String getColumnPattern() { + return columnPattern; + } + + public enum StatementType { + SHOW, + DESCRIBE + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java index c77df6e9ad..211b33c68a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java @@ -3,85 +3,78 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; import java.util.List; import org.opensearch.sql.legacy.domain.hints.Hint; -/** - * Created by Eliran on 20/8/2015. - */ +/** Created by Eliran on 20/8/2015. 
*/ public class JoinSelect extends Query { + private TableOnJoinSelect firstTable; + private TableOnJoinSelect secondTable; + private Where connectedWhere; + private List hints; + private List connectedConditions; + private int totalLimit; - private TableOnJoinSelect firstTable; - private TableOnJoinSelect secondTable; - private Where connectedWhere; - private List hints; - private List connectedConditions; - private int totalLimit; - - private final int DEAFULT_NUM_OF_RESULTS = 200; - - private SQLJoinTableSource.JoinType joinType; - - - public JoinSelect() { - firstTable = new TableOnJoinSelect(); - secondTable = new TableOnJoinSelect(); + private final int DEAFULT_NUM_OF_RESULTS = 200; - totalLimit = DEAFULT_NUM_OF_RESULTS; - } + private SQLJoinTableSource.JoinType joinType; + public JoinSelect() { + firstTable = new TableOnJoinSelect(); + secondTable = new TableOnJoinSelect(); - public Where getConnectedWhere() { - return connectedWhere; - } + totalLimit = DEAFULT_NUM_OF_RESULTS; + } - public void setConnectedWhere(Where connectedWhere) { - this.connectedWhere = connectedWhere; - } + public Where getConnectedWhere() { + return connectedWhere; + } - public TableOnJoinSelect getFirstTable() { - return firstTable; - } + public void setConnectedWhere(Where connectedWhere) { + this.connectedWhere = connectedWhere; + } - public TableOnJoinSelect getSecondTable() { - return secondTable; - } + public TableOnJoinSelect getFirstTable() { + return firstTable; + } + public TableOnJoinSelect getSecondTable() { + return secondTable; + } - public SQLJoinTableSource.JoinType getJoinType() { - return joinType; - } + public SQLJoinTableSource.JoinType getJoinType() { + return joinType; + } - public void setJoinType(SQLJoinTableSource.JoinType joinType) { - this.joinType = joinType; - } + public void setJoinType(SQLJoinTableSource.JoinType joinType) { + this.joinType = joinType; + } - public List getHints() { - return hints; - } + public List getHints() { + return hints; + } - 
public void setHints(List hints) { - this.hints = hints; - } + public void setHints(List hints) { + this.hints = hints; + } - public int getTotalLimit() { - return totalLimit; - } + public int getTotalLimit() { + return totalLimit; + } - public List getConnectedConditions() { - return connectedConditions; - } + public List getConnectedConditions() { + return connectedConditions; + } - public void setConnectedConditions(List connectedConditions) { - this.connectedConditions = connectedConditions; - } + public void setConnectedConditions(List connectedConditions) { + this.connectedConditions = connectedConditions; + } - public void setTotalLimit(int totalLimit) { - this.totalLimit = totalLimit; - } + public void setTotalLimit(int totalLimit) { + this.totalLimit = totalLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java index 10e2ad3d12..d864cbac12 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java @@ -3,30 +3,29 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; public class KVValue implements Cloneable { - public String key; - public Object value; + public String key; + public Object value; - public KVValue(Object value) { - this.value = value; - } + public KVValue(Object value) { + this.value = value; + } - public KVValue(String key, Object value) { - if (key != null) { - this.key = key.replace("'", ""); - } - this.value = value; + public KVValue(String key, Object value) { + if (key != null) { + this.key = key.replace("'", ""); } + this.value = value; + } - @Override - public String toString() { - if (key == null) { - return value.toString(); - } else { - return key + "=" + value; - } + @Override + public String toString() { + if (key == null) { + return value.toString(); + } else { + return key + "=" + value; } + } } 
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java index 4529c4344c..45d6d1053e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -14,96 +13,94 @@ import org.opensearch.sql.legacy.utils.Util; /** - * - * * @author ansj */ public class MethodField extends Field { - private List params = null; - - public MethodField(String name, List params, SQLAggregateOption option, String alias) { - super(name, alias); - this.params = params; - this.option = option; - if (alias == null || alias.trim().length() == 0) { - Map paramsAsMap = this.getParamsAsMap(); - if (paramsAsMap.containsKey("alias")) { - this.setAlias(paramsAsMap.get("alias").toString()); - } else { - this.setAlias(this.toString()); - } - } + private List params = null; + + public MethodField(String name, List params, SQLAggregateOption option, String alias) { + super(name, alias); + this.params = params; + this.option = option; + if (alias == null || alias.trim().length() == 0) { + Map paramsAsMap = this.getParamsAsMap(); + if (paramsAsMap.containsKey("alias")) { + this.setAlias(paramsAsMap.get("alias").toString()); + } else { + this.setAlias(this.toString()); + } } + } - public List getParams() { - return params; - } + public List getParams() { + return params; + } - public Map getParamsAsMap() { - Map paramsAsMap = new HashMap<>(); - if (this.params == null) { - return paramsAsMap; - } - for (KVValue kvValue : this.params) { - paramsAsMap.put(kvValue.key, kvValue.value); - } - return paramsAsMap; + public Map getParamsAsMap() { + Map paramsAsMap = new HashMap<>(); + if (this.params == null) { + return paramsAsMap; } - - @Override - public 
String toString() { - if (option != null) { - return this.name + "(" + option + " " + Util.joiner(params, ",") + ")"; - } - return this.name + "(" + Util.joiner(params, ",") + ")"; + for (KVValue kvValue : this.params) { + paramsAsMap.put(kvValue.key, kvValue.value); } + return paramsAsMap; + } - @Override - public boolean isNested() { - Map paramsAsMap = this.getParamsAsMap(); - return paramsAsMap.containsKey("nested") || paramsAsMap.containsKey("reverse_nested"); + @Override + public String toString() { + if (option != null) { + return this.name + "(" + option + " " + Util.joiner(params, ",") + ")"; } - - @Override - public boolean isReverseNested() { - return this.getParamsAsMap().containsKey("reverse_nested"); + return this.name + "(" + Util.joiner(params, ",") + ")"; + } + + @Override + public boolean isNested() { + Map paramsAsMap = this.getParamsAsMap(); + return paramsAsMap.containsKey("nested") || paramsAsMap.containsKey("reverse_nested"); + } + + @Override + public boolean isReverseNested() { + return this.getParamsAsMap().containsKey("reverse_nested"); + } + + @Override + public String getNestedPath() { + if (!this.isNested()) { + return null; } - - @Override - public String getNestedPath() { - if (!this.isNested()) { - return null; - } - if (this.isReverseNested()) { - String reverseNestedPath = this.getParamsAsMap().get("reverse_nested").toString(); - return reverseNestedPath.isEmpty() ? null : reverseNestedPath; - } - - // Fix bug: NestedType.toString() isn't implemented which won't return desired nested path - Object nestedField = getParamsAsMap().get("nested"); - if (nestedField instanceof NestedType) { - return ((NestedType) nestedField).path; - } - return nestedField.toString(); + if (this.isReverseNested()) { + String reverseNestedPath = this.getParamsAsMap().get("reverse_nested").toString(); + return reverseNestedPath.isEmpty() ? 
null : reverseNestedPath; } - @Override - public boolean isChildren() { - Map paramsAsMap = this.getParamsAsMap(); - return paramsAsMap.containsKey("children"); + // Fix bug: NestedType.toString() isn't implemented which won't return desired nested path + Object nestedField = getParamsAsMap().get("nested"); + if (nestedField instanceof NestedType) { + return ((NestedType) nestedField).path; } - - @Override - public String getChildType() { - if (!this.isChildren()) { - return null; - } - - return this.getParamsAsMap().get("children").toString(); + return nestedField.toString(); + } + + @Override + public boolean isChildren() { + Map paramsAsMap = this.getParamsAsMap(); + return paramsAsMap.containsKey("children"); + } + + @Override + public String getChildType() { + if (!this.isChildren()) { + return null; } - @Override - public boolean isScriptField() { - return "script".equals(getName()); - } + return this.getParamsAsMap().get("children").toString(); + } + + @Override + public boolean isScriptField() { + return "script".equals(getName()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java index 8a5c174c41..b83c63aae1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; -/** - * Created by Eliran on 5/9/2015. - */ +/** Created by Eliran on 5/9/2015. 
*/ public class Hint { - private HintType type; - private Object[] params; + private HintType type; + private Object[] params; - public Hint(HintType type, Object[] params) { - this.type = type; - this.params = params; - } + public Hint(HintType type, Object[] params) { + this.type = type; + this.params = params; + } - public HintType getType() { - return type; - } + public HintType getType() { + return type; + } - public Object[] getParams() { - return params; - } + public Object[] getParams() { + return params; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java index 18c68d57ab..81b676e3d5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; - import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLParser; import java.io.IOException; @@ -18,211 +16,217 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 5/9/2015. - */ +/** Created by Eliran on 5/9/2015. */ public class HintFactory { - private static final String PREFIX = "! "; + private static final String PREFIX = "! "; - public static Hint getHintFromString(String hintAsString) throws SqlParseException { - if (hintAsString.startsWith("! USE_NESTED_LOOPS") || hintAsString.startsWith("! USE_NL")) { - return new Hint(HintType.USE_NESTED_LOOPS, null); - } - - if (hintAsString.startsWith("! SHARD_SIZE")) { - String[] numbers = getParamsFromHint(hintAsString, "! SHARD_SIZE"); - //todo: check if numbers etc.. 
- List params = new ArrayList<>(); - for (String number : numbers) { - if (number.equals("null") || number.equals("infinity")) { - params.add(null); - } else { - params.add(Integer.parseInt(number)); - } - } - return new Hint(HintType.SHARD_SIZE, params.toArray()); - } - - if (hintAsString.equals("! HASH_WITH_TERMS_FILTER")) { - return new Hint(HintType.HASH_WITH_TERMS_FILTER, null); - } - if (hintAsString.startsWith("! JOIN_TABLES_LIMIT")) { - String[] numbers = getParamsFromHint(hintAsString, "! JOIN_TABLES_LIMIT"); - //todo: check if numbers etc.. - List params = new ArrayList<>(); - for (String number : numbers) { - if (number.equals("null") || number.equals("infinity")) { - params.add(null); - } else { - params.add(Integer.parseInt(number)); - } - } - - return new Hint(HintType.JOIN_LIMIT, params.toArray()); - } - if (hintAsString.startsWith("! NL_MULTISEARCH_SIZE")) { - String[] number = getParamsFromHint(hintAsString, "! NL_MULTISEARCH_SIZE"); - //todo: check if numbers etc.. - int multiSearchSize = Integer.parseInt(number[0]); - return new Hint(HintType.NL_MULTISEARCH_SIZE, new Object[]{multiSearchSize}); - } - if (hintAsString.startsWith("! USE_SCROLL")) { - String[] scrollParams = getParamsFromHint(hintAsString, "! USE_SCROLL"); - if (scrollParams != null && scrollParams.length == 2) { - String param = scrollParams[0]; - return new Hint(HintType.USE_SCROLL, - new Object[]{ - (param.startsWith("\"") && param.endsWith("\"")) - || (param.startsWith("'") && param.endsWith("'")) - ? param.substring(1, param.length() - 1) : Integer.parseInt(param), - Integer.parseInt(scrollParams[1])}); - } else { - return new Hint(HintType.USE_SCROLL, new Object[]{50, 60000}); - } - } - if (hintAsString.startsWith("! IGNORE_UNAVAILABLE")) { - return new Hint(HintType.IGNORE_UNAVAILABLE, null); - } - if (hintAsString.startsWith("! DOCS_WITH_AGGREGATION")) { - Integer[] params = parseParamsAsInts(hintAsString, "! 
DOCS_WITH_AGGREGATION"); - return new Hint(HintType.DOCS_WITH_AGGREGATION, params); - } - if (hintAsString.startsWith("! ROUTINGS")) { - String[] routings = getParamsFromHint(hintAsString, "! ROUTINGS"); - return new Hint(HintType.ROUTINGS, routings); - } - if (hintAsString.startsWith("! HIGHLIGHT")) { - String[] heighlights = getParamsFromHint(hintAsString, "! HIGHLIGHT"); - ArrayList hintParams = new ArrayList(); - hintParams.add(heighlights[0]); - if (heighlights.length > 1) { - StringBuilder builder = new StringBuilder(); - for (int i = 1; i < heighlights.length; i++) { - if (i != 1) { - builder.append("\n"); - } - builder.append(heighlights[i]); - } - String heighlightParam = builder.toString(); - YAMLFactory yamlFactory = new YAMLFactory(); - YAMLParser yamlParser = null; - try { - yamlParser = yamlFactory.createParser(heighlightParam.toCharArray()); - YamlXContentParser yamlXContentParser = new YamlXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, yamlParser); - Map map = yamlXContentParser.map(); - hintParams.add(map); - } catch (IOException e) { - throw new SqlParseException("could not parse heighlight hint: " + e.getMessage()); - } - } - return new Hint(HintType.HIGHLIGHT, hintParams.toArray()); - } - if (hintAsString.startsWith("! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS")) { - Integer[] params = parseParamsAsInts(hintAsString, "! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS"); - if (params.length > 3) { - throw new SqlParseException("MINUS_FETCH_AND_RESULT_LIMITS should have 3 int params " - + "(maxFromFirst,maxFromSecond,hitsPerScrollShard)"); - } - Integer[] paramsWithDefaults = new Integer[3]; - int defaultMaxFetchFromTable = 100000; - int defaultFetchOnScroll = 1000; - paramsWithDefaults[0] = defaultMaxFetchFromTable; - paramsWithDefaults[1] = defaultMaxFetchFromTable; - paramsWithDefaults[2] = defaultFetchOnScroll; - for (int i = 0; i < params.length; i++) { - paramsWithDefaults[i] = params[i]; - } - - return new Hint(HintType.MINUS_FETCH_AND_RESULT_LIMITS, paramsWithDefaults); - } - if (hintAsString.startsWith("! MINUS_USE_TERMS_OPTIMIZATION")) { - String[] param = getParamsFromHint(hintAsString, "! MINUS_USE_TERMS_OPTIMIZATION"); - boolean shouldLowerStringOnTerms = false; - if (param != null) { - if (param.length != 1) { - throw new SqlParseException( - "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: false/true "); - } - try { - shouldLowerStringOnTerms = Boolean.parseBoolean(param[0].toLowerCase()); - } catch (Exception e) { - throw new SqlParseException("MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: " - + "false/true , got:" + param[0]); - } - } - return new Hint(HintType.MINUS_USE_TERMS_OPTIMIZATION, new Object[]{shouldLowerStringOnTerms}); - } - if (hintAsString.startsWith("! COLLAPSE")) { - String collapse = getParamFromHint(hintAsString, "! COLLAPSE"); - return new Hint(HintType.COLLAPSE, new String[]{collapse}); - } - if (hintAsString.startsWith("! POST_FILTER")) { - String postFilter = getParamFromHint(hintAsString, "! POST_FILTER"); - return new Hint(HintType.POST_FILTER, new String[]{postFilter}); - } - - Hint queryPlanHint = parseHintForQueryPlanner(hintAsString); - if (queryPlanHint != null) { - return queryPlanHint; - } - - return null; - } - - /** - * Parse hints for hash join in new query planning framework. 
- * Only check syntax error here and leave semantics interpret work for planner. - */ - private static Hint parseHintForQueryPlanner(String hintStr) { - if (hintStr.contains("(") - && (hintStr.startsWith("! JOIN_ALGORITHM_BLOCK_SIZE") - || hintStr.startsWith("! JOIN_SCROLL_PAGE_SIZE") - || hintStr.startsWith("! JOIN_CIRCUIT_BREAK_LIMIT") - || hintStr.startsWith("! JOIN_BACK_OFF_RETRY_INTERVALS") - || hintStr.startsWith("! JOIN_TIME_OUT") - )) { // Note that Trie tree is needed here if many hint options - - String hintName = hintStr.substring(PREFIX.length(), hintStr.indexOf('(')).trim(); - String hintPrefix = PREFIX + hintName; - HintType hintType = HintType.valueOf(hintName); - Integer[] params = parseParamsAsInts(hintStr, hintPrefix); - - if (params != null && params.length > 0) { - return new Hint(hintType, params); - } - } else if (hintStr.startsWith("! JOIN_ALGORITHM_USE_LEGACY")) { - return new Hint(HintType.JOIN_ALGORITHM_USE_LEGACY, new Object[0]); - } - return null; + public static Hint getHintFromString(String hintAsString) throws SqlParseException { + if (hintAsString.startsWith("! USE_NESTED_LOOPS") || hintAsString.startsWith("! USE_NL")) { + return new Hint(HintType.USE_NESTED_LOOPS, null); } - private static String getParamFromHint(String hint, String prefix) { - if (!hint.contains("(")) { - return null; - } - return hint.replace(prefix, "").replaceAll("\\s*\\(\\s*", "").replaceAll("\\s*\\,\\s*", ",") - .replaceAll("\\s*\\)\\s*", ""); + if (hintAsString.startsWith("! SHARD_SIZE")) { + String[] numbers = getParamsFromHint(hintAsString, "! SHARD_SIZE"); + // todo: check if numbers etc.. 
+ List params = new ArrayList<>(); + for (String number : numbers) { + if (number.equals("null") || number.equals("infinity")) { + params.add(null); + } else { + params.add(Integer.parseInt(number)); + } + } + return new Hint(HintType.SHARD_SIZE, params.toArray()); } - private static String[] getParamsFromHint(String hint, String prefix) { - String param = getParamFromHint(hint, prefix); - return param != null ? param.split(",") : null; + if (hintAsString.equals("! HASH_WITH_TERMS_FILTER")) { + return new Hint(HintType.HASH_WITH_TERMS_FILTER, null); + } + if (hintAsString.startsWith("! JOIN_TABLES_LIMIT")) { + String[] numbers = getParamsFromHint(hintAsString, "! JOIN_TABLES_LIMIT"); + // todo: check if numbers etc.. + List params = new ArrayList<>(); + for (String number : numbers) { + if (number.equals("null") || number.equals("infinity")) { + params.add(null); + } else { + params.add(Integer.parseInt(number)); + } + } + + return new Hint(HintType.JOIN_LIMIT, params.toArray()); + } + if (hintAsString.startsWith("! NL_MULTISEARCH_SIZE")) { + String[] number = getParamsFromHint(hintAsString, "! NL_MULTISEARCH_SIZE"); + // todo: check if numbers etc.. + int multiSearchSize = Integer.parseInt(number[0]); + return new Hint(HintType.NL_MULTISEARCH_SIZE, new Object[] {multiSearchSize}); + } + if (hintAsString.startsWith("! USE_SCROLL")) { + String[] scrollParams = getParamsFromHint(hintAsString, "! USE_SCROLL"); + if (scrollParams != null && scrollParams.length == 2) { + String param = scrollParams[0]; + return new Hint( + HintType.USE_SCROLL, + new Object[] { + (param.startsWith("\"") && param.endsWith("\"")) + || (param.startsWith("'") && param.endsWith("'")) + ? param.substring(1, param.length() - 1) + : Integer.parseInt(param), + Integer.parseInt(scrollParams[1]) + }); + } else { + return new Hint(HintType.USE_SCROLL, new Object[] {50, 60000}); + } + } + if (hintAsString.startsWith("! 
IGNORE_UNAVAILABLE")) { + return new Hint(HintType.IGNORE_UNAVAILABLE, null); + } + if (hintAsString.startsWith("! DOCS_WITH_AGGREGATION")) { + Integer[] params = parseParamsAsInts(hintAsString, "! DOCS_WITH_AGGREGATION"); + return new Hint(HintType.DOCS_WITH_AGGREGATION, params); + } + if (hintAsString.startsWith("! ROUTINGS")) { + String[] routings = getParamsFromHint(hintAsString, "! ROUTINGS"); + return new Hint(HintType.ROUTINGS, routings); + } + if (hintAsString.startsWith("! HIGHLIGHT")) { + String[] highlights = getParamsFromHint(hintAsString, "! HIGHLIGHT"); + ArrayList hintParams = new ArrayList(); + hintParams.add(highlights[0]); + if (highlights.length > 1) { + StringBuilder builder = new StringBuilder(); + for (int i = 1; i < highlights.length; i++) { + if (i != 1) { + builder.append("\n"); + } + builder.append(highlights[i]); + } + String heighlightParam = builder.toString(); + YAMLFactory yamlFactory = new YAMLFactory(); + YAMLParser yamlParser = null; + try { + yamlParser = yamlFactory.createParser(heighlightParam.toCharArray()); + YamlXContentParser yamlXContentParser = + new YamlXContentParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, yamlParser); + Map map = yamlXContentParser.map(); + hintParams.add(map); + } catch (IOException e) { + throw new SqlParseException("could not parse heighlight hint: " + e.getMessage()); + } + } + return new Hint(HintType.HIGHLIGHT, hintParams.toArray()); + } + if (hintAsString.startsWith("! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS")) { + Integer[] params = parseParamsAsInts(hintAsString, "! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS"); + if (params.length > 3) { + throw new SqlParseException( + "MINUS_FETCH_AND_RESULT_LIMITS should have 3 int params " + + "(maxFromFirst,maxFromSecond,hitsPerScrollShard)"); + } + Integer[] paramsWithDefaults = new Integer[3]; + int defaultMaxFetchFromTable = 100000; + int defaultFetchOnScroll = 1000; + paramsWithDefaults[0] = defaultMaxFetchFromTable; + paramsWithDefaults[1] = defaultMaxFetchFromTable; + paramsWithDefaults[2] = defaultFetchOnScroll; + for (int i = 0; i < params.length; i++) { + paramsWithDefaults[i] = params[i]; + } + + return new Hint(HintType.MINUS_FETCH_AND_RESULT_LIMITS, paramsWithDefaults); + } + if (hintAsString.startsWith("! MINUS_USE_TERMS_OPTIMIZATION")) { + String[] param = getParamsFromHint(hintAsString, "! MINUS_USE_TERMS_OPTIMIZATION"); + boolean shouldLowerStringOnTerms = false; + if (param != null) { + if (param.length != 1) { + throw new SqlParseException( + "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: false/true "); + } + try { + shouldLowerStringOnTerms = Boolean.parseBoolean(param[0].toLowerCase()); + } catch (Exception e) { + throw new SqlParseException( + "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: " + + "false/true , got:" + + param[0]); + } + } + return new Hint( + HintType.MINUS_USE_TERMS_OPTIMIZATION, new Object[] {shouldLowerStringOnTerms}); + } + if (hintAsString.startsWith("! COLLAPSE")) { + String collapse = getParamFromHint(hintAsString, "! COLLAPSE"); + return new Hint(HintType.COLLAPSE, new String[] {collapse}); + } + if (hintAsString.startsWith("! POST_FILTER")) { + String postFilter = getParamFromHint(hintAsString, "! 
POST_FILTER"); + return new Hint(HintType.POST_FILTER, new String[] {postFilter}); } - private static Integer[] parseParamsAsInts(String hintAsString, String startWith) { - String[] number = getParamsFromHint(hintAsString, startWith); - if (number == null) { - return new Integer[0]; - } - //todo: check if numbers etc.. - Integer[] params = new Integer[number.length]; - for (int i = 0; i < params.length; i++) { - params[i] = Integer.parseInt(number[i]); - } - return params; + Hint queryPlanHint = parseHintForQueryPlanner(hintAsString); + if (queryPlanHint != null) { + return queryPlanHint; } + return null; + } + + /** + * Parse hints for hash join in new query planning framework. Only check syntax error here and + * leave semantics interpret work for planner. + */ + private static Hint parseHintForQueryPlanner(String hintStr) { + if (hintStr.contains("(") + && (hintStr.startsWith("! JOIN_ALGORITHM_BLOCK_SIZE") + || hintStr.startsWith("! JOIN_SCROLL_PAGE_SIZE") + || hintStr.startsWith("! JOIN_CIRCUIT_BREAK_LIMIT") + || hintStr.startsWith("! JOIN_BACK_OFF_RETRY_INTERVALS") + || hintStr.startsWith( + "! JOIN_TIME_OUT"))) { // Note that Trie tree is needed here if many hint options + + String hintName = hintStr.substring(PREFIX.length(), hintStr.indexOf('(')).trim(); + String hintPrefix = PREFIX + hintName; + HintType hintType = HintType.valueOf(hintName); + Integer[] params = parseParamsAsInts(hintStr, hintPrefix); + + if (params != null && params.length > 0) { + return new Hint(hintType, params); + } + } else if (hintStr.startsWith("! 
JOIN_ALGORITHM_USE_LEGACY")) { + return new Hint(HintType.JOIN_ALGORITHM_USE_LEGACY, new Object[0]); + } + return null; + } + private static String getParamFromHint(String hint, String prefix) { + if (!hint.contains("(")) { + return null; + } + return hint.replace(prefix, "") + .replaceAll("\\s*\\(\\s*", "") + .replaceAll("\\s*\\,\\s*", ",") + .replaceAll("\\s*\\)\\s*", ""); + } + + private static String[] getParamsFromHint(String hint, String prefix) { + String param = getParamFromHint(hint, prefix); + return param != null ? param.split(",") : null; + } + + private static Integer[] parseParamsAsInts(String hintAsString, String startWith) { + String[] number = getParamsFromHint(hintAsString, startWith); + if (number == null) { + return new Integer[0]; + } + // todo: check if numbers etc.. + Integer[] params = new Integer[number.length]; + for (int i = 0; i < params.length; i++) { + params[i] = Integer.parseInt(number[i]); + } + return params; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java index 7d3444c36c..0134ef0874 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java @@ -3,31 +3,28 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; -/** - * Created by Eliran on 29/8/2015. - */ +/** Created by Eliran on 29/8/2015. 
*/ public enum HintType { - HASH_WITH_TERMS_FILTER, - JOIN_LIMIT, - USE_NESTED_LOOPS, - NL_MULTISEARCH_SIZE, - USE_SCROLL, - IGNORE_UNAVAILABLE, - DOCS_WITH_AGGREGATION, - ROUTINGS, - SHARD_SIZE, - HIGHLIGHT, - MINUS_FETCH_AND_RESULT_LIMITS, - MINUS_USE_TERMS_OPTIMIZATION, - COLLAPSE, - POST_FILTER, - JOIN_ALGORITHM_BLOCK_SIZE, - JOIN_ALGORITHM_USE_LEGACY, - JOIN_SCROLL_PAGE_SIZE, - JOIN_CIRCUIT_BREAK_LIMIT, - JOIN_BACK_OFF_RETRY_INTERVALS, - JOIN_TIME_OUT + HASH_WITH_TERMS_FILTER, + JOIN_LIMIT, + USE_NESTED_LOOPS, + NL_MULTISEARCH_SIZE, + USE_SCROLL, + IGNORE_UNAVAILABLE, + DOCS_WITH_AGGREGATION, + ROUTINGS, + SHARD_SIZE, + HIGHLIGHT, + MINUS_FETCH_AND_RESULT_LIMITS, + MINUS_USE_TERMS_OPTIMIZATION, + COLLAPSE, + POST_FILTER, + JOIN_ALGORITHM_BLOCK_SIZE, + JOIN_ALGORITHM_USE_LEGACY, + JOIN_SCROLL_PAGE_SIZE, + JOIN_CIRCUIT_BREAK_LIMIT, + JOIN_BACK_OFF_RETRY_INTERVALS, + JOIN_TIME_OUT } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java index fc69ecff30..84875b9531 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain; import com.google.common.cache.Cache; @@ -30,188 +29,181 @@ import org.opensearch.sql.opensearch.setting.OpenSearchSettings; /** - * Local cluster state information which may be stale but help avoid blocking operation in NIO thread. - *

- * 1) Why extending TransportAction doesn't work here? - * TransportAction enforce implementation to be performed remotely but local cluster state read is expected here. - *

- * 2) Why injection by AbstractModule doesn't work here? - * Because this state needs to be used across the plugin, ex. in rewriter, pretty formatter etc. + * Local cluster state information which may be stale but help avoid blocking operation in NIO + * thread. + *

    + *
  1. Why extending TransportAction doesn't work here? TransportAction enforce implementation to + * be performed remotely but local cluster state read is expected here. + *
  2. Why injection by AbstractModule doesn't work here? Because this state needs to be used + * across the plugin, ex. in rewriter, pretty formatter etc. + *
*/ public class LocalClusterState { - private static final Logger LOG = LogManager.getLogger(); - - private static final Function> ALL_FIELDS = (anyIndex -> (anyField -> true)); + private static final Logger LOG = LogManager.getLogger(); - /** - * Singleton instance - */ - private static LocalClusterState INSTANCE; + private static final Function> ALL_FIELDS = + (anyIndex -> (anyField -> true)); - /** - * Current cluster state on local node - */ - private ClusterService clusterService; + /** Singleton instance */ + private static LocalClusterState INSTANCE; - private OpenSearchSettings pluginSettings; + /** Current cluster state on local node */ + private ClusterService clusterService; - /** - * Index name expression resolver to get concrete index name - */ - private IndexNameExpressionResolver resolver; + private OpenSearchSettings pluginSettings; - /** - * Thread-safe mapping cache to save the computation of sourceAsMap() which is not lightweight as thought - * Array cannot be used as key because hashCode() always return reference address, so either use wrapper or List. - */ - private final Cache, IndexMappings> cache; + /** Index name expression resolver to get concrete index name */ + private IndexNameExpressionResolver resolver; - /** - * Latest setting value for each registered key. Thread-safe is required. - */ - private final Map latestSettings = new ConcurrentHashMap<>(); + /** + * Thread-safe mapping cache to save the computation of sourceAsMap() which is not lightweight as + * thought Array cannot be used as key because hashCode() always return reference address, so + * either use wrapper or List. + */ + private final Cache, IndexMappings> cache; - public static synchronized LocalClusterState state() { - if (INSTANCE == null) { - INSTANCE = new LocalClusterState(); - } - return INSTANCE; - } + /** Latest setting value for each registered key. Thread-safe is required. 
*/ + private final Map latestSettings = new ConcurrentHashMap<>(); - /** - * Give testing code a chance to inject mock object - */ - public static synchronized void state(LocalClusterState instance) { - INSTANCE = instance; + public static synchronized LocalClusterState state() { + if (INSTANCE == null) { + INSTANCE = new LocalClusterState(); } - - public void setClusterService(ClusterService clusterService) { - this.clusterService = clusterService; - - clusterService.addListener(event -> { - if (event.metadataChanged()) { - // State in cluster service is already changed to event.state() before listener fired - if (LOG.isDebugEnabled()) { - LOG.debug("Metadata in cluster state changed: {}", - new IndexMappings(clusterService.state().metadata())); - } - cache.invalidateAll(); + return INSTANCE; + } + + /** Give testing code a chance to inject mock object */ + public static synchronized void state(LocalClusterState instance) { + INSTANCE = instance; + } + + public void setClusterService(ClusterService clusterService) { + this.clusterService = clusterService; + + clusterService.addListener( + event -> { + if (event.metadataChanged()) { + // State in cluster service is already changed to event.state() before listener fired + if (LOG.isDebugEnabled()) { + LOG.debug( + "Metadata in cluster state changed: {}", + new IndexMappings(clusterService.state().metadata())); } + cache.invalidateAll(); + } }); - } - - public void setPluginSettings(OpenSearchSettings settings) { - this.pluginSettings = settings; - for (Setting setting: settings.getSettings()) { - clusterService.getClusterSettings().addSettingsUpdateConsumer( - setting, - newVal -> { - if (LOG.isDebugEnabled()) { - LOG.debug("The value of setting [{}] changed to [{}]", setting.getKey(), newVal); - } - latestSettings.put(setting.getKey(), newVal); + } + + public void setPluginSettings(OpenSearchSettings settings) { + this.pluginSettings = settings; + for (Setting setting : settings.getSettings()) { + clusterService + 
.getClusterSettings() + .addSettingsUpdateConsumer( + setting, + newVal -> { + if (LOG.isDebugEnabled()) { + LOG.debug("The value of setting [{}] changed to [{}]", setting.getKey(), newVal); } - ); - } - + latestSettings.put(setting.getKey(), newVal); + }); } - - public void setResolver(IndexNameExpressionResolver resolver) { - this.resolver = resolver; + } + + public void setResolver(IndexNameExpressionResolver resolver) { + this.resolver = resolver; + } + + private LocalClusterState() { + cache = CacheBuilder.newBuilder().maximumSize(100).build(); + } + + /** + * Get plugin setting value by key. Return default value if not configured explicitly. + * + * @param key setting key registered during plugin bootstrap. + * @return setting value or default. + */ + @SuppressWarnings("unchecked") + public T getSettingValue(Settings.Key key) { + Objects.requireNonNull(pluginSettings, "SQL plugin setting is null"); + return (T) latestSettings.getOrDefault(key.getKeyValue(), pluginSettings.getSettingValue(key)); + } + + /** Get field mappings by index expressions. All types and fields are included in response. */ + public IndexMappings getFieldMappings(String[] indices) { + return getFieldMappings(indices, ALL_FIELDS); + } + + /** + * Get field mappings by index expressions, type and field filter. Because + * IndexMetaData/MappingMetaData is hard to convert to FieldMappingMetaData, custom mapping domain + * objects are being used here. In future, it should be moved to domain model layer for all + * OpenSearch specific knowledge. + * + *

Note that cluster state may be change inside OpenSearch so it's possible to read different + * state in 2 accesses to ClusterService.state() here. + * + * @param indices index name expression + * @param fieldFilter field filter predicate + * @return index mapping(s) + */ + private IndexMappings getFieldMappings( + String[] indices, Function> fieldFilter) { + Objects.requireNonNull(clusterService, "Cluster service is null"); + Objects.requireNonNull(resolver, "Index name expression resolver is null"); + + try { + ClusterState state = clusterService.state(); + String[] concreteIndices = resolveIndexExpression(state, indices); + + IndexMappings mappings; + if (fieldFilter == ALL_FIELDS) { + mappings = findMappingsInCache(state, concreteIndices); + } else { + mappings = findMappings(state, concreteIndices, fieldFilter); + } + + LOG.debug("Found mappings: {}", mappings); + return mappings; + } catch (IndexNotFoundException e) { + throw e; + } catch (Exception e) { + throw new IllegalStateException( + "Failed to read mapping in cluster state for indices=" + Arrays.toString(indices), e); } + } - private LocalClusterState() { - cache = CacheBuilder.newBuilder().maximumSize(100).build(); - } - - /** - * Get plugin setting value by key. Return default value if not configured explicitly. - * @param key setting key registered during plugin bootstrap. - * @return setting value or default. - */ - @SuppressWarnings("unchecked") - public T getSettingValue(Settings.Key key) { - Objects.requireNonNull(pluginSettings, "SQL plugin setting is null"); - return (T) latestSettings.getOrDefault(key.getKeyValue(), - pluginSettings.getSettingValue(key)); - } + private String[] resolveIndexExpression(ClusterState state, String[] indices) { + String[] concreteIndices = + resolver.concreteIndexNames(state, IndicesOptions.strictExpandOpen(), true, indices); - /** - * Get field mappings by index expressions. All types and fields are included in response. 
- */ - public IndexMappings getFieldMappings(String[] indices) { - return getFieldMappings(indices, ALL_FIELDS); + if (LOG.isDebugEnabled()) { + LOG.debug( + "Resolved index expression {} to concrete index names {}", + Arrays.toString(indices), + Arrays.toString(concreteIndices)); } - - /** - * Get field mappings by index expressions, type and field filter. Because IndexMetaData/MappingMetaData - * is hard to convert to FieldMappingMetaData, custom mapping domain objects are being used here. In future, - * it should be moved to domain model layer for all OpenSearch specific knowledge. - *

- * Note that cluster state may be change inside OpenSearch so it's possible to read different state in 2 accesses - * to ClusterService.state() here. - * - * @param indices index name expression - * @param fieldFilter field filter predicate - * @return index mapping(s) - */ - private IndexMappings getFieldMappings(String[] indices, Function> fieldFilter) { - Objects.requireNonNull(clusterService, "Cluster service is null"); - Objects.requireNonNull(resolver, "Index name expression resolver is null"); - - try { - ClusterState state = clusterService.state(); - String[] concreteIndices = resolveIndexExpression(state, indices); - - IndexMappings mappings; - if (fieldFilter == ALL_FIELDS) { - mappings = findMappingsInCache(state, concreteIndices); - } else { - mappings = findMappings(state, concreteIndices, fieldFilter); - } - - LOG.debug("Found mappings: {}", mappings); - return mappings; - } catch (IndexNotFoundException e) { - throw e; - } catch (Exception e) { - throw new IllegalStateException( - "Failed to read mapping in cluster state for indices=" - + Arrays.toString(indices) , e); - } - } - - private String[] resolveIndexExpression(ClusterState state, String[] indices) { - String[] concreteIndices = resolver.concreteIndexNames(state, IndicesOptions.strictExpandOpen(), true, indices); - - if (LOG.isDebugEnabled()) { - LOG.debug("Resolved index expression {} to concrete index names {}", - Arrays.toString(indices), Arrays.toString(concreteIndices)); - } - return concreteIndices; - } - - private IndexMappings findMappings(ClusterState state, String[] indices, - Function> fieldFilter) throws IOException { - LOG.debug("Cache didn't help. 
Load and parse mapping in cluster state"); - return new IndexMappings( - state.metadata().findMappings(indices, fieldFilter) - ); - } - - private IndexMappings findMappingsInCache(ClusterState state, String[] indices) - throws ExecutionException { - LOG.debug("Looking for mapping in cache: {}", cache.asMap()); - return cache.get(sortToList(indices), - () -> findMappings(state, indices, ALL_FIELDS) - ); - } - - private List sortToList(T[] array) { - // Mostly array has single element - Arrays.sort(array); - return Arrays.asList(array); - } - + return concreteIndices; + } + + private IndexMappings findMappings( + ClusterState state, String[] indices, Function> fieldFilter) + throws IOException { + LOG.debug("Cache didn't help. Load and parse mapping in cluster state"); + return new IndexMappings(state.metadata().findMappings(indices, fieldFilter)); + } + + private IndexMappings findMappingsInCache(ClusterState state, String[] indices) + throws ExecutionException { + LOG.debug("Looking for mapping in cache: {}", cache.asMap()); + return cache.get(sortToList(indices), () -> findMappings(state, indices, ALL_FIELDS)); + } + + private List sortToList(T[] array) { + // Mostly array has single element + Arrays.sort(array); + return Arrays.asList(array); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java index bc6c26a6d6..89f8f9ac89 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; @@ -16,121 +15,119 @@ /** * Field mapping that parses native OpenSearch mapping. - *

- * NOTE that approaches in this class are NOT reliable because of the OpenSearch mapping query API used. - * We should deprecate this in future and parse field mapping in more solid way. + * + *

NOTE that approaches in this class are NOT reliable because of the OpenSearch mapping query + * API used. We should deprecate this in future and parse field mapping in more solid way. */ public class FieldMapping { - /** - * Name of the Field to be parsed - */ - private final String fieldName; - - /** - * Native mapping information returned from OpenSearch - */ - private final Map typeMappings; - - /** - * Maps a field name to Field object that specified in query explicitly - */ - private final Map specifiedFieldsByName; - - public FieldMapping(String fieldName) { - this(fieldName, emptyMap(), emptyMap()); - } - - public FieldMapping(String fieldName, - Map typeMappings, - Map specifiedFieldByNames) { - - this.fieldName = fieldName; - this.typeMappings = typeMappings; - this.specifiedFieldsByName = specifiedFieldByNames; - } - - /** - * Is field specified explicitly in query - * - * @return true if specified - */ - public boolean isSpecified() { - return specifiedFieldsByName.containsKey(fieldName); - } - - /** - * Verify if property field matches wildcard pattern specified in query - * - * @return true if matched - */ - public boolean isWildcardSpecified() { - return specifiedFieldsByName.containsKey(path() + ".*"); - } - - /** - * Is field a property field, which means either object field or nested field. 
- * - * @return true for property field - */ - public boolean isPropertyField() { - int numOfDots = StringUtils.countMatches(fieldName, '.'); - return numOfDots > 1 || (numOfDots == 1 && !isMultiField()); - } - - /** - * Is field a/in multi-field, for example, field "a.keyword" in field "a" - * - * @return true for multi field - */ - public boolean isMultiField() { - return fieldName.endsWith(".keyword"); + /** Name of the Field to be parsed */ + private final String fieldName; + + /** Native mapping information returned from OpenSearch */ + private final Map typeMappings; + + /** Maps a field name to Field object that specified in query explicitly */ + private final Map specifiedFieldsByName; + + public FieldMapping(String fieldName) { + this(fieldName, emptyMap(), emptyMap()); + } + + public FieldMapping( + String fieldName, + Map typeMappings, + Map specifiedFieldByNames) { + + this.fieldName = fieldName; + this.typeMappings = typeMappings; + this.specifiedFieldsByName = specifiedFieldByNames; + } + + /** + * Is field specified explicitly in query + * + * @return true if specified + */ + public boolean isSpecified() { + return specifiedFieldsByName.containsKey(fieldName); + } + + /** + * Verify if property field matches wildcard pattern specified in query + * + * @return true if matched + */ + public boolean isWildcardSpecified() { + return specifiedFieldsByName.containsKey(path() + ".*"); + } + + /** + * Is field a property field, which means either object field or nested field. + * + * @return true for property field + */ + public boolean isPropertyField() { + int numOfDots = StringUtils.countMatches(fieldName, '.'); + return numOfDots > 1 || (numOfDots == 1 && !isMultiField()); + } + + /** + * Is field a/in multi-field, for example, field "a.keyword" in field "a" + * + * @return true for multi field + */ + public boolean isMultiField() { + return fieldName.endsWith(".keyword"); + } + + /** + * Is field meta field, such as _id, _index, _source etc. 
+ * + * @return true for meta field + */ + public boolean isMetaField() { + return fieldName.startsWith("_"); + } + + /** + * Path of property field, for example "employee" in "employee.manager" + * + * @return path of property field + */ + public String path() { + int lastDot = fieldName.lastIndexOf("."); + if (lastDot == -1) { + throw new IllegalStateException( + "path() is being invoked on the wrong field [" + fieldName + "]"); } - - /** - * Is field meta field, such as _id, _index, _source etc. - * - * @return true for meta field - */ - public boolean isMetaField() { - return fieldName.startsWith("_"); - } - - /** - * Path of property field, for example "employee" in "employee.manager" - * - * @return path of property field - */ - public String path() { - int lastDot = fieldName.lastIndexOf("."); - if (lastDot == -1) { - throw new IllegalStateException("path() is being invoked on the wrong field [" + fieldName + "]"); - } - return fieldName.substring(0, lastDot); - } - - /** - * Find field type in OpenSearch Get Field Mapping API response. Note that Get Field Mapping API does NOT return - * the type for object or nested field. In this case, object type is used as default under the assumption - * that the field queried here must exist (which is true if semantic analyzer is enabled). - * - * @return field type if found in mapping, otherwise "object" type returned - */ - @SuppressWarnings("unchecked") - public String type() { - FieldMappingMetadata metaData = typeMappings.get(fieldName); - if (metaData == null) { - return DescribeResultSet.DEFAULT_OBJECT_DATATYPE; - } - - Map source = metaData.sourceAsMap(); - String[] fieldPath = fieldName.split("\\."); - - // For object/nested field, fieldName is full path though only innermost field name present in mapping - // For example, fieldName='employee.location.city', metaData='{"city":{"type":"text"}}' - String innermostFieldName = (fieldPath.length == 1) ? 
fieldName : fieldPath[fieldPath.length - 1]; - Map fieldMapping = (Map) source.get(innermostFieldName); - return (String) fieldMapping.get("type"); + return fieldName.substring(0, lastDot); + } + + /** + * Find field type in OpenSearch Get Field Mapping API response. Note that Get Field Mapping API + * does NOT return the type for object or nested field. In this case, object type is used as + * default under the assumption that the field queried here must exist (which is true if semantic + * analyzer is enabled). + * + * @return field type if found in mapping, otherwise "object" type returned + */ + @SuppressWarnings("unchecked") + public String type() { + FieldMappingMetadata metaData = typeMappings.get(fieldName); + if (metaData == null) { + return DescribeResultSet.DEFAULT_OBJECT_DATATYPE; } + Map source = metaData.sourceAsMap(); + String[] fieldPath = fieldName.split("\\."); + + // For object/nested field, fieldName is full path though only innermost field name present in + // mapping + // For example, fieldName='employee.location.city', metaData='{"city":{"type":"text"}}' + String innermostFieldName = + (fieldPath.length == 1) ? fieldName : fieldPath[fieldPath.length - 1]; + Map fieldMapping = (Map) source.get(innermostFieldName); + return (String) fieldMapping.get("type"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java index 6f73da62e4..05b3f2854e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import java.util.HashMap; @@ -15,6 +14,7 @@ import org.opensearch.cluster.metadata.MappingMetadata; /** + *

  * Field mappings in a specific type.
  * 

* Sample: @@ -35,113 +35,104 @@ * 'keyword': { * 'type': keyword, * 'ignore_above': 256 - * } - * } - * } - * } - * } + * }}}}} + *

*/ @SuppressWarnings("unchecked") public class FieldMappings implements Mappings> { - private static final String PROPERTIES = "properties"; + private static final String PROPERTIES = "properties"; - /** - * Mapping from field name to its type - */ - private final Map fieldMappings; + /** Mapping from field name to its type */ + private final Map fieldMappings; - public FieldMappings(MappingMetadata mappings) { - fieldMappings = mappings.sourceAsMap(); - } + public FieldMappings(MappingMetadata mappings) { + fieldMappings = mappings.sourceAsMap(); + } - public FieldMappings(Map> mapping) { - Map finalMapping = new HashMap<>(); - finalMapping.put(PROPERTIES, mapping); - fieldMappings = finalMapping; - } + public FieldMappings(Map> mapping) { + Map finalMapping = new HashMap<>(); + finalMapping.put(PROPERTIES, mapping); + fieldMappings = finalMapping; + } - @Override - public boolean has(String path) { - return mapping(path) != null; - } + @Override + public boolean has(String path) { + return mapping(path) != null; + } - /** - * Different from default implementation that search mapping for path is required - */ - @Override - public Map mapping(String path) { - Map mapping = fieldMappings; - for (String name : path.split("\\.")) { - if (mapping == null || !mapping.containsKey(PROPERTIES)) { - return null; - } - - mapping = (Map) - ((Map) mapping.get(PROPERTIES)).get(name); - } - return mapping; - } + /** Different from default implementation that search mapping for path is required */ + @Override + public Map mapping(String path) { + Map mapping = fieldMappings; + for (String name : path.split("\\.")) { + if (mapping == null || !mapping.containsKey(PROPERTIES)) { + return null; + } - @Override - public Map> data() { - // Is this assumption true? Is it possible mapping of field is NOT a Map? 
- return (Map>) fieldMappings.get(PROPERTIES); + mapping = (Map) ((Map) mapping.get(PROPERTIES)).get(name); } - - public void flat(BiConsumer func) { - flatMappings(data(), Optional.empty(), func); + return mapping; + } + + @Override + public Map> data() { + // Is this assumption true? Is it possible mapping of field is NOT a Map? + return (Map>) fieldMappings.get(PROPERTIES); + } + + public void flat(BiConsumer func) { + flatMappings(data(), Optional.empty(), func); + } + + @SuppressWarnings("unchecked") + private void flatMappings( + Map> mappings, + Optional path, + BiConsumer func) { + mappings.forEach( + (fieldName, mapping) -> { + String fullFieldName = path.map(s -> s + "." + fieldName).orElse(fieldName); + String type = (String) mapping.getOrDefault("type", "object"); + func.accept(fullFieldName, type); + + if (mapping.containsKey("fields")) { + ((Map>) mapping.get("fields")) + .forEach( + (innerFieldName, innerMapping) -> + func.accept( + fullFieldName + "." + innerFieldName, + (String) innerMapping.getOrDefault("type", "object"))); + } + + if (mapping.containsKey("properties")) { + flatMappings( + (Map>) mapping.get("properties"), + Optional.of(fullFieldName), + func); + } + }); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @SuppressWarnings("unchecked") - private void flatMappings(Map> mappings, - Optional path, - BiConsumer func) { - mappings.forEach( - (fieldName, mapping) -> { - String fullFieldName = path.map(s -> s + "." + fieldName).orElse(fieldName); - String type = (String) mapping.getOrDefault("type", "object"); - func.accept(fullFieldName, type); - - if (mapping.containsKey("fields")) { - ((Map>) mapping.get("fields")).forEach( - (innerFieldName, innerMapping) -> - func.accept(fullFieldName + "." 
+ innerFieldName, - (String) innerMapping.getOrDefault("type", "object")) - ); - } - - if (mapping.containsKey("properties")) { - flatMappings( - (Map>) mapping.get("properties"), - Optional.of(fullFieldName), - func - ); - } - } - ); + if (o == null || getClass() != o.getClass()) { + return false; } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - FieldMappings that = (FieldMappings) o; - return Objects.equals(fieldMappings, that.fieldMappings); - } - - @Override - public int hashCode() { - return Objects.hash(fieldMappings); - } - - @Override - public String toString() { - return "FieldMappings" + new JSONObject(fieldMappings).toString(2); - } - + FieldMappings that = (FieldMappings) o; + return Objects.equals(fieldMappings, that.fieldMappings); + } + + @Override + public int hashCode() { + return Objects.hash(fieldMappings); + } + + @Override + public String toString() { + return "FieldMappings" + new JSONObject(fieldMappings).toString(2); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java index 3b89eef02f..22cb99c44e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; @@ -14,6 +13,7 @@ import org.opensearch.cluster.metadata.Metadata; /** + *
  * Index mappings in the cluster.
  * 

* Sample: @@ -30,53 +30,53 @@ * 2) FieldMetadata: * ((Map) client.admin().indices().getFieldMappings(request).actionGet().mappings().get("bank") * .get("account").get("balance").sourceAsMap().get("balance")).get("type") + *

*/ public class IndexMappings implements Mappings { - public static final IndexMappings EMPTY = new IndexMappings(); + public static final IndexMappings EMPTY = new IndexMappings(); - /** - * Mapping from Index name to mappings of all fields in it - */ - private final Map indexMappings; + /** Mapping from Index name to mappings of all fields in it */ + private final Map indexMappings; - public IndexMappings() { - this.indexMappings = emptyMap(); - } + public IndexMappings() { + this.indexMappings = emptyMap(); + } - public IndexMappings(Metadata metaData) { - this.indexMappings = buildMappings(metaData.indices(), - indexMetaData -> new FieldMappings(indexMetaData.mapping())); - } + public IndexMappings(Metadata metaData) { + this.indexMappings = + buildMappings( + metaData.indices(), indexMetaData -> new FieldMappings(indexMetaData.mapping())); + } - public IndexMappings(Map mappings) { - this.indexMappings = buildMappings(mappings, FieldMappings::new); - } + public IndexMappings(Map mappings) { + this.indexMappings = buildMappings(mappings, FieldMappings::new); + } - @Override - public Map data() { - return indexMappings; - } + @Override + public Map data() { + return indexMappings; + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - IndexMappings that = (IndexMappings) o; - return Objects.equals(indexMappings, that.indexMappings); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public int hashCode() { - return Objects.hash(indexMappings); + if (o == null || getClass() != o.getClass()) { + return false; } + IndexMappings that = (IndexMappings) o; + return Objects.equals(indexMappings, that.indexMappings); + } - @Override - public String toString() { - return "IndexMappings{" + indexMappings + '}'; - } + @Override + public int hashCode() { + return Objects.hash(indexMappings); + } + + @Override + public 
String toString() { + return "IndexMappings{" + indexMappings + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java index 03bfcaf030..3cf02b55d8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import java.util.Collection; @@ -12,43 +11,43 @@ import java.util.stream.Collectors; /** - * Mappings interface to provide default implementation (minimal set of Map methods) for subclass in hierarchy. + * Mappings interface to provide default implementation (minimal set of Map methods) for subclass in + * hierarchy. * * @param Type of nested mapping */ public interface Mappings { - default boolean has(String name) { - return data().containsKey(name); - } + default boolean has(String name) { + return data().containsKey(name); + } - default Collection allNames() { - return data().keySet(); - } + default Collection allNames() { + return data().keySet(); + } - default T mapping(String name) { - return data().get(name); - } + default T mapping(String name) { + return data().get(name); + } - default T firstMapping() { - return allMappings().iterator().next(); - } + default T firstMapping() { + return allMappings().iterator().next(); + } - default Collection allMappings() { - return data().values(); - } + default Collection allMappings() { + return data().values(); + } - default boolean isEmpty() { - return data().isEmpty(); - } + default boolean isEmpty() { + return data().isEmpty(); + } - Map data(); + Map data(); - /** - * Build a map from an existing map by applying provided function to each value. 
- */ - default Map buildMappings(Map mappings, Function func) { - return mappings.entrySet().stream().collect( + /** Build a map from an existing map by applying provided function to each value. */ + default Map buildMappings(Map mappings, Function func) { + return mappings.entrySet().stream() + .collect( Collectors.toUnmodifiableMap(Map.Entry::getKey, func.compose(Map.Entry::getValue))); - } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java index d56ff231e0..c58bba9e26 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import org.opensearch.sql.legacy.executor.csv.CSVResultRestExecutor; @@ -12,41 +11,37 @@ import org.opensearch.sql.legacy.query.join.OpenSearchJoinQueryAction; import org.opensearch.sql.legacy.query.multi.MultiQueryAction; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. */ public class ActionRequestRestExecutorFactory { - /** - * Create executor based on the format and wrap with AsyncRestExecutor - * to async blocking execute() call if necessary. 
- * - * @param format format of response - * @param queryAction query action - * @return executor - */ - public static RestExecutor createExecutor(Format format, QueryAction queryAction) { - switch (format) { - case CSV: - return new AsyncRestExecutor(new CSVResultRestExecutor()); - case JSON: - return new AsyncRestExecutor( - new ElasticDefaultRestExecutor(queryAction), - action -> isJoin(action) || isUnionMinus(action) - ); - case JDBC: - case RAW: - case TABLE: - default: - return new AsyncRestExecutor(new PrettyFormatRestExecutor(format.getFormatName())); - } + /** + * Create executor based on the format and wrap with AsyncRestExecutor to async blocking execute() + * call if necessary. + * + * @param format format of response + * @param queryAction query action + * @return executor + */ + public static RestExecutor createExecutor(Format format, QueryAction queryAction) { + switch (format) { + case CSV: + return new AsyncRestExecutor(new CSVResultRestExecutor()); + case JSON: + return new AsyncRestExecutor( + new ElasticDefaultRestExecutor(queryAction), + action -> isJoin(action) || isUnionMinus(action)); + case JDBC: + case RAW: + case TABLE: + default: + return new AsyncRestExecutor(new PrettyFormatRestExecutor(format.getFormatName())); } + } - private static boolean isJoin(QueryAction queryAction) { - return queryAction instanceof OpenSearchJoinQueryAction; - } - - private static boolean isUnionMinus(QueryAction queryAction) { - return queryAction instanceof MultiQueryAction; - } + private static boolean isJoin(QueryAction queryAction) { + return queryAction instanceof OpenSearchJoinQueryAction; + } + private static boolean isUnionMinus(QueryAction queryAction) { + return queryAction instanceof MultiQueryAction; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java index 454babd2e9..c47092f10b 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import com.google.common.collect.ImmutableMap; @@ -14,25 +13,25 @@ @RequiredArgsConstructor public enum Format { - JDBC("jdbc"), - JSON("json"), - CSV("csv"), - RAW("raw"), - TABLE("table"); + JDBC("jdbc"), + JSON("json"), + CSV("csv"), + RAW("raw"), + TABLE("table"); - @Getter - private final String formatName; + @Getter private final String formatName; - private static final Map ALL_FORMATS; - static { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (Format format : Format.values()) { - builder.put(format.formatName, format); - } - ALL_FORMATS = builder.build(); - } + private static final Map ALL_FORMATS; - public static Optional of(String formatName) { - return Optional.ofNullable(ALL_FORMATS.getOrDefault(formatName, null)); + static { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Format format : Format.values()) { + builder.put(format.formatName, format); } + ALL_FORMATS = builder.build(); + } + + public static Optional of(String formatName) { + return Optional.ofNullable(ALL_FORMATS.getOrDefault(formatName, null)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java index be6677a405..58808ee8f3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; @@ -22,80 +21,79 @@ import org.opensearch.rest.action.RestBuilderListener; import 
org.opensearch.sql.legacy.antlr.semantic.SemanticAnalysisException; -/** - * Created by Eliran on 6/10/2015. - */ +/** Created by Eliran on 6/10/2015. */ public class GetIndexRequestRestListener extends RestBuilderListener { - private GetIndexRequest getIndexRequest; - - public GetIndexRequestRestListener(RestChannel channel, GetIndexRequest getIndexRequest) { - super(channel); - this.getIndexRequest = getIndexRequest; - } + private GetIndexRequest getIndexRequest; - @Override - public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBuilder builder) throws Exception { - GetIndexRequest.Feature[] features = getIndexRequest.features(); - String[] indices = getIndexResponse.indices(); + public GetIndexRequestRestListener(RestChannel channel, GetIndexRequest getIndexRequest) { + super(channel); + this.getIndexRequest = getIndexRequest; + } - builder.startObject(); - for (String index : indices) { - builder.startObject(index); - for (GetIndexRequest.Feature feature : features) { - switch (feature) { - case ALIASES: - writeAliases(getIndexResponse.aliases().get(index), builder, channel.request()); - break; - case MAPPINGS: - writeMappings(getIndexResponse.mappings().get(index), builder, channel.request()); - break; - case SETTINGS: - writeSettings(getIndexResponse.settings().get(index), builder, channel.request()); - break; - default: - throw new SemanticAnalysisException("Unsupported feature: " + feature); - } - } - builder.endObject(); + @Override + public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBuilder builder) + throws Exception { + GetIndexRequest.Feature[] features = getIndexRequest.features(); + String[] indices = getIndexResponse.indices(); + builder.startObject(); + for (String index : indices) { + builder.startObject(index); + for (GetIndexRequest.Feature feature : features) { + switch (feature) { + case ALIASES: + writeAliases(getIndexResponse.aliases().get(index), builder, channel.request()); + break; + case 
MAPPINGS: + writeMappings(getIndexResponse.mappings().get(index), builder, channel.request()); + break; + case SETTINGS: + writeSettings(getIndexResponse.settings().get(index), builder, channel.request()); + break; + default: + throw new SemanticAnalysisException("Unsupported feature: " + feature); } - builder.endObject(); - - return new BytesRestResponse(RestStatus.OK, builder); + } + builder.endObject(); } + builder.endObject(); - private void writeAliases(List aliases, XContentBuilder builder, ToXContent.Params params) - throws IOException { - builder.startObject(Fields.ALIASES); - if (aliases != null) { - for (AliasMetadata alias : aliases) { - AliasMetadata.Builder.toXContent(alias, builder, params); - } - } - builder.endObject(); - } + return new BytesRestResponse(RestStatus.OK, builder); + } - private void writeSettings(Settings settings, XContentBuilder builder, ToXContent.Params params) - throws IOException { - builder.startObject(Fields.SETTINGS); - settings.toXContent(builder, params); - builder.endObject(); + private void writeAliases( + List aliases, XContentBuilder builder, ToXContent.Params params) + throws IOException { + builder.startObject(Fields.ALIASES); + if (aliases != null) { + for (AliasMetadata alias : aliases) { + AliasMetadata.Builder.toXContent(alias, builder, params); + } } + builder.endObject(); + } - private void writeMappings(MappingMetadata mappingMetadata, - XContentBuilder builder, ToXContent.Params params) throws IOException { - if ( mappingMetadata != null) { - builder.field(Fields.MAPPINGS); - builder.map(mappingMetadata.getSourceAsMap()); - } - } + private void writeSettings(Settings settings, XContentBuilder builder, ToXContent.Params params) + throws IOException { + builder.startObject(Fields.SETTINGS); + settings.toXContent(builder, params); + builder.endObject(); + } - - static class Fields { - static final String ALIASES = "aliases"; - static final String MAPPINGS = "mappings"; - static final String SETTINGS = "settings"; 
- static final String WARMERS = "warmers"; + private void writeMappings( + MappingMetadata mappingMetadata, XContentBuilder builder, ToXContent.Params params) + throws IOException { + if (mappingMetadata != null) { + builder.field(Fields.MAPPINGS); + builder.map(mappingMetadata.getSourceAsMap()); } + } + + static class Fields { + static final String ALIASES = "aliases"; + static final String MAPPINGS = "mappings"; + static final String SETTINGS = "settings"; + static final String WARMERS = "warmers"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java index 5297fa38ff..aa0d02bed8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import org.json.JSONObject; @@ -11,59 +10,59 @@ public class ErrorMessage { - protected E exception; + protected E exception; - private int status; - private String type; - private String reason; - private String details; + private int status; + private String type; + private String reason; + private String details; - public ErrorMessage(E exception, int status) { - this.exception = exception; - this.status = status; + public ErrorMessage(E exception, int status) { + this.exception = exception; + this.status = status; - this.type = fetchType(); - this.reason = fetchReason(); - this.details = fetchDetails(); - } + this.type = fetchType(); + this.reason = fetchReason(); + this.details = fetchDetails(); + } - private String fetchType() { - return exception.getClass().getSimpleName(); - } + private String fetchType() { + return exception.getClass().getSimpleName(); + } - protected String fetchReason() { - return status == RestStatus.BAD_REQUEST.getStatus() - ? 
"Invalid SQL query" - : "There was internal problem at backend"; - } + protected String fetchReason() { + return status == RestStatus.BAD_REQUEST.getStatus() + ? "Invalid SQL query" + : "There was internal problem at backend"; + } - protected String fetchDetails() { - // Some exception prints internal information (full class name) which is security concern - //return exception.toString(); - return emptyStringIfNull(exception.getLocalizedMessage()); - } + protected String fetchDetails() { + // Some exception prints internal information (full class name) which is security concern + // return exception.toString(); + return emptyStringIfNull(exception.getLocalizedMessage()); + } - private String emptyStringIfNull(String str) { - return str != null ? str : ""; - } + private String emptyStringIfNull(String str) { + return str != null ? str : ""; + } - @Override - public String toString() { - JSONObject output = new JSONObject(); + @Override + public String toString() { + JSONObject output = new JSONObject(); - output.put("status", status); - output.put("error", getErrorAsJson()); + output.put("status", status); + output.put("error", getErrorAsJson()); - return output.toString(2); - } + return output.toString(2); + } - private JSONObject getErrorAsJson() { - JSONObject errorJson = new JSONObject(); + private JSONObject getErrorAsJson() { + JSONObject errorJson = new JSONObject(); - errorJson.put("type", type); - errorJson.put("reason", reason); - errorJson.put("details", details); + errorJson.put("type", type); + errorJson.put("reason", reason); + errorJson.put("details", details); - return errorJson; - } + return errorJson; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java index 0e96fe9b67..ba28ee8325 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java @@ -3,42 +3,40 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import org.opensearch.OpenSearchException; public class ErrorMessageFactory { - /** - * Create error message based on the exception type - * Exceptions of OpenSearch exception type and exceptions with wrapped OpenSearch exception causes - * should create {@link OpenSearchErrorMessage} - * - * @param e exception to create error message - * @param status exception status code - * @return error message - */ - - public static ErrorMessage createErrorMessage(Exception e, int status) { - if (e instanceof OpenSearchException) { - return new OpenSearchErrorMessage((OpenSearchException) e, - ((OpenSearchException) e).status().getStatus()); - } else if (unwrapCause(e) instanceof OpenSearchException) { - OpenSearchException exception = (OpenSearchException) unwrapCause(e); - return new OpenSearchErrorMessage(exception, exception.status().getStatus()); - } - return new ErrorMessage(e, status); + /** + * Create error message based on the exception type Exceptions of OpenSearch exception type and + * exceptions with wrapped OpenSearch exception causes should create {@link + * OpenSearchErrorMessage} + * + * @param e exception to create error message + * @param status exception status code + * @return error message + */ + public static ErrorMessage createErrorMessage(Exception e, int status) { + if (e instanceof OpenSearchException) { + return new OpenSearchErrorMessage( + (OpenSearchException) e, ((OpenSearchException) e).status().getStatus()); + } else if (unwrapCause(e) instanceof OpenSearchException) { + OpenSearchException exception = (OpenSearchException) unwrapCause(e); + return new OpenSearchErrorMessage(exception, exception.status().getStatus()); } + return new ErrorMessage(e, status); + } - public static Throwable unwrapCause(Throwable t) { - Throwable result = t; - if 
(result instanceof OpenSearchException) { - return result; - } - if (result.getCause() == null) { - return result; - } - result = unwrapCause(result.getCause()); - return result; + public static Throwable unwrapCause(Throwable t) { + Throwable result = t; + if (result instanceof OpenSearchException) { + return result; + } + if (result.getCause() == null) { + return result; } + result = unwrapCause(result.getCause()); + return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java index 7269e271f4..7b6228a3d2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; @@ -30,136 +29,144 @@ import org.opensearch.sql.legacy.domain.Select; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; -/** - * Created by Eliran on 2/9/2016. - */ +/** Created by Eliran on 2/9/2016. 
*/ public class ElasticUtils { - public static SearchResponse scrollOneTimeWithHits(Client client, SearchRequestBuilder requestBuilder, - Select originalSelect, int resultSize) { - SearchRequestBuilder scrollRequest = requestBuilder - .setScroll(new TimeValue(60000)).setSize(resultSize); - boolean ordered = originalSelect.isOrderdSelect(); - if (!ordered) { - scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); - } - SearchResponse responseWithHits = scrollRequest.get(); - //on ordered select - not using SCAN , elastic returns hits on first scroll - //es5.0 elastic always return docs on scan -// if(!ordered) { -// responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) -// .setScroll(new TimeValue(600000)).get(); -// } - return responseWithHits; + public static SearchResponse scrollOneTimeWithHits( + Client client, SearchRequestBuilder requestBuilder, Select originalSelect, int resultSize) { + SearchRequestBuilder scrollRequest = + requestBuilder.setScroll(new TimeValue(60000)).setSize(resultSize); + boolean ordered = originalSelect.isOrderdSelect(); + if (!ordered) { + scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); + } + SearchResponse responseWithHits = scrollRequest.get(); + // on ordered select - not using SCAN , elastic returns hits on first scroll + // es5.0 elastic always return docs on scan + // if(!ordered) { + // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + // } + return responseWithHits; + } + + // use our deserializer instead of results toXcontent because the source field is different from + // sourceAsMap. 
+ public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) + throws IOException { + if (results == null) { + return null; + } + Object[] searchHits; + searchHits = + new Object + [Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L).intValue()]; + int i = 0; + for (SearchHit hit : results) { + HashMap value = new HashMap<>(); + value.put("_id", hit.getId()); + value.put("_score", hit.getScore()); + value.put("_source", hit.getSourceAsMap()); + searchHits[i] = value; + i++; + } + HashMap hits = new HashMap<>(); + hits.put( + "total", + ImmutableMap.of( + "value", Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L), + "relation", + Optional.ofNullable(results.getTotalHits()) + .map(th -> th.relation) + .orElse(Relation.EQUAL_TO))); + hits.put("max_score", results.getMaxScore()); + hits.put("hits", searchHits); + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.field("took", metaResults.getTookImMilli()); + builder.field("timed_out", metaResults.isTimedOut()); + builder.field( + "_shards", + ImmutableMap.of( + "total", + metaResults.getTotalNumOfShards(), + "successful", + metaResults.getSuccessfulShards(), + "failed", + metaResults.getFailedShards())); + builder.field("hits", hits); + builder.endObject(); + return BytesReference.bytes(builder).utf8ToString(); + } + + /** Generate string by serializing SearchHits in place without any new HashMap copy */ + public static XContentBuilder hitsAsStringResultZeroCopy( + List results, MetaSearchResult metaResults, ElasticJoinExecutor executor) + throws IOException { + BytesStreamOutput outputStream = new BytesStreamOutput(); + + XContentBuilder builder = XContentFactory.jsonBuilder(outputStream).prettyPrint(); + builder.startObject(); + builder.field("took", metaResults.getTookImMilli()); + builder.field("timed_out", metaResults.isTimedOut()); + builder.field( + "_shards", + ImmutableMap.of( 
+ "total", metaResults.getTotalNumOfShards(), + "successful", metaResults.getSuccessfulShards(), + "failed", metaResults.getFailedShards())); + toXContent(builder, EMPTY_PARAMS, results, executor); + builder.endObject(); + + if (!BackOffRetryStrategy.isHealthy(2 * outputStream.size(), executor)) { + throw new IllegalStateException("Memory could be insufficient when sendResponse()."); } - - //use our deserializer instead of results toXcontent because the source field is different from sourceAsMap. - public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) throws IOException { - if (results == null) { - return null; - } - Object[] searchHits; - searchHits = new Object[Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L).intValue()]; - int i = 0; - for (SearchHit hit : results) { - HashMap value = new HashMap<>(); - value.put("_id", hit.getId()); - value.put("_score", hit.getScore()); - value.put("_source", hit.getSourceAsMap()); - searchHits[i] = value; - i++; - } - HashMap hits = new HashMap<>(); - hits.put("total", ImmutableMap.of( - "value", Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L), - "relation", Optional.ofNullable(results.getTotalHits()).map(th -> th.relation).orElse(Relation.EQUAL_TO) - )); - hits.put("max_score", results.getMaxScore()); - hits.put("hits", searchHits); - XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); - builder.startObject(); - builder.field("took", metaResults.getTookImMilli()); - builder.field("timed_out", metaResults.isTimedOut()); - builder.field("_shards", ImmutableMap.of("total", metaResults.getTotalNumOfShards(), - "successful", metaResults.getSuccessfulShards() - , "failed", metaResults.getFailedShards())); - builder.field("hits", hits); - builder.endObject(); - return BytesReference.bytes(builder).utf8ToString(); + return builder; + } + + /** Code copy from SearchHits */ + private static void toXContent( + XContentBuilder 
builder, Params params, List hits, ElasticJoinExecutor executor) + throws IOException { + builder.startObject(SearchHits.Fields.HITS); + builder.field( + SearchHits.Fields.TOTAL, + ImmutableMap.of("value", hits.size(), "relation", Relation.EQUAL_TO)); + builder.field(SearchHits.Fields.MAX_SCORE, 1.0f); + builder.field(SearchHits.Fields.HITS); + builder.startArray(); + + for (int i = 0; i < hits.size(); i++) { + if (i % 10000 == 0 && !BackOffRetryStrategy.isHealthy()) { + throw new IllegalStateException("Memory circuit break when generating json builder"); + } + toXContent(builder, params, hits.get(i)); } - /** - * Generate string by serializing SearchHits in place without any new HashMap copy - */ - public static XContentBuilder hitsAsStringResultZeroCopy(List results, MetaSearchResult metaResults, - ElasticJoinExecutor executor) throws IOException { - BytesStreamOutput outputStream = new BytesStreamOutput(); - - XContentBuilder builder = XContentFactory.jsonBuilder(outputStream).prettyPrint(); - builder.startObject(); - builder.field("took", metaResults.getTookImMilli()); - builder.field("timed_out", metaResults.isTimedOut()); - builder.field("_shards", ImmutableMap.of( - "total", metaResults.getTotalNumOfShards(), - "successful", metaResults.getSuccessfulShards(), - "failed", metaResults.getFailedShards() - )); - toXContent(builder, EMPTY_PARAMS, results, executor); - builder.endObject(); - - if (!BackOffRetryStrategy.isHealthy(2 * outputStream.size(), executor)) { - throw new IllegalStateException("Memory could be insufficient when sendResponse()."); - } - - return builder; + builder.endArray(); + builder.endObject(); + } + + /** Code copy from SearchHit but only keep fields interested and replace source by sourceMap */ + private static void toXContent(XContentBuilder builder, Params params, SearchHit hit) + throws IOException { + builder.startObject(); + if (hit.getId() != null) { + builder.field("_id", hit.getId()); } - /** - * Code copy from SearchHits - */ - 
private static void toXContent(XContentBuilder builder, Params params, List hits, - ElasticJoinExecutor executor) throws IOException { - builder.startObject(SearchHits.Fields.HITS); - builder.field(SearchHits.Fields.TOTAL, ImmutableMap.of( - "value", hits.size(), - "relation", Relation.EQUAL_TO - )); - builder.field(SearchHits.Fields.MAX_SCORE, 1.0f); - builder.field(SearchHits.Fields.HITS); - builder.startArray(); - - for (int i = 0; i < hits.size(); i++) { - if (i % 10000 == 0 && !BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit break when generating json builder"); - } - toXContent(builder, params, hits.get(i)); - } - - builder.endArray(); - builder.endObject(); + if (Float.isNaN(hit.getScore())) { + builder.nullField("_score"); + } else { + builder.field("_score", hit.getScore()); } - /** - * Code copy from SearchHit but only keep fields interested and replace source by sourceMap + /* + * Use sourceMap rather than binary source because source is out-of-date + * and only used when creating a new instance of SearchHit */ - private static void toXContent(XContentBuilder builder, Params params, SearchHit hit) throws IOException { - builder.startObject(); - if (hit.getId() != null) { - builder.field("_id", hit.getId()); - } - - if (Float.isNaN(hit.getScore())) { - builder.nullField("_score"); - } else { - builder.field("_score", hit.getScore()); - } - - /* - * Use sourceMap rather than binary source because source is out-of-date - * and only used when creating a new instance of SearchHit - */ - builder.field("_source", hit.getSourceAsMap()); - builder.endObject(); - } + builder.field("_source", hit.getSourceAsMap()); + builder.endObject(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java index 52d292a2e5..8216feac66 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.util.ArrayList; @@ -14,55 +13,56 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.domain.Field; -/** - * Created by Eliran on 2/11/2015. - */ +/** Created by Eliran on 2/11/2015. */ public class HashJoinComparisonStructure { - private HashMap>> comparisonIDtoComparisonFields; - private HashMap> comparisonIDtoComparisonHash; + private HashMap>> comparisonIDtoComparisonFields; + private HashMap> comparisonIDtoComparisonHash; - public HashJoinComparisonStructure(List>> t1ToT2FieldsComparisons) { - comparisonIDtoComparisonFields = new HashMap<>(); - comparisonIDtoComparisonHash = new HashMap<>(); - if (t1ToT2FieldsComparisons == null || t1ToT2FieldsComparisons.size() == 0) { - String comparisonId = UUID.randomUUID().toString(); - this.comparisonIDtoComparisonFields.put(comparisonId, new ArrayList>()); - this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); - } - for (List> comparisonFields : t1ToT2FieldsComparisons) { - String comparisonId = UUID.randomUUID().toString(); - //maby from field to List ? 
- this.comparisonIDtoComparisonFields.put(comparisonId, comparisonFields); - this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); - } + public HashJoinComparisonStructure(List>> t1ToT2FieldsComparisons) { + comparisonIDtoComparisonFields = new HashMap<>(); + comparisonIDtoComparisonHash = new HashMap<>(); + if (t1ToT2FieldsComparisons == null || t1ToT2FieldsComparisons.size() == 0) { + String comparisonId = UUID.randomUUID().toString(); + this.comparisonIDtoComparisonFields.put( + comparisonId, new ArrayList>()); + this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); } - - public HashMap>> getComparisons() { - return comparisonIDtoComparisonFields; + for (List> comparisonFields : t1ToT2FieldsComparisons) { + String comparisonId = UUID.randomUUID().toString(); + // maby from field to List ? + this.comparisonIDtoComparisonFields.put(comparisonId, comparisonFields); + this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); } + } - public void insertIntoComparisonHash(String comparisonID, String comparisonKey, SearchHit hit) { - HashMap comparisonHash = this.comparisonIDtoComparisonHash.get(comparisonID); - SearchHitsResult currentSearchHitsResult = comparisonHash.get(comparisonKey); - if (currentSearchHitsResult == null) { - currentSearchHitsResult = new SearchHitsResult(new ArrayList(), false); - comparisonHash.put(comparisonKey, currentSearchHitsResult); - } - currentSearchHitsResult.getSearchHits().add(hit); - } + public HashMap>> getComparisons() { + return comparisonIDtoComparisonFields; + } - public SearchHitsResult searchForMatchingSearchHits(String comparisonID, String comparisonKey) { - HashMap comparisonHash = this.comparisonIDtoComparisonHash.get(comparisonID); - return comparisonHash.get(comparisonKey); + public void insertIntoComparisonHash(String comparisonID, String comparisonKey, SearchHit hit) { + HashMap comparisonHash = + this.comparisonIDtoComparisonHash.get(comparisonID); + SearchHitsResult 
currentSearchHitsResult = comparisonHash.get(comparisonKey); + if (currentSearchHitsResult == null) { + currentSearchHitsResult = new SearchHitsResult(new ArrayList(), false); + comparisonHash.put(comparisonKey, currentSearchHitsResult); } + currentSearchHitsResult.getSearchHits().add(hit); + } - public List getAllSearchHits() { - List allSearchHits = new ArrayList<>(); + public SearchHitsResult searchForMatchingSearchHits(String comparisonID, String comparisonKey) { + HashMap comparisonHash = + this.comparisonIDtoComparisonHash.get(comparisonID); + return comparisonHash.get(comparisonKey); + } - for (HashMap comparisonHash : this.comparisonIDtoComparisonHash.values()) { - allSearchHits.addAll(comparisonHash.values()); - } - return allSearchHits; - } + public List getAllSearchHits() { + List allSearchHits = new ArrayList<>(); + for (HashMap comparisonHash : + this.comparisonIDtoComparisonHash.values()) { + allSearchHits.addAll(comparisonHash.values()); + } + return allSearchHits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java index 5703cf2ef5..06a913205d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -34,341 +33,385 @@ import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. 
*/ public class HashJoinElasticExecutor extends ElasticJoinExecutor { - private HashJoinElasticRequestBuilder requestBuilder; - - - private Client client; - private boolean useQueryTermsFilterOptimization = false; - private final int MAX_RESULTS_FOR_FIRST_TABLE = 100000; - HashJoinComparisonStructure hashJoinComparisonStructure; - private Set alreadyMatched; - - public HashJoinElasticExecutor(Client client, HashJoinElasticRequestBuilder requestBuilder) { - super(requestBuilder); - this.client = client; - this.requestBuilder = requestBuilder; - this.useQueryTermsFilterOptimization = requestBuilder.isUseTermFiltersOptimization(); - this.hashJoinComparisonStructure = new HashJoinComparisonStructure(requestBuilder.getT1ToT2FieldsComparison()); - this.alreadyMatched = new HashSet<>(); + private HashJoinElasticRequestBuilder requestBuilder; + + private Client client; + private boolean useQueryTermsFilterOptimization = false; + private final int MAX_RESULTS_FOR_FIRST_TABLE = 100000; + HashJoinComparisonStructure hashJoinComparisonStructure; + private Set alreadyMatched; + + public HashJoinElasticExecutor(Client client, HashJoinElasticRequestBuilder requestBuilder) { + super(requestBuilder); + this.client = client; + this.requestBuilder = requestBuilder; + this.useQueryTermsFilterOptimization = requestBuilder.isUseTermFiltersOptimization(); + this.hashJoinComparisonStructure = + new HashJoinComparisonStructure(requestBuilder.getT1ToT2FieldsComparison()); + this.alreadyMatched = new HashSet<>(); + } + + public List innerRun() throws IOException, SqlParseException { + + Map>> optimizationTermsFilterStructure = + initOptimizationStructure(); + + updateFirstTableLimitIfNeeded(); + TableInJoinRequestBuilder firstTableRequest = requestBuilder.getFirstTable(); + createKeyToResultsAndFillOptimizationStructure( + optimizationTermsFilterStructure, firstTableRequest); + + TableInJoinRequestBuilder secondTableRequest = requestBuilder.getSecondTable(); + if 
(needToOptimize(optimizationTermsFilterStructure)) { + updateRequestWithTermsFilter(optimizationTermsFilterStructure, secondTableRequest); } - public List innerRun() throws IOException, SqlParseException { - - Map>> optimizationTermsFilterStructure = initOptimizationStructure(); - - updateFirstTableLimitIfNeeded(); - TableInJoinRequestBuilder firstTableRequest = requestBuilder.getFirstTable(); - createKeyToResultsAndFillOptimizationStructure(optimizationTermsFilterStructure, firstTableRequest); - - TableInJoinRequestBuilder secondTableRequest = requestBuilder.getSecondTable(); - if (needToOptimize(optimizationTermsFilterStructure)) { - updateRequestWithTermsFilter(optimizationTermsFilterStructure, secondTableRequest); - } - - List combinedResult = createCombinedResults(secondTableRequest); - - int currentNumOfResults = combinedResult.size(); - int totalLimit = requestBuilder.getTotalLimit(); - if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN - && currentNumOfResults < totalLimit) { - String t1Alias = requestBuilder.getFirstTable().getAlias(); - String t2Alias = requestBuilder.getSecondTable().getAlias(); - //todo: for each till Limit - addUnmatchedResults(combinedResult, this.hashJoinComparisonStructure.getAllSearchHits(), - requestBuilder.getSecondTable().getReturnedFields(), - currentNumOfResults, totalLimit, - t1Alias, - t2Alias); - } - if (firstTableRequest.getOriginalSelect().isOrderdSelect()) { - Collections.sort(combinedResult, new Comparator() { - @Override - public int compare(SearchHit o1, SearchHit o2) { - return o1.docId() - o2.docId(); - } - }); - - } - return combinedResult; + List combinedResult = createCombinedResults(secondTableRequest); + + int currentNumOfResults = combinedResult.size(); + int totalLimit = requestBuilder.getTotalLimit(); + if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN + && currentNumOfResults < totalLimit) { + String t1Alias = 
requestBuilder.getFirstTable().getAlias(); + String t2Alias = requestBuilder.getSecondTable().getAlias(); + // todo: for each till Limit + addUnmatchedResults( + combinedResult, + this.hashJoinComparisonStructure.getAllSearchHits(), + requestBuilder.getSecondTable().getReturnedFields(), + currentNumOfResults, + totalLimit, + t1Alias, + t2Alias); } - - private Map>> initOptimizationStructure() { - Map>> optimizationTermsFilterStructure = new HashMap<>(); - for (String comparisonId : this.hashJoinComparisonStructure.getComparisons().keySet()) { - optimizationTermsFilterStructure.put(comparisonId, new HashMap>()); - } - return optimizationTermsFilterStructure; - } - - private void updateFirstTableLimitIfNeeded() { - if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { - Integer firstTableHintLimit = requestBuilder.getFirstTable().getHintLimit(); - int totalLimit = requestBuilder.getTotalLimit(); - if (firstTableHintLimit == null || firstTableHintLimit > totalLimit) { - requestBuilder.getFirstTable().setHintLimit(totalLimit); + if (firstTableRequest.getOriginalSelect().isOrderdSelect()) { + Collections.sort( + combinedResult, + new Comparator() { + @Override + public int compare(SearchHit o1, SearchHit o2) { + return o1.docId() - o2.docId(); } - } + }); } + return combinedResult; + } - private List createCombinedResults(TableInJoinRequestBuilder secondTableRequest) { - List combinedResult = new ArrayList<>(); - int resultIds = 0; - int totalLimit = this.requestBuilder.getTotalLimit(); - Integer hintLimit = secondTableRequest.getHintLimit(); - SearchResponse searchResponse; - boolean finishedScrolling; - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - searchResponse = secondTableRequest.getRequestBuilder().setSize(hintLimit).get(); - finishedScrolling = true; - } else { - searchResponse = secondTableRequest.getRequestBuilder() - .setScroll(new TimeValue(60000)) - .setSize(MAX_RESULTS_ON_ONE_FETCH).get(); - //es5.0 no need 
to scroll again! -// searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) -// .setScroll(new TimeValue(600000)).get(); - finishedScrolling = false; + private Map>> initOptimizationStructure() { + Map>> optimizationTermsFilterStructure = new HashMap<>(); + for (String comparisonId : this.hashJoinComparisonStructure.getComparisons().keySet()) { + optimizationTermsFilterStructure.put(comparisonId, new HashMap>()); + } + return optimizationTermsFilterStructure; + } + + private void updateFirstTableLimitIfNeeded() { + if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { + Integer firstTableHintLimit = requestBuilder.getFirstTable().getHintLimit(); + int totalLimit = requestBuilder.getTotalLimit(); + if (firstTableHintLimit == null || firstTableHintLimit > totalLimit) { + requestBuilder.getFirstTable().setHintLimit(totalLimit); + } + } + } + + private List createCombinedResults(TableInJoinRequestBuilder secondTableRequest) { + List combinedResult = new ArrayList<>(); + int resultIds = 0; + int totalLimit = this.requestBuilder.getTotalLimit(); + Integer hintLimit = secondTableRequest.getHintLimit(); + SearchResponse searchResponse; + boolean finishedScrolling; + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + searchResponse = secondTableRequest.getRequestBuilder().setSize(hintLimit).get(); + finishedScrolling = true; + } else { + searchResponse = + secondTableRequest + .getRequestBuilder() + .setScroll(new TimeValue(60000)) + .setSize(MAX_RESULTS_ON_ONE_FETCH) + .get(); + // es5.0 no need to scroll again! 
+ // searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + finishedScrolling = false; + } + updateMetaSearchResults(searchResponse); + + boolean limitReached = false; + int fetchedSoFarFromSecondTable = 0; + while (!limitReached) { + SearchHit[] secondTableHits = searchResponse.getHits().getHits(); + fetchedSoFarFromSecondTable += secondTableHits.length; + for (SearchHit secondTableHit : secondTableHits) { + if (limitReached) { + break; } - updateMetaSearchResults(searchResponse); - - boolean limitReached = false; - int fetchedSoFarFromSecondTable = 0; - while (!limitReached) { - SearchHit[] secondTableHits = searchResponse.getHits().getHits(); - fetchedSoFarFromSecondTable += secondTableHits.length; - for (SearchHit secondTableHit : secondTableHits) { - if (limitReached) { - break; - } - //todo: need to run on comparisons. for each comparison check if exists and add. - HashMap>> comparisons = - this.hashJoinComparisonStructure.getComparisons(); - - for (Map.Entry>> comparison : comparisons.entrySet()) { - String comparisonID = comparison.getKey(); - List> t1ToT2FieldsComparison = comparison.getValue(); - String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHit, false, null); - - SearchHitsResult searchHitsResult = - this.hashJoinComparisonStructure.searchForMatchingSearchHits(comparisonID, key); - - if (searchHitsResult != null && searchHitsResult.getSearchHits().size() > 0) { - searchHitsResult.setMatchedWithOtherTable(true); - List searchHits = searchHitsResult.getSearchHits(); - for (SearchHit matchingHit : searchHits) { - String combinedId = matchingHit.getId() + "|" + secondTableHit.getId(); - //in order to prevent same matching when using OR on hashJoins. 
- if (this.alreadyMatched.contains(combinedId)) { - continue; - } else { - this.alreadyMatched.add(combinedId); - } - - Map copiedSource = new HashMap(); - copyMaps(copiedSource, secondTableHit.getSourceAsMap()); - onlyReturnedFields(copiedSource, secondTableRequest.getReturnedFields(), - secondTableRequest.getOriginalSelect().isSelectAll()); - - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - matchingHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(matchingHit.docId(), combinedId, - documentFields, metaFields); - searchHit.sourceRef(matchingHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(matchingHit.getSourceAsMap()); - String t1Alias = requestBuilder.getFirstTable().getAlias(); - String t2Alias = requestBuilder.getSecondTable().getAlias(); - mergeSourceAndAddAliases(copiedSource, searchHit, t1Alias, t2Alias); - - combinedResult.add(searchHit); - resultIds++; - if (resultIds >= totalLimit) { - limitReached = true; - break; - } - } - } - } - } - if (!finishedScrolling) { - if (secondTableHits.length > 0 && (hintLimit == null || fetchedSoFarFromSecondTable >= hintLimit)) { - searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - } else { - break; - } - } else { + // todo: need to run on comparisons. for each comparison check if exists and add. 
+ HashMap>> comparisons = + this.hashJoinComparisonStructure.getComparisons(); + + for (Map.Entry>> comparison : comparisons.entrySet()) { + String comparisonID = comparison.getKey(); + List> t1ToT2FieldsComparison = comparison.getValue(); + String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHit, false, null); + + SearchHitsResult searchHitsResult = + this.hashJoinComparisonStructure.searchForMatchingSearchHits(comparisonID, key); + + if (searchHitsResult != null && searchHitsResult.getSearchHits().size() > 0) { + searchHitsResult.setMatchedWithOtherTable(true); + List searchHits = searchHitsResult.getSearchHits(); + for (SearchHit matchingHit : searchHits) { + String combinedId = matchingHit.getId() + "|" + secondTableHit.getId(); + // in order to prevent same matching when using OR on hashJoins. + if (this.alreadyMatched.contains(combinedId)) { + continue; + } else { + this.alreadyMatched.add(combinedId); + } + + Map copiedSource = new HashMap(); + copyMaps(copiedSource, secondTableHit.getSourceAsMap()); + onlyReturnedFields( + copiedSource, + secondTableRequest.getReturnedFields(), + secondTableRequest.getOriginalSelect().isSelectAll()); + + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + matchingHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit(matchingHit.docId(), combinedId, documentFields, metaFields); + searchHit.sourceRef(matchingHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(matchingHit.getSourceAsMap()); + String t1Alias = requestBuilder.getFirstTable().getAlias(); + String t2Alias = requestBuilder.getSecondTable().getAlias(); + mergeSourceAndAddAliases(copiedSource, searchHit, t1Alias, t2Alias); + + combinedResult.add(searchHit); + resultIds++; + if (resultIds >= totalLimit) { + limitReached = true; break; + } } + } } - return combinedResult; - } - - private void copyMaps(Map into, Map from) { - for (Map.Entry keyAndValue : from.entrySet()) { - into.put(keyAndValue.getKey(), keyAndValue.getValue()); + } + if (!finishedScrolling) { + if (secondTableHits.length > 0 + && (hintLimit == null || fetchedSoFarFromSecondTable >= hintLimit)) { + searchResponse = + client + .prepareSearchScroll(searchResponse.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + } else { + break; } + } else { + break; + } } + return combinedResult; + } - private void createKeyToResultsAndFillOptimizationStructure( - Map>> optimizationTermsFilterStructure, - TableInJoinRequestBuilder firstTableRequest) { - List firstTableHits = fetchAllHits(firstTableRequest); - - int resultIds = 1; - for (SearchHit hit : firstTableHits) { - HashMap>> comparisons = - this.hashJoinComparisonStructure.getComparisons(); - for (Map.Entry>> comparison : comparisons.entrySet()) { - String comparisonID = comparison.getKey(); - List> t1ToT2FieldsComparison = comparison.getValue(); - - String key = getComparisonKey(t1ToT2FieldsComparison, hit, true, - optimizationTermsFilterStructure.get(comparisonID)); - - //int docid , id - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - 
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(resultIds, hit.getId(), documentFields - , metaFields); - searchHit.sourceRef(hit.getSourceRef()); - - onlyReturnedFields(searchHit.getSourceAsMap(), firstTableRequest.getReturnedFields(), - firstTableRequest.getOriginalSelect().isSelectAll()); - resultIds++; - this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit); - } - } + private void copyMaps(Map into, Map from) { + for (Map.Entry keyAndValue : from.entrySet()) { + into.put(keyAndValue.getKey(), keyAndValue.getValue()); } - - private List fetchAllHits(TableInJoinRequestBuilder tableInJoinRequest) { - Integer hintLimit = tableInJoinRequest.getHintLimit(); - SearchRequestBuilder requestBuilder = tableInJoinRequest.getRequestBuilder(); - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - requestBuilder.setSize(hintLimit); - SearchResponse searchResponse = requestBuilder.get(); - updateMetaSearchResults(searchResponse); - return Arrays.asList(searchResponse.getHits().getHits()); - } - return scrollTillLimit(tableInJoinRequest, hintLimit); + } + + private void createKeyToResultsAndFillOptimizationStructure( + Map>> optimizationTermsFilterStructure, + TableInJoinRequestBuilder firstTableRequest) { + List firstTableHits = fetchAllHits(firstTableRequest); + + int resultIds = 1; + for (SearchHit hit : firstTableHits) { + HashMap>> comparisons = + this.hashJoinComparisonStructure.getComparisons(); + for (Map.Entry>> comparison : comparisons.entrySet()) { + String comparisonID = comparison.getKey(); + List> t1ToT2FieldsComparison = comparison.getValue(); + + String key = + getComparisonKey( + t1ToT2FieldsComparison, + hit, + true, + optimizationTermsFilterStructure.get(comparisonID)); + + // int docid , id + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + 
(fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(resultIds, hit.getId(), documentFields, metaFields); + searchHit.sourceRef(hit.getSourceRef()); + + onlyReturnedFields( + searchHit.getSourceAsMap(), + firstTableRequest.getReturnedFields(), + firstTableRequest.getOriginalSelect().isSelectAll()); + resultIds++; + this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit); + } + } + } + + private List fetchAllHits(TableInJoinRequestBuilder tableInJoinRequest) { + Integer hintLimit = tableInJoinRequest.getHintLimit(); + SearchRequestBuilder requestBuilder = tableInJoinRequest.getRequestBuilder(); + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + requestBuilder.setSize(hintLimit); + SearchResponse searchResponse = requestBuilder.get(); + updateMetaSearchResults(searchResponse); + return Arrays.asList(searchResponse.getHits().getHits()); } + return scrollTillLimit(tableInJoinRequest, hintLimit); + } - private List scrollTillLimit(TableInJoinRequestBuilder tableInJoinRequest, Integer hintLimit) { - SearchResponse scrollResp = scrollOneTimeWithMax(client, tableInJoinRequest); + private List scrollTillLimit( + TableInJoinRequestBuilder tableInJoinRequest, Integer hintLimit) { + SearchResponse scrollResp = scrollOneTimeWithMax(client, tableInJoinRequest); - updateMetaSearchResults(scrollResp); - List hitsWithScan = new ArrayList<>(); - int curentNumOfResults = 0; - SearchHit[] hits = scrollResp.getHits().getHits(); + updateMetaSearchResults(scrollResp); + List hitsWithScan = new ArrayList<>(); + int curentNumOfResults = 0; + SearchHit[] hits = scrollResp.getHits().getHits(); - if (hintLimit == null) { - hintLimit = MAX_RESULTS_FOR_FIRST_TABLE; - } - - while (hits.length != 0 && curentNumOfResults < hintLimit) { - curentNumOfResults += hits.length; - Collections.addAll(hitsWithScan, hits); 
- if (curentNumOfResults >= MAX_RESULTS_FOR_FIRST_TABLE) { - //todo: log or exception? - System.out.println("too many results for first table, stoping at:" + curentNumOfResults); - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)) - .execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - return hitsWithScan; + if (hintLimit == null) { + hintLimit = MAX_RESULTS_FOR_FIRST_TABLE; } - private boolean needToOptimize(Map>> optimizationTermsFilterStructure) { - if (!useQueryTermsFilterOptimization && optimizationTermsFilterStructure != null - && optimizationTermsFilterStructure.size() > 0) { - return false; - } - boolean allEmpty = true; - for (Map> optimization : optimizationTermsFilterStructure.values()) { - if (optimization.size() > 0) { - allEmpty = false; - break; - } - } - return !allEmpty; + while (hits.length != 0 && curentNumOfResults < hintLimit) { + curentNumOfResults += hits.length; + Collections.addAll(hitsWithScan, hits); + if (curentNumOfResults >= MAX_RESULTS_FOR_FIRST_TABLE) { + // todo: log or exception? 
+ System.out.println("too many results for first table, stoping at:" + curentNumOfResults); + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void updateRequestWithTermsFilter(Map>> optimizationTermsFilterStructure, - TableInJoinRequestBuilder secondTableRequest) throws SqlParseException { - Select select = secondTableRequest.getOriginalSelect(); - - BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); - for (Map> optimization : optimizationTermsFilterStructure.values()) { - BoolQueryBuilder andQuery = QueryBuilders.boolQuery(); - for (Map.Entry> keyToValues : optimization.entrySet()) { - String fieldName = keyToValues.getKey(); - List values = keyToValues.getValue(); - andQuery.must(QueryBuilders.termsQuery(fieldName, values)); - } - orQuery.should(andQuery); - } - - Where where = select.getWhere(); - - BoolQueryBuilder boolQuery; - if (where != null) { - boolQuery = QueryMaker.explain(where, false); - boolQuery.must(orQuery); - } else { - boolQuery = orQuery; - } - secondTableRequest.getRequestBuilder().setQuery(boolQuery); + return hitsWithScan; + } + + private boolean needToOptimize( + Map>> optimizationTermsFilterStructure) { + if (!useQueryTermsFilterOptimization + && optimizationTermsFilterStructure != null + && optimizationTermsFilterStructure.size() > 0) { + return false; + } + boolean allEmpty = true; + for (Map> optimization : optimizationTermsFilterStructure.values()) { + if (optimization.size() > 0) { + allEmpty = false; + break; + } + } + return !allEmpty; + } + + private void updateRequestWithTermsFilter( + Map>> optimizationTermsFilterStructure, + TableInJoinRequestBuilder secondTableRequest) + throws SqlParseException { + Select select = secondTableRequest.getOriginalSelect(); + + BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); + for (Map> optimization : 
optimizationTermsFilterStructure.values()) { + BoolQueryBuilder andQuery = QueryBuilders.boolQuery(); + for (Map.Entry> keyToValues : optimization.entrySet()) { + String fieldName = keyToValues.getKey(); + List values = keyToValues.getValue(); + andQuery.must(QueryBuilders.termsQuery(fieldName, values)); + } + orQuery.should(andQuery); } - private String getComparisonKey(List> t1ToT2FieldsComparison, SearchHit hit, - boolean firstTable, Map> optimizationTermsFilterStructure) { - String key = ""; - Map sourceAsMap = hit.getSourceAsMap(); - for (Map.Entry t1ToT2 : t1ToT2FieldsComparison) { - //todo: change to our function find if key contains '.' - String name; - if (firstTable) { - name = t1ToT2.getKey().getName(); - } else { - name = t1ToT2.getValue().getName(); - } + Where where = select.getWhere(); - Object data = deepSearchInMap(sourceAsMap, name); - if (firstTable && useQueryTermsFilterOptimization) { - updateOptimizationData(optimizationTermsFilterStructure, data, t1ToT2.getValue().getName()); - } - if (data == null) { - key += "|null|"; - } else { - key += "|" + data.toString() + "|"; - } - } - return key; + BoolQueryBuilder boolQuery; + if (where != null) { + boolQuery = QueryMaker.explain(where, false); + boolQuery.must(orQuery); + } else { + boolQuery = orQuery; } - - private void updateOptimizationData(Map> optimizationTermsFilterStructure, - Object data, String queryOptimizationKey) { - List values = optimizationTermsFilterStructure.get(queryOptimizationKey); - if (values == null) { - values = new ArrayList<>(); - optimizationTermsFilterStructure.put(queryOptimizationKey, values); - } - if (data instanceof String) { - //todo: analyzed or not analyzed check.. 
- data = ((String) data).toLowerCase(); - } - if (data != null) { - values.add(data); - } + secondTableRequest.getRequestBuilder().setQuery(boolQuery); + } + + private String getComparisonKey( + List> t1ToT2FieldsComparison, + SearchHit hit, + boolean firstTable, + Map> optimizationTermsFilterStructure) { + String key = ""; + Map sourceAsMap = hit.getSourceAsMap(); + for (Map.Entry t1ToT2 : t1ToT2FieldsComparison) { + // todo: change to our function find if key contains '.' + String name; + if (firstTable) { + name = t1ToT2.getKey().getName(); + } else { + name = t1ToT2.getValue().getName(); + } + + Object data = deepSearchInMap(sourceAsMap, name); + if (firstTable && useQueryTermsFilterOptimization) { + updateOptimizationData(optimizationTermsFilterStructure, data, t1ToT2.getValue().getName()); + } + if (data == null) { + key += "|null|"; + } else { + key += "|" + data.toString() + "|"; + } + } + return key; + } + + private void updateOptimizationData( + Map> optimizationTermsFilterStructure, + Object data, + String queryOptimizationKey) { + List values = optimizationTermsFilterStructure.get(queryOptimizationKey); + if (values == null) { + values = new ArrayList<>(); + optimizationTermsFilterStructure.put(queryOptimizationKey, values); + } + if (data instanceof String) { + // todo: analyzed or not analyzed check.. + data = ((String) data).toLowerCase(); + } + if (data != null) { + values.add(data); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java index abdcf05751..a4174b7247 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java @@ -3,64 +3,60 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; -/** - * Created by Eliran on 4/9/2015. 
- */ +/** Created by Eliran on 4/9/2015. */ public class MetaSearchResult { - private long tookImMilli; - private int totalNumOfShards; - private int successfulShards; - private int failedShards; - private boolean isTimedOut; - - public MetaSearchResult() { - totalNumOfShards = 0; - failedShards = 0; - successfulShards = 0; - isTimedOut = false; - } - - public int getTotalNumOfShards() { - return totalNumOfShards; - } - - public int getSuccessfulShards() { - return successfulShards; - } - - public int getFailedShards() { - return failedShards; - } - - public boolean isTimedOut() { - return isTimedOut; - } - - public long getTookImMilli() { - return tookImMilli; - } - - public void setTookImMilli(long tookImMilli) { - this.tookImMilli = tookImMilli; - } - - public void addFailedShards(int shards) { - this.failedShards += shards; - } - - public void addSuccessfulShards(int shards) { - this.successfulShards += shards; - } - - public void addTotalNumOfShards(int shards) { - this.totalNumOfShards += shards; - } - - public void updateTimeOut(boolean isTimedOut) { - this.isTimedOut = this.isTimedOut || isTimedOut; - } - + private long tookImMilli; + private int totalNumOfShards; + private int successfulShards; + private int failedShards; + private boolean isTimedOut; + + public MetaSearchResult() { + totalNumOfShards = 0; + failedShards = 0; + successfulShards = 0; + isTimedOut = false; + } + + public int getTotalNumOfShards() { + return totalNumOfShards; + } + + public int getSuccessfulShards() { + return successfulShards; + } + + public int getFailedShards() { + return failedShards; + } + + public boolean isTimedOut() { + return isTimedOut; + } + + public long getTookImMilli() { + return tookImMilli; + } + + public void setTookImMilli(long tookImMilli) { + this.tookImMilli = tookImMilli; + } + + public void addFailedShards(int shards) { + this.failedShards += shards; + } + + public void addSuccessfulShards(int shards) { + this.successfulShards += shards; + } + + public 
void addTotalNumOfShards(int shards) { + this.totalNumOfShards += shards; + } + + public void updateTimeOut(boolean isTimedOut) { + this.isTimedOut = this.isTimedOut || isTimedOut; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java index 83901f1acb..03e16424e7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.ArrayList; @@ -36,409 +35,453 @@ import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; import org.opensearch.sql.legacy.utils.Util; - -/** - * Created by Eliran on 26/8/2016. - */ +/** Created by Eliran on 26/8/2016. */ public class MinusExecutor implements ElasticHitsExecutor { - private Client client; - private MultiQueryRequestBuilder builder; - private SearchHits minusHits; - private boolean useTermsOptimization; - private boolean termsOptimizationWithToLower; - private boolean useScrolling; - private int maxDocsToFetchOnFirstTable; - private int maxDocsToFetchOnSecondTable; - private int maxDocsToFetchOnEachScrollShard; - private String[] fieldsOrderFirstTable; - private String[] fieldsOrderSecondTable; - private String seperator; - - public MinusExecutor(Client client, MultiQueryRequestBuilder builder) { - this.client = client; - this.builder = builder; - this.useTermsOptimization = false; - this.termsOptimizationWithToLower = false; - this.useScrolling = false; - parseHintsIfAny(builder.getOriginalSelect(true).getHints()); - fillFieldsOrder(); - seperator = UUID.randomUUID().toString(); + private Client client; + private MultiQueryRequestBuilder builder; + private SearchHits minusHits; + private boolean useTermsOptimization; + private boolean 
termsOptimizationWithToLower; + private boolean useScrolling; + private int maxDocsToFetchOnFirstTable; + private int maxDocsToFetchOnSecondTable; + private int maxDocsToFetchOnEachScrollShard; + private String[] fieldsOrderFirstTable; + private String[] fieldsOrderSecondTable; + private String seperator; + + public MinusExecutor(Client client, MultiQueryRequestBuilder builder) { + this.client = client; + this.builder = builder; + this.useTermsOptimization = false; + this.termsOptimizationWithToLower = false; + this.useScrolling = false; + parseHintsIfAny(builder.getOriginalSelect(true).getHints()); + fillFieldsOrder(); + seperator = UUID.randomUUID().toString(); + } + + @Override + public void run() throws SqlParseException { + if (this.useTermsOptimization && this.fieldsOrderFirstTable.length != 1) { + throw new SqlParseException( + "Terms optimization failed: terms optimization for minus execution is supported with one" + + " field"); } - - @Override - public void run() throws SqlParseException { - if (this.useTermsOptimization && this.fieldsOrderFirstTable.length != 1) { - throw new SqlParseException( - "Terms optimization failed: terms optimization for minus execution is supported with one field"); - } - if (this.useTermsOptimization && !this.useScrolling) { - throw new SqlParseException( - "Terms optimization failed: using scrolling is required for terms optimization"); - } - if (!this.useScrolling || !this.useTermsOptimization) { - Set comperableHitResults; - if (!this.useScrolling) { - //1. get results from first search , put in set - //2. get reults from second search - //2.1 for each result remove from set - comperableHitResults = simpleOneTimeQueryEach(); - } else { - //if scrolling - //1. get all results in scrolls (till some limit) . put on set - //2. scroll on second table - //3. 
on each scroll result remove items from set - comperableHitResults = runWithScrollings(); - } - fillMinusHitsFromResults(comperableHitResults); - return; - } else { - //if scrolling and optimization - // 0. save the original second table where , init set - // 1. on each scroll on first table , create miniSet - //1.1 build where from all results (terms filter) , and run query - //1.1.1 on each result remove from miniSet - //1.1.2 add all results left from miniset to bigset - Select firstSelect = this.builder.getOriginalSelect(true); - MinusOneFieldAndOptimizationResult optimizationResult = - runWithScrollingAndAddFilter(fieldsOrderFirstTable[0], fieldsOrderSecondTable[0]); - String fieldName = getFieldName(firstSelect.getFields().get(0)); - Set results = optimizationResult.getFieldValues(); - SearchHit someHit = optimizationResult.getSomeHit(); - fillMinusHitsFromOneField(fieldName, results, someHit); - + if (this.useTermsOptimization && !this.useScrolling) { + throw new SqlParseException( + "Terms optimization failed: using scrolling is required for terms optimization"); + } + if (!this.useScrolling || !this.useTermsOptimization) { + Set comperableHitResults; + if (!this.useScrolling) { + // 1. get results from first search , put in set + // 2. get reults from second search + // 2.1 for each result remove from set + comperableHitResults = simpleOneTimeQueryEach(); + } else { + // if scrolling + // 1. get all results in scrolls (till some limit) . put on set + // 2. scroll on second table + // 3. on each scroll result remove items from set + comperableHitResults = runWithScrollings(); + } + fillMinusHitsFromResults(comperableHitResults); + return; + } else { + // if scrolling and optimization + // 0. save the original second table where , init set + // 1. 
on each scroll on first table , create miniSet + // 1.1 build where from all results (terms filter) , and run query + // 1.1.1 on each result remove from miniSet + // 1.1.2 add all results left from miniset to bigset + Select firstSelect = this.builder.getOriginalSelect(true); + MinusOneFieldAndOptimizationResult optimizationResult = + runWithScrollingAndAddFilter(fieldsOrderFirstTable[0], fieldsOrderSecondTable[0]); + String fieldName = getFieldName(firstSelect.getFields().get(0)); + Set results = optimizationResult.getFieldValues(); + SearchHit someHit = optimizationResult.getSomeHit(); + fillMinusHitsFromOneField(fieldName, results, someHit); + } + } + + @Override + public SearchHits getHits() { + return this.minusHits; + } + + private void fillMinusHitsFromOneField( + String fieldName, Set fieldValues, SearchHit someHit) { + List minusHitsList = new ArrayList<>(); + int currentId = 1; + for (Object result : fieldValues) { + Map fields = new HashMap<>(); + ArrayList values = new ArrayList<>(); + values.add(result); + fields.put(fieldName, new DocumentField(fieldName, values)); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + someHit + .getFields() + .forEach( + (field, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) + ? 
metaFields + : documentFields) + .put(field, docField)); + SearchHit searchHit = new SearchHit(currentId, currentId + "", documentFields, metaFields); + searchHit.sourceRef(someHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = new HashMap<>(); + sourceAsMap.put(fieldName, result); + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + minusHitsList.add(searchHit); + } + int totalSize = currentId - 1; + SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); + this.minusHits = + new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } + + private void fillMinusHitsFromResults(Set comperableHitResults) { + int currentId = 1; + List minusHitsList = new ArrayList<>(); + for (ComperableHitResult result : comperableHitResults) { + ArrayList values = new ArrayList<>(); + values.add(result); + SearchHit originalHit = result.getOriginalHit(); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + originalHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit(currentId, originalHit.getId(), documentFields, metaFields); + searchHit.sourceRef(originalHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = result.getFlattenMap(); + for (Map.Entry entry : this.builder.getFirstTableFieldToAlias().entrySet()) { + if (sourceAsMap.containsKey(entry.getKey())) { + Object value = sourceAsMap.get(entry.getKey()); + sourceAsMap.remove(entry.getKey()); + sourceAsMap.put(entry.getValue(), value); } + } + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + minusHitsList.add(searchHit); } - - - @Override - public SearchHits getHits() { - return this.minusHits; + int totalSize = currentId - 1; + SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); + this.minusHits = + new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } + + private Set runWithScrollings() { + + SearchResponse scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getFirstSearchRequest(), + builder.getOriginalSelect(true), + this.maxDocsToFetchOnEachScrollShard); + Set results = new HashSet<>(); + + SearchHit[] hits = scrollResp.getHits().getHits(); + if (hits == null || hits.length == 0) { + return new HashSet<>(); } - - private void fillMinusHitsFromOneField(String fieldName, Set fieldValues, SearchHit someHit) { - List minusHitsList = new ArrayList<>(); - int currentId = 1; - for (Object result : fieldValues) { - Map fields = new HashMap<>(); - ArrayList values = new ArrayList<>(); - values.add(result); - fields.put(fieldName, new DocumentField(fieldName, values)); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - someHit.getFields().forEach((field, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) ? 
metaFields : documentFields).put(field, docField)); - SearchHit searchHit = new SearchHit(currentId, currentId + "", - documentFields, metaFields); - searchHit.sourceRef(someHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = new HashMap<>(); - sourceAsMap.put(fieldName, result); - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - minusHitsList.add(searchHit); - } - int totalSize = currentId - 1; - SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); - this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + int totalDocsFetchedFromFirstTable = 0; + + // fetch from first table . fill set. + while (hits != null && hits.length != 0) { + totalDocsFetchedFromFirstTable += hits.length; + fillComperableSetFromHits(this.fieldsOrderFirstTable, hits, results); + if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void fillMinusHitsFromResults(Set comperableHitResults) { - int currentId = 1; - List minusHitsList = new ArrayList<>(); - for (ComperableHitResult result : comperableHitResults) { - ArrayList values = new ArrayList<>(); - values.add(result); - SearchHit originalHit = result.getOriginalHit(); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - originalHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(currentId, originalHit.getId(), - documentFields, metaFields); - searchHit.sourceRef(originalHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = result.getFlattenMap(); - for (Map.Entry entry : this.builder.getFirstTableFieldToAlias().entrySet()) { - if (sourceAsMap.containsKey(entry.getKey())) { - Object value = sourceAsMap.get(entry.getKey()); - sourceAsMap.remove(entry.getKey()); - sourceAsMap.put(entry.getValue(), value); - } - } - - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - minusHitsList.add(searchHit); - } - int totalSize = currentId - 1; - SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); - this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getSecondSearchRequest(), + builder.getOriginalSelect(false), + this.maxDocsToFetchOnEachScrollShard); + + hits = scrollResp.getHits().getHits(); + if (hits == null || hits.length == 0) { + return results; } - - private Set runWithScrollings() { - - SearchResponse scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, - this.builder.getFirstSearchRequest(), - builder.getOriginalSelect(true), this.maxDocsToFetchOnEachScrollShard); - Set results = new HashSet<>(); - - SearchHit[] hits = scrollResp.getHits().getHits(); - if (hits == null || hits.length == 0) { - return new HashSet<>(); - } - int totalDocsFetchedFromFirstTable = 0; - - //fetch from first table . fill set. 
- while (hits != null && hits.length != 0) { - totalDocsFetchedFromFirstTable += hits.length; - fillComperableSetFromHits(this.fieldsOrderFirstTable, hits, results); - if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, this.builder.getSecondSearchRequest(), - builder.getOriginalSelect(false), this.maxDocsToFetchOnEachScrollShard); - - - hits = scrollResp.getHits().getHits(); - if (hits == null || hits.length == 0) { - return results; - } - int totalDocsFetchedFromSecondTable = 0; - while (hits != null && hits.length != 0) { - totalDocsFetchedFromSecondTable += hits.length; - removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, results, hits); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - - return results; + int totalDocsFetchedFromSecondTable = 0; + while (hits != null && hits.length != 0) { + totalDocsFetchedFromSecondTable += hits.length; + removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, results, hits); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - private Set simpleOneTimeQueryEach() { - SearchHit[] firstTableHits = this.builder.getFirstSearchRequest().get().getHits().getHits(); - if (firstTableHits == null || firstTableHits.length == 0) { - return new HashSet<>(); - } + return results; + } - Set result = new HashSet<>(); - 
fillComperableSetFromHits(this.fieldsOrderFirstTable, firstTableHits, result); - SearchHit[] secondTableHits = this.builder.getSecondSearchRequest().get().getHits().getHits(); - if (secondTableHits == null || secondTableHits.length == 0) { - return result; - } - removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, result, secondTableHits); - return result; + private Set simpleOneTimeQueryEach() { + SearchHit[] firstTableHits = this.builder.getFirstSearchRequest().get().getHits().getHits(); + if (firstTableHits == null || firstTableHits.length == 0) { + return new HashSet<>(); } - private void removeValuesFromSetAccordingToHits(String[] fieldsOrder, - Set set, SearchHit[] hits) { - for (SearchHit hit : hits) { - ComperableHitResult comperableHitResult = new ComperableHitResult(hit, fieldsOrder, this.seperator); - if (!comperableHitResult.isAllNull()) { - set.remove(comperableHitResult); - } - } + Set result = new HashSet<>(); + fillComperableSetFromHits(this.fieldsOrderFirstTable, firstTableHits, result); + SearchHit[] secondTableHits = this.builder.getSecondSearchRequest().get().getHits().getHits(); + if (secondTableHits == null || secondTableHits.length == 0) { + return result; } - - private void fillComperableSetFromHits(String[] fieldsOrder, SearchHit[] hits, Set setToFill) { - for (SearchHit hit : hits) { - ComperableHitResult comperableHitResult = new ComperableHitResult(hit, fieldsOrder, this.seperator); - if (!comperableHitResult.isAllNull()) { - setToFill.add(comperableHitResult); - } - } + removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, result, secondTableHits); + return result; + } + + private void removeValuesFromSetAccordingToHits( + String[] fieldsOrder, Set set, SearchHit[] hits) { + for (SearchHit hit : hits) { + ComperableHitResult comperableHitResult = + new ComperableHitResult(hit, fieldsOrder, this.seperator); + if (!comperableHitResult.isAllNull()) { + set.remove(comperableHitResult); + } } - - private String 
getFieldName(Field field) { - String alias = field.getAlias(); - if (alias != null && !alias.isEmpty()) { - return alias; - } - return field.getName(); + } + + private void fillComperableSetFromHits( + String[] fieldsOrder, SearchHit[] hits, Set setToFill) { + for (SearchHit hit : hits) { + ComperableHitResult comperableHitResult = + new ComperableHitResult(hit, fieldsOrder, this.seperator); + if (!comperableHitResult.isAllNull()) { + setToFill.add(comperableHitResult); + } } + } - private boolean checkIfOnlyOneField(Select firstSelect, Select secondSelect) { - return firstSelect.getFields().size() == 1 && secondSelect.getFields().size() == 1; + private String getFieldName(Field field) { + String alias = field.getAlias(); + if (alias != null && !alias.isEmpty()) { + return alias; } - - - // 0. save the original second table where , init set - // 1. on each scroll on first table , create miniSet - //1.1 build where from all results (terms filter) , and run query - //1.1.1 on each result remove from miniSet - //1.1.2 add all results left from miniset to bigset - private MinusOneFieldAndOptimizationResult runWithScrollingAndAddFilter(String firstFieldName, - String secondFieldName) - throws SqlParseException { - SearchResponse scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, - this.builder.getFirstSearchRequest(), - builder.getOriginalSelect(true), this.maxDocsToFetchOnEachScrollShard); - Set results = new HashSet<>(); - int currentNumOfResults = 0; - SearchHit[] hits = scrollResp.getHits().getHits(); - SearchHit someHit = null; - if (hits.length != 0) { - //we need some hit for creating InnerResults. 
- someHit = hits[0]; - } - int totalDocsFetchedFromFirstTable = 0; - int totalDocsFetchedFromSecondTable = 0; - Where originalWhereSecondTable = this.builder.getOriginalSelect(false).getWhere(); - while (hits.length != 0) { - totalDocsFetchedFromFirstTable += hits.length; - Set currentSetFromResults = new HashSet<>(); - fillSetFromHits(firstFieldName, hits, currentSetFromResults); - //fetch from second - Select secondQuerySelect = this.builder.getOriginalSelect(false); - Where where = createWhereWithOrigianlAndTermsFilter(secondFieldName, originalWhereSecondTable, - currentSetFromResults); - secondQuerySelect.setWhere(where); - DefaultQueryAction queryAction = new DefaultQueryAction(this.client, secondQuerySelect); - queryAction.explain(); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - SearchResponse responseForSecondTable = ElasticUtils.scrollOneTimeWithHits(this.client, - queryAction.getRequestBuilder(), secondQuerySelect, this.maxDocsToFetchOnEachScrollShard); - SearchHits secondQuerySearchHits = responseForSecondTable.getHits(); - - SearchHit[] secondQueryHits = secondQuerySearchHits.getHits(); - while (secondQueryHits.length > 0) { - totalDocsFetchedFromSecondTable += secondQueryHits.length; - removeValuesFromSetAccordingToHits(secondFieldName, currentSetFromResults, secondQueryHits); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - responseForSecondTable = client.prepareSearchScroll(responseForSecondTable.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - secondQueryHits = responseForSecondTable.getHits().getHits(); - } - results.addAll(currentSetFromResults); - if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { - System.out.println("too many results for first table, stoping at:" + totalDocsFetchedFromFirstTable); - break; - } - - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new 
TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - return new MinusOneFieldAndOptimizationResult(results, someHit); - - + return field.getName(); + } + + private boolean checkIfOnlyOneField(Select firstSelect, Select secondSelect) { + return firstSelect.getFields().size() == 1 && secondSelect.getFields().size() == 1; + } + + // 0. save the original second table where , init set + // 1. on each scroll on first table , create miniSet + // 1.1 build where from all results (terms filter) , and run query + // 1.1.1 on each result remove from miniSet + // 1.1.2 add all results left from miniset to bigset + private MinusOneFieldAndOptimizationResult runWithScrollingAndAddFilter( + String firstFieldName, String secondFieldName) throws SqlParseException { + SearchResponse scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getFirstSearchRequest(), + builder.getOriginalSelect(true), + this.maxDocsToFetchOnEachScrollShard); + Set results = new HashSet<>(); + int currentNumOfResults = 0; + SearchHit[] hits = scrollResp.getHits().getHits(); + SearchHit someHit = null; + if (hits.length != 0) { + // we need some hit for creating InnerResults. 
+ someHit = hits[0]; } - - private void removeValuesFromSetAccordingToHits(String fieldName, Set setToRemoveFrom, SearchHit[] hits) { - for (SearchHit hit : hits) { - Object fieldValue = getFieldValue(hit, fieldName); - if (fieldValue != null) { - if (setToRemoveFrom.contains(fieldValue)) { - setToRemoveFrom.remove(fieldValue); - } - } + int totalDocsFetchedFromFirstTable = 0; + int totalDocsFetchedFromSecondTable = 0; + Where originalWhereSecondTable = this.builder.getOriginalSelect(false).getWhere(); + while (hits.length != 0) { + totalDocsFetchedFromFirstTable += hits.length; + Set currentSetFromResults = new HashSet<>(); + fillSetFromHits(firstFieldName, hits, currentSetFromResults); + // fetch from second + Select secondQuerySelect = this.builder.getOriginalSelect(false); + Where where = + createWhereWithOrigianlAndTermsFilter( + secondFieldName, originalWhereSecondTable, currentSetFromResults); + secondQuerySelect.setWhere(where); + DefaultQueryAction queryAction = new DefaultQueryAction(this.client, secondQuerySelect); + queryAction.explain(); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; + } + SearchResponse responseForSecondTable = + ElasticUtils.scrollOneTimeWithHits( + this.client, + queryAction.getRequestBuilder(), + secondQuerySelect, + this.maxDocsToFetchOnEachScrollShard); + SearchHits secondQuerySearchHits = responseForSecondTable.getHits(); + + SearchHit[] secondQueryHits = secondQuerySearchHits.getHits(); + while (secondQueryHits.length > 0) { + totalDocsFetchedFromSecondTable += secondQueryHits.length; + removeValuesFromSetAccordingToHits(secondFieldName, currentSetFromResults, secondQueryHits); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; } + responseForSecondTable = + client + .prepareSearchScroll(responseForSecondTable.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + secondQueryHits = responseForSecondTable.getHits().getHits(); + } + 
results.addAll(currentSetFromResults); + if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { + System.out.println( + "too many results for first table, stoping at:" + totalDocsFetchedFromFirstTable); + break; + } + + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void fillSetFromHits(String fieldName, SearchHit[] hits, Set setToFill) { - for (SearchHit hit : hits) { - Object fieldValue = getFieldValue(hit, fieldName); - if (fieldValue != null) { - setToFill.add(fieldValue); - } + return new MinusOneFieldAndOptimizationResult(results, someHit); + } + + private void removeValuesFromSetAccordingToHits( + String fieldName, Set setToRemoveFrom, SearchHit[] hits) { + for (SearchHit hit : hits) { + Object fieldValue = getFieldValue(hit, fieldName); + if (fieldValue != null) { + if (setToRemoveFrom.contains(fieldValue)) { + setToRemoveFrom.remove(fieldValue); } + } } - - private Where createWhereWithOrigianlAndTermsFilter(String secondFieldName, Where originalWhereSecondTable, - Set currentSetFromResults) throws SqlParseException { - Where where = Where.newInstance(); - where.setConn(Where.CONN.AND); - where.addWhere(originalWhereSecondTable); - where.addWhere(buildTermsFilterFromResults(currentSetFromResults, secondFieldName)); - return where; + } + + private void fillSetFromHits(String fieldName, SearchHit[] hits, Set setToFill) { + for (SearchHit hit : hits) { + Object fieldValue = getFieldValue(hit, fieldName); + if (fieldValue != null) { + setToFill.add(fieldValue); + } } - - private Where buildTermsFilterFromResults(Set results, String fieldName) throws SqlParseException { - return new Condition(Where.CONN.AND, fieldName, null, Condition.OPERATOR.IN_TERMS, results.toArray(), null); + } + + private Where createWhereWithOrigianlAndTermsFilter( + String secondFieldName, Where originalWhereSecondTable, Set 
currentSetFromResults) + throws SqlParseException { + Where where = Where.newInstance(); + where.setConn(Where.CONN.AND); + where.addWhere(originalWhereSecondTable); + where.addWhere(buildTermsFilterFromResults(currentSetFromResults, secondFieldName)); + return where; + } + + private Where buildTermsFilterFromResults(Set results, String fieldName) + throws SqlParseException { + return new Condition( + Where.CONN.AND, fieldName, null, Condition.OPERATOR.IN_TERMS, results.toArray(), null); + } + + private Object getFieldValue(SearchHit hit, String fieldName) { + Map sourceAsMap = hit.getSourceAsMap(); + if (fieldName.contains(".")) { + String[] split = fieldName.split("\\."); + return Util.searchPathInMap(sourceAsMap, split); + } else if (sourceAsMap.containsKey(fieldName)) { + return sourceAsMap.get(fieldName); } - - private Object getFieldValue(SearchHit hit, String fieldName) { - Map sourceAsMap = hit.getSourceAsMap(); - if (fieldName.contains(".")) { - String[] split = fieldName.split("\\."); - return Util.searchPathInMap(sourceAsMap, split); - } else if (sourceAsMap.containsKey(fieldName)) { - return sourceAsMap.get(fieldName); - } - return null; + return null; + } + + private void fillFieldsOrder() { + List fieldsOrAliases = new ArrayList<>(); + Map firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias(); + List firstTableFields = this.builder.getOriginalSelect(true).getFields(); + + for (Field field : firstTableFields) { + if (firstTableFieldToAlias.containsKey(field.getName())) { + fieldsOrAliases.add(field.getAlias()); + } else { + fieldsOrAliases.add(field.getName()); + } } - - private void fillFieldsOrder() { - List fieldsOrAliases = new ArrayList<>(); - Map firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias(); - List firstTableFields = this.builder.getOriginalSelect(true).getFields(); - - for (Field field : firstTableFields) { - if (firstTableFieldToAlias.containsKey(field.getName())) { - fieldsOrAliases.add(field.getAlias()); - } 
else { - fieldsOrAliases.add(field.getName()); - } - } - Collections.sort(fieldsOrAliases); - - int fieldsSize = fieldsOrAliases.size(); - this.fieldsOrderFirstTable = new String[fieldsSize]; - fillFieldsArray(fieldsOrAliases, firstTableFieldToAlias, this.fieldsOrderFirstTable); - this.fieldsOrderSecondTable = new String[fieldsSize]; - fillFieldsArray(fieldsOrAliases, this.builder.getSecondTableFieldToAlias(), this.fieldsOrderSecondTable); + Collections.sort(fieldsOrAliases); + + int fieldsSize = fieldsOrAliases.size(); + this.fieldsOrderFirstTable = new String[fieldsSize]; + fillFieldsArray(fieldsOrAliases, firstTableFieldToAlias, this.fieldsOrderFirstTable); + this.fieldsOrderSecondTable = new String[fieldsSize]; + fillFieldsArray( + fieldsOrAliases, this.builder.getSecondTableFieldToAlias(), this.fieldsOrderSecondTable); + } + + private void fillFieldsArray( + List fieldsOrAliases, Map fieldsToAlias, String[] fields) { + Map aliasToField = inverseMap(fieldsToAlias); + for (int i = 0; i < fields.length; i++) { + String field = fieldsOrAliases.get(i); + if (aliasToField.containsKey(field)) { + field = aliasToField.get(field); + } + fields[i] = field; } + } - private void fillFieldsArray(List fieldsOrAliases, Map fieldsToAlias, String[] fields) { - Map aliasToField = inverseMap(fieldsToAlias); - for (int i = 0; i < fields.length; i++) { - String field = fieldsOrAliases.get(i); - if (aliasToField.containsKey(field)) { - field = aliasToField.get(field); - } - fields[i] = field; - } + private Map inverseMap(Map mapToInverse) { + Map inversedMap = new HashMap<>(); + for (Map.Entry entry : mapToInverse.entrySet()) { + inversedMap.put(entry.getValue(), entry.getKey()); } + return inversedMap; + } - private Map inverseMap(Map mapToInverse) { - Map inversedMap = new HashMap<>(); - for (Map.Entry entry : mapToInverse.entrySet()) { - inversedMap.put(entry.getValue(), entry.getKey()); - } - return inversedMap; + private void parseHintsIfAny(List hints) { + if (hints == null) 
{ + return; } - - private void parseHintsIfAny(List hints) { - if (hints == null) { - return; - } - for (Hint hint : hints) { - if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { - Object[] params = hint.getParams(); - if (params != null && params.length == 1) { - this.termsOptimizationWithToLower = (boolean) params[0]; - } - } else if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { - Object[] params = hint.getParams(); - this.useScrolling = true; - this.maxDocsToFetchOnFirstTable = (int) params[0]; - this.maxDocsToFetchOnSecondTable = (int) params[1]; - this.maxDocsToFetchOnEachScrollShard = (int) params[2]; - } + for (Hint hint : hints) { + if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { + Object[] params = hint.getParams(); + if (params != null && params.length == 1) { + this.termsOptimizationWithToLower = (boolean) params[0]; } + } else if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { + Object[] params = hint.getParams(); + this.useScrolling = true; + this.maxDocsToFetchOnFirstTable = (int) params[0]; + this.maxDocsToFetchOnSecondTable = (int) params[1]; + this.maxDocsToFetchOnEachScrollShard = (int) params[2]; + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java index 3b4696bc1e..3d7206ab13 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java @@ -3,30 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.Set; import org.opensearch.search.SearchHit; - -/** - * Created by Eliran on 26/8/2016. - */ +/** Created by Eliran on 26/8/2016. 
*/ class MinusOneFieldAndOptimizationResult { - private Set fieldValues; - private SearchHit someHit; + private Set fieldValues; + private SearchHit someHit; - MinusOneFieldAndOptimizationResult(Set fieldValues, SearchHit someHit) { - this.fieldValues = fieldValues; - this.someHit = someHit; - } + MinusOneFieldAndOptimizationResult(Set fieldValues, SearchHit someHit) { + this.fieldValues = fieldValues; + this.someHit = someHit; + } - public Set getFieldValues() { - return fieldValues; - } + public Set getFieldValues() { + return fieldValues; + } - public SearchHit getSomeHit() { - return someHit; - } + public SearchHit getSomeHit() { + return someHit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java index 239bc98772..03c6958076 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; @@ -12,20 +11,19 @@ import org.opensearch.sql.legacy.executor.ElasticHitsExecutor; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. 
*/ public class MultiRequestExecutorFactory { - public static ElasticHitsExecutor createExecutor(Client client, MultiQueryRequestBuilder builder) { - SQLUnionOperator relation = builder.getRelation(); - switch (relation) { - case UNION_ALL: - case UNION: - return new UnionExecutor(client, builder); - case MINUS: - return new MinusExecutor(client, builder); - default: - throw new SemanticAnalysisException("Unsupported operator: " + relation); - } + public static ElasticHitsExecutor createExecutor( + Client client, MultiQueryRequestBuilder builder) { + SQLUnionOperator relation = builder.getRelation(); + switch (relation) { + case UNION_ALL: + case UNION: + return new UnionExecutor(client, builder); + case MINUS: + return new MinusExecutor(client, builder); + default: + throw new SemanticAnalysisException("Unsupported operator: " + relation); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java index a858d99d3f..3a9ac5a66d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java @@ -3,21 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core; - import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression. - */ +/** The definition of the Expression. */ public interface Expression { - /** - * Evaluate the result on the BindingTuple context. - * @param tuple BindingTuple - * @return ExprValue - */ - ExprValue valueOf(BindingTuple tuple); + /** + * Evaluate the result on the BindingTuple context. 
+ * + * @param tuple BindingTuple + * @return ExprValue + */ + ExprValue valueOf(BindingTuple tuple); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java index cf5fd4627f..1df81e34b3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core; - import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ABS; import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ACOS; import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ADD; @@ -38,89 +36,80 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; - -/** - * The definition of Expression factory. - */ +/** The definition of Expression factory. 
*/ public class ExpressionFactory { - private static final Map operationExpressionBuilderMap = - new ImmutableMap.Builder() - .put(ADD, ArithmeticFunctionFactory.add()) - .put(SUBTRACT, ArithmeticFunctionFactory.subtract()) - .put(MULTIPLY, ArithmeticFunctionFactory.multiply()) - .put(DIVIDE, ArithmeticFunctionFactory.divide()) - .put(MODULES, ArithmeticFunctionFactory.modules()) - .put(ABS, ArithmeticFunctionFactory.abs()) - .put(ACOS, ArithmeticFunctionFactory.acos()) - .put(ASIN, ArithmeticFunctionFactory.asin()) - .put(ATAN, ArithmeticFunctionFactory.atan()) - .put(ATAN2, ArithmeticFunctionFactory.atan2()) - .put(TAN, ArithmeticFunctionFactory.tan()) - .put(CBRT, ArithmeticFunctionFactory.cbrt()) - .put(CEIL, ArithmeticFunctionFactory.ceil()) - .put(COS, ArithmeticFunctionFactory.cos()) - .put(COSH, ArithmeticFunctionFactory.cosh()) - .put(EXP, ArithmeticFunctionFactory.exp()) - .put(FLOOR, ArithmeticFunctionFactory.floor()) - .put(LN, ArithmeticFunctionFactory.ln()) - .put(LOG, ArithmeticFunctionFactory.log()) - .put(LOG2, ArithmeticFunctionFactory.log2()) - .put(LOG10, ArithmeticFunctionFactory.log10()) - .build(); + private static final Map operationExpressionBuilderMap = + new ImmutableMap.Builder() + .put(ADD, ArithmeticFunctionFactory.add()) + .put(SUBTRACT, ArithmeticFunctionFactory.subtract()) + .put(MULTIPLY, ArithmeticFunctionFactory.multiply()) + .put(DIVIDE, ArithmeticFunctionFactory.divide()) + .put(MODULES, ArithmeticFunctionFactory.modules()) + .put(ABS, ArithmeticFunctionFactory.abs()) + .put(ACOS, ArithmeticFunctionFactory.acos()) + .put(ASIN, ArithmeticFunctionFactory.asin()) + .put(ATAN, ArithmeticFunctionFactory.atan()) + .put(ATAN2, ArithmeticFunctionFactory.atan2()) + .put(TAN, ArithmeticFunctionFactory.tan()) + .put(CBRT, ArithmeticFunctionFactory.cbrt()) + .put(CEIL, ArithmeticFunctionFactory.ceil()) + .put(COS, ArithmeticFunctionFactory.cos()) + .put(COSH, ArithmeticFunctionFactory.cosh()) + .put(EXP, ArithmeticFunctionFactory.exp()) + 
.put(FLOOR, ArithmeticFunctionFactory.floor()) + .put(LN, ArithmeticFunctionFactory.ln()) + .put(LOG, ArithmeticFunctionFactory.log()) + .put(LOG2, ArithmeticFunctionFactory.log2()) + .put(LOG10, ArithmeticFunctionFactory.log10()) + .build(); - public static Expression of(ScalarOperation op, List expressions) { - return operationExpressionBuilderMap.get(op).build(expressions); - } + public static Expression of(ScalarOperation op, List expressions) { + return operationExpressionBuilderMap.get(op).build(expressions); + } - /** - * Ref Expression. Define the binding name which could be resolved in {@link BindingTuple} - */ - public static Expression ref(String bindingName) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return tuple.resolve(bindingName); - } + /** Ref Expression. Define the binding name which could be resolved in {@link BindingTuple} */ + public static Expression ref(String bindingName) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return tuple.resolve(bindingName); + } - @Override - public String toString() { - return String.format("%s", bindingName); - } - }; - } + @Override + public String toString() { + return String.format("%s", bindingName); + } + }; + } - /** - * Literal Expression. - */ - public static Expression literal(ExprValue value) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return value; - } + /** Literal Expression. */ + public static Expression literal(ExprValue value) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return value; + } - @Override - public String toString() { - return String.format("%s", value); - } - }; - } + @Override + public String toString() { + return String.format("%s", value); + } + }; + } - /** - * Cast Expression. 
- */ - public static Expression cast(Expression expr) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return expr.valueOf(tuple); - } + /** Cast Expression. */ + public static Expression cast(Expression expr) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return expr.valueOf(tuple); + } - @Override - public String toString() { - return String.format("cast(%s)", expr); - } - }; - } + @Override + public String toString() { + return String.format("cast(%s)", expr); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java index 5f2cbb5776..76744d7d34 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.List; import org.opensearch.sql.legacy.expression.core.Expression; -/** - * The definition of the {@link Expression} builder. - */ +/** The definition of the {@link Expression} builder. 
*/ public interface ExpressionBuilder { - Expression build(List expressionList); + Expression build(List expressionList); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java index 50b1523497..ce7c1a8fca 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,23 +11,23 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprBooleanValue implements ExprValue { - private final Boolean value; + private final Boolean value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.BOOLEAN_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.BOOLEAN_VALUE; + } - @Override - public String toString() { - final StringBuffer sb = new StringBuffer("SSBooleanValue{"); - sb.append("value=").append(value); - sb.append('}'); - return sb.toString(); - } + @Override + public String toString() { + final StringBuffer sb = new StringBuffer("SSBooleanValue{"); + sb.append("value=").append(value); + sb.append('}'); + return sb.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java index 99eb35272d..f6200a2bea 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - 
package org.opensearch.sql.legacy.expression.model; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.COLLECTION_VALUE; @@ -16,22 +15,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprCollectionValue implements ExprValue { - private final List valueList; - - @Override - public Object value() { - return valueList; - } - - @Override - public ExprValueKind kind() { - return COLLECTION_VALUE; - } - - @Override - public String toString() { - return valueList.stream() - .map(Object::toString) - .collect(Collectors.joining(",", "[", "]")); - } + private final List valueList; + + @Override + public Object value() { + return valueList; + } + + @Override + public ExprValueKind kind() { + return COLLECTION_VALUE; + } + + @Override + public String toString() { + return valueList.stream().map(Object::toString).collect(Collectors.joining(",", "[", "]")); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java index fdfacc4c55..16c607cae5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprDoubleValue implements ExprValue { - private final Double value; + private final Double value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.DOUBLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.DOUBLE_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override 
+ public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java index f4d4dfc1b3..478ca645a7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprFloatValue implements ExprValue { - private final Float value; + private final Float value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.DOUBLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.DOUBLE_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java index 3285934280..92c4d38a4c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprIntegerValue implements ExprValue { - private final Integer value; + private final Integer value; - @Override - public Object value() { - return value; - } + 
@Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.INTEGER_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.INTEGER_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java index b50a0088db..4f96ecf89a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprLongValue implements ExprValue { - private final Long value; + private final Long value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.LONG_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.LONG_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java index e05e32b920..c2b70537c9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.expression.model; -/** - * The definition of the missing value. - */ +/** The definition of the missing value. */ public class ExprMissingValue implements ExprValue { - @Override - public ExprValueKind kind() { - return ExprValueKind.MISSING_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.MISSING_VALUE; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java index dcdec6117f..3c93b82187 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprStringValue implements ExprValue { - private final String value; + private final String value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.STRING_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.STRING_VALUE; + } - @Override - public String toString() { - return value; - } + @Override + public String toString() { + return value; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java index 7debcef864..5f690a6b3e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.expression.model; import java.util.Map; @@ -14,23 +13,22 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprTupleValue implements ExprValue { - private final Map valueMap; + private final Map valueMap; - @Override - public Object value() { - return valueMap; - } + @Override + public Object value() { + return valueMap; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.TUPLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.TUPLE_VALUE; + } - @Override - public String toString() { - return valueMap.entrySet() - .stream() - .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) - .collect(Collectors.joining(",", "{", "}")); - } + @Override + public String toString() { + return valueMap.entrySet().stream() + .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",", "{", "}")); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java index d15cb39270..aae1973d4b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java @@ -3,31 +3,28 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; -/** - * The definition of the Expression Value. - */ +/** The definition of the Expression Value. 
*/ public interface ExprValue { - default Object value() { - throw new IllegalStateException("invalid value operation on " + kind()); - } + default Object value() { + throw new IllegalStateException("invalid value operation on " + kind()); + } - default ExprValueKind kind() { - throw new IllegalStateException("invalid kind operation"); - } + default ExprValueKind kind() { + throw new IllegalStateException("invalid kind operation"); + } - enum ExprValueKind { - TUPLE_VALUE, - COLLECTION_VALUE, - MISSING_VALUE, + enum ExprValueKind { + TUPLE_VALUE, + COLLECTION_VALUE, + MISSING_VALUE, - BOOLEAN_VALUE, - INTEGER_VALUE, - DOUBLE_VALUE, - LONG_VALUE, - FLOAT_VALUE, - STRING_VALUE - } + BOOLEAN_VALUE, + INTEGER_VALUE, + DOUBLE_VALUE, + LONG_VALUE, + FLOAT_VALUE, + STRING_VALUE + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java index 5dc2b5b50a..28f4c70293 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import java.math.BigDecimal; @@ -12,62 +11,60 @@ import java.util.List; import java.util.Map; -/** - * The definition of {@link ExprValue} factory. - */ +/** The definition of {@link ExprValue} factory. 
*/ public class ExprValueFactory { - public static ExprValue booleanValue(Boolean value) { - return new ExprBooleanValue(value); - } + public static ExprValue booleanValue(Boolean value) { + return new ExprBooleanValue(value); + } - public static ExprValue integerValue(Integer value) { - return new ExprIntegerValue(value); - } + public static ExprValue integerValue(Integer value) { + return new ExprIntegerValue(value); + } - public static ExprValue doubleValue(Double value) { - return new ExprDoubleValue(value); - } + public static ExprValue doubleValue(Double value) { + return new ExprDoubleValue(value); + } - public static ExprValue stringValue(String value) { - return new ExprStringValue(value); - } + public static ExprValue stringValue(String value) { + return new ExprStringValue(value); + } - public static ExprValue longValue(Long value) { - return new ExprLongValue(value); - } + public static ExprValue longValue(Long value) { + return new ExprLongValue(value); + } - public static ExprValue tupleValue(Map map) { - Map valueMap = new HashMap<>(); - map.forEach((k, v) -> valueMap.put(k, from(v))); - return new ExprTupleValue(valueMap); - } + public static ExprValue tupleValue(Map map) { + Map valueMap = new HashMap<>(); + map.forEach((k, v) -> valueMap.put(k, from(v))); + return new ExprTupleValue(valueMap); + } - public static ExprValue collectionValue(List list) { - List valueList = new ArrayList<>(); - list.forEach(o -> valueList.add(from(o))); - return new ExprCollectionValue(valueList); - } + public static ExprValue collectionValue(List list) { + List valueList = new ArrayList<>(); + list.forEach(o -> valueList.add(from(o))); + return new ExprCollectionValue(valueList); + } - public static ExprValue from(Object o) { - if (o instanceof Map) { - return tupleValue((Map) o); - } else if (o instanceof List) { - return collectionValue(((List) o)); - } else if (o instanceof Integer) { - return integerValue((Integer) o); - } else if (o instanceof Long) { - return 
longValue(((Long) o)); - } else if (o instanceof Boolean) { - return booleanValue((Boolean) o); - } else if (o instanceof Double) { - return doubleValue((Double) o); - } else if (o instanceof BigDecimal) { - return doubleValue(((BigDecimal) o).doubleValue()); - } else if (o instanceof String) { - return stringValue((String) o); - } else { - throw new IllegalStateException("unsupported type " + o.getClass()); - } + public static ExprValue from(Object o) { + if (o instanceof Map) { + return tupleValue((Map) o); + } else if (o instanceof List) { + return collectionValue(((List) o)); + } else if (o instanceof Integer) { + return integerValue((Integer) o); + } else if (o instanceof Long) { + return longValue(((Long) o)); + } else if (o instanceof Boolean) { + return booleanValue((Boolean) o); + } else if (o instanceof Double) { + return doubleValue((Double) o); + } else if (o instanceof BigDecimal) { + return doubleValue(((BigDecimal) o).doubleValue()); + } else if (o instanceof String) { + return stringValue((String) o); + } else { + throw new IllegalStateException("unsupported type " + o.getClass()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java index 4688e74b6a..9873c72886 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.BOOLEAN_VALUE; @@ -15,64 +14,62 @@ import java.util.List; import java.util.Map; -/** - * The definition of ExprValue Utils. - */ +/** The definition of ExprValue Utils. 
*/ public class ExprValueUtils { - public static Integer getIntegerValue(ExprValue exprValue) { - return getNumberValue(exprValue).intValue(); - } + public static Integer getIntegerValue(ExprValue exprValue) { + return getNumberValue(exprValue).intValue(); + } - public static Double getDoubleValue(ExprValue exprValue) { - return getNumberValue(exprValue).doubleValue(); - } + public static Double getDoubleValue(ExprValue exprValue) { + return getNumberValue(exprValue).doubleValue(); + } - public static Long getLongValue(ExprValue exprValue) { - return getNumberValue(exprValue).longValue(); - } + public static Long getLongValue(ExprValue exprValue) { + return getNumberValue(exprValue).longValue(); + } - public static Float getFloatValue(ExprValue exprValue) { - return getNumberValue(exprValue).floatValue(); - } + public static Float getFloatValue(ExprValue exprValue) { + return getNumberValue(exprValue).floatValue(); + } - public static String getStringValue(ExprValue exprValue) { - return convert(exprValue, STRING_VALUE); - } + public static String getStringValue(ExprValue exprValue) { + return convert(exprValue, STRING_VALUE); + } - public static List getCollectionValue(ExprValue exprValue) { - return convert(exprValue, COLLECTION_VALUE); - } + public static List getCollectionValue(ExprValue exprValue) { + return convert(exprValue, COLLECTION_VALUE); + } - public static Map getTupleValue(ExprValue exprValue) { - return convert(exprValue, TUPLE_VALUE); - } + public static Map getTupleValue(ExprValue exprValue) { + return convert(exprValue, TUPLE_VALUE); + } - public static Boolean getBooleanValue(ExprValue exprValue) { - return convert(exprValue, BOOLEAN_VALUE); - } + public static Boolean getBooleanValue(ExprValue exprValue) { + return convert(exprValue, BOOLEAN_VALUE); + } - @VisibleForTesting - public static Number getNumberValue(ExprValue exprValue) { - switch (exprValue.kind()) { - case INTEGER_VALUE: - case DOUBLE_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: 
- return (Number) exprValue.value(); - default: - break; - } - throw new IllegalStateException( - String.format("invalid to get NUMBER_VALUE from expr type of %s", exprValue.kind())); + @VisibleForTesting + public static Number getNumberValue(ExprValue exprValue) { + switch (exprValue.kind()) { + case INTEGER_VALUE: + case DOUBLE_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return (Number) exprValue.value(); + default: + break; } + throw new IllegalStateException( + String.format("invalid to get NUMBER_VALUE from expr type of %s", exprValue.kind())); + } - @SuppressWarnings("unchecked") - private static T convert(ExprValue exprValue, ExprValue.ExprValueKind toType) { - if (exprValue.kind() == toType) { - return (T) exprValue.value(); - } else { - throw new IllegalStateException( - String.format("invalid to get %s from expr type of %s", toType, exprValue.kind())); - } + @SuppressWarnings("unchecked") + private static T convert(ExprValue exprValue, ExprValue.ExprValueKind toType) { + if (exprValue.kind() == toType) { + return (T) exprValue.value(); + } else { + throw new IllegalStateException( + String.format("invalid to get %s from expr type of %s", toType, exprValue.kind())); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java index 5752927952..2f7c269351 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java @@ -3,29 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.function.Supplier; -/** - * Gauge metric, an instant value like cpu usage, state and so on - */ +/** Gauge metric, an instant value like cpu usage, state and so on */ public class GaugeMetric extends Metric { - private Supplier loadValue; - - public GaugeMetric(String name, Supplier supplier) { - super(name); - 
this.loadValue = supplier; - } + private Supplier loadValue; - public String getName() { - return super.getName(); - } + public GaugeMetric(String name, Supplier supplier) { + super(name); + this.loadValue = supplier; + } - public T getValue() { - return loadValue.get(); - } + public String getName() { + return super.getName(); + } + public T getValue() { + return loadValue.get(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java index 9e31b0d9cd..956e0f558c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java @@ -3,23 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public abstract class Metric implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String name; + private static final long serialVersionUID = 1L; - public Metric(String name) { - this.name = name; - } + private String name; - public String getName() { - return name; - } + public Metric(String name) { + this.name = name; + } - public abstract T getValue(); + public String getName() { + return name; + } + public abstract T getValue(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java index 9319b77644..e4fbd173c9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java @@ -3,34 +3,33 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; public class MetricFactory { - public static Metric createMetric(MetricName name) { + public static Metric createMetric(MetricName name) { - switch (name) { - 
case REQ_TOTAL: - case DEFAULT_CURSOR_REQUEST_TOTAL: - case DEFAULT: - case PPL_REQ_TOTAL: - return new NumericMetric<>(name.getName(), new BasicCounter()); - case CIRCUIT_BREAKER: - return new GaugeMetric<>(name.getName(), BackOffRetryStrategy.GET_CB_STATE); - case REQ_COUNT_TOTAL: - case DEFAULT_CURSOR_REQUEST_COUNT_TOTAL: - case FAILED_REQ_COUNT_CUS: - case FAILED_REQ_COUNT_SYS: - case FAILED_REQ_COUNT_CB: - case PPL_REQ_COUNT_TOTAL: - case PPL_FAILED_REQ_COUNT_CUS: - case PPL_FAILED_REQ_COUNT_SYS: - return new NumericMetric<>(name.getName(), new RollingCounter()); - default: - return new NumericMetric<>(name.getName(), new BasicCounter()); - } + switch (name) { + case REQ_TOTAL: + case DEFAULT_CURSOR_REQUEST_TOTAL: + case DEFAULT: + case PPL_REQ_TOTAL: + return new NumericMetric<>(name.getName(), new BasicCounter()); + case CIRCUIT_BREAKER: + return new GaugeMetric<>(name.getName(), BackOffRetryStrategy.GET_CB_STATE); + case REQ_COUNT_TOTAL: + case DEFAULT_CURSOR_REQUEST_COUNT_TOTAL: + case FAILED_REQ_COUNT_CUS: + case FAILED_REQ_COUNT_SYS: + case FAILED_REQ_COUNT_CB: + case PPL_REQ_COUNT_TOTAL: + case PPL_FAILED_REQ_COUNT_CUS: + case PPL_FAILED_REQ_COUNT_SYS: + return new NumericMetric<>(name.getName(), new RollingCounter()); + default: + return new NumericMetric<>(name.getName(), new BasicCounter()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java index 16a719b97e..1c895f5d69 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import com.google.common.collect.ImmutableSet; @@ -13,51 +12,55 @@ import java.util.stream.Collectors; public enum MetricName { + REQ_TOTAL("request_total"), + REQ_COUNT_TOTAL("request_count"), + 
FAILED_REQ_COUNT_SYS("failed_request_count_syserr"), + FAILED_REQ_COUNT_CUS("failed_request_count_cuserr"), + FAILED_REQ_COUNT_CB("failed_request_count_cb"), + DEFAULT_CURSOR_REQUEST_TOTAL("default_cursor_request_total"), + DEFAULT_CURSOR_REQUEST_COUNT_TOTAL("default_cursor_request_count"), + CIRCUIT_BREAKER("circuit_breaker"), + DEFAULT("default"), + + PPL_REQ_TOTAL("ppl_request_total"), + PPL_REQ_COUNT_TOTAL("ppl_request_count"), + PPL_FAILED_REQ_COUNT_SYS("ppl_failed_request_count_syserr"), + PPL_FAILED_REQ_COUNT_CUS("ppl_failed_request_count_cuserr"), + DATASOURCE_REQ_COUNT("datasource_request_count"), + DATASOURCE_FAILED_REQ_COUNT_SYS("datasource_failed_request_count_syserr"), + DATASOURCE_FAILED_REQ_COUNT_CUS("datasource_failed_request_count_cuserr"); + + private String name; + + MetricName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public static List getNames() { + return Arrays.stream(MetricName.values()).map(v -> v.name).collect(Collectors.toList()); + } + + private static Set NUMERICAL_METRIC = + new ImmutableSet.Builder() + .add(PPL_REQ_TOTAL) + .add(PPL_REQ_COUNT_TOTAL) + .add(PPL_FAILED_REQ_COUNT_SYS) + .add(PPL_FAILED_REQ_COUNT_CUS) + .build(); - REQ_TOTAL("request_total"), - REQ_COUNT_TOTAL("request_count"), - FAILED_REQ_COUNT_SYS("failed_request_count_syserr"), - FAILED_REQ_COUNT_CUS("failed_request_count_cuserr"), - FAILED_REQ_COUNT_CB("failed_request_count_cb"), - DEFAULT_CURSOR_REQUEST_TOTAL("default_cursor_request_total"), - DEFAULT_CURSOR_REQUEST_COUNT_TOTAL("default_cursor_request_count"), - CIRCUIT_BREAKER("circuit_breaker"), - DEFAULT("default"), - - PPL_REQ_TOTAL("ppl_request_total"), - PPL_REQ_COUNT_TOTAL("ppl_request_count"), - PPL_FAILED_REQ_COUNT_SYS("ppl_failed_request_count_syserr"), - PPL_FAILED_REQ_COUNT_CUS("ppl_failed_request_count_cuserr"), - DATASOURCE_REQ_COUNT("datasource_request_count"), - DATASOURCE_FAILED_REQ_COUNT_SYS("datasource_failed_request_count_syserr"), - 
DATASOURCE_FAILED_REQ_COUNT_CUS("datasource_failed_request_count_cuserr"); - - private String name; - - MetricName(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - public static List getNames() { - return Arrays.stream(MetricName.values()).map(v -> v.name).collect(Collectors.toList()); - } - - - private static Set NUMERICAL_METRIC = new ImmutableSet.Builder() - .add(PPL_REQ_TOTAL) - .add(PPL_REQ_COUNT_TOTAL) - .add(PPL_FAILED_REQ_COUNT_SYS) - .add(PPL_FAILED_REQ_COUNT_CUS) - .build(); - - public boolean isNumerical() { - return this == REQ_TOTAL || this == REQ_COUNT_TOTAL || this == FAILED_REQ_COUNT_SYS - || this == FAILED_REQ_COUNT_CUS || this == FAILED_REQ_COUNT_CB || this == DEFAULT - || this == DEFAULT_CURSOR_REQUEST_TOTAL || this == DEFAULT_CURSOR_REQUEST_COUNT_TOTAL - || NUMERICAL_METRIC.contains(this); - } + public boolean isNumerical() { + return this == REQ_TOTAL + || this == REQ_COUNT_TOTAL + || this == FAILED_REQ_COUNT_SYS + || this == FAILED_REQ_COUNT_CUS + || this == FAILED_REQ_COUNT_CB + || this == DEFAULT + || this == DEFAULT_CURSOR_REQUEST_TOTAL + || this == DEFAULT_CURSOR_REQUEST_COUNT_TOTAL + || NUMERICAL_METRIC.contains(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java index e53dfa6804..858f9e5cef 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.ArrayList; @@ -13,69 +12,68 @@ public class Metrics { - private static Metrics metrics = new Metrics(); - private ConcurrentHashMap registeredMetricsByName = new ConcurrentHashMap<>(); - - public static Metrics getInstance() { - return metrics; - } + private static Metrics metrics = new Metrics(); + private ConcurrentHashMap 
registeredMetricsByName = new ConcurrentHashMap<>(); - private Metrics() { - } + public static Metrics getInstance() { + return metrics; + } - public void registerDefaultMetrics() { - for (MetricName metricName : MetricName.values()) { - registerMetric(MetricFactory.createMetric(metricName)); - } - } + private Metrics() {} - public void registerMetric(Metric metric) { - registeredMetricsByName.put(metric.getName(), metric); + public void registerDefaultMetrics() { + for (MetricName metricName : MetricName.values()) { + registerMetric(MetricFactory.createMetric(metricName)); } + } - public void unregisterMetric(String name) { - if (name == null) { - return; - } + public void registerMetric(Metric metric) { + registeredMetricsByName.put(metric.getName(), metric); + } - registeredMetricsByName.remove(name); + public void unregisterMetric(String name) { + if (name == null) { + return; } - public Metric getMetric(String name) { - if (name == null) { - return null; - } + registeredMetricsByName.remove(name); + } - return registeredMetricsByName.get(name); + public Metric getMetric(String name) { + if (name == null) { + return null; } - public NumericMetric getNumericalMetric(MetricName metricName) { - String name = metricName.getName(); - if (!metricName.isNumerical()) { - name = MetricName.DEFAULT.getName(); - } + return registeredMetricsByName.get(name); + } - return (NumericMetric) registeredMetricsByName.get(name); + public NumericMetric getNumericalMetric(MetricName metricName) { + String name = metricName.getName(); + if (!metricName.isNumerical()) { + name = MetricName.DEFAULT.getName(); } - public List getAllMetrics() { - return new ArrayList<>(registeredMetricsByName.values()); - } + return (NumericMetric) registeredMetricsByName.get(name); + } - public String collectToJSON() { - JSONObject metricsJSONObject = new JSONObject(); + public List getAllMetrics() { + return new ArrayList<>(registeredMetricsByName.values()); + } - for (Metric metric : 
registeredMetricsByName.values()) { - if (metric.getName().equals("default")) { - continue; - } - metricsJSONObject.put(metric.getName(), metric.getValue()); - } + public String collectToJSON() { + JSONObject metricsJSONObject = new JSONObject(); - return metricsJSONObject.toString(); + for (Metric metric : registeredMetricsByName.values()) { + if (metric.getName().equals("default")) { + continue; + } + metricsJSONObject.put(metric.getName(), metric.getValue()); } - public void clear() { - registeredMetricsByName.clear(); - } + return metricsJSONObject.toString(); + } + + public void clear() { + registeredMetricsByName.clear(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java index 5f6d03f0ac..be9c2f9652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static org.opensearch.sql.legacy.utils.StringUtils.isQuoted; @@ -53,987 +52,1002 @@ import com.alibaba.druid.util.JdbcConstants; import java.util.List; -/** - * Created by Eliran on 18/8/2015. - */ +/** Created by Eliran on 18/8/2015. 
*/ public class ElasticSqlExprParser extends SQLExprParser { - public ElasticSqlExprParser(Lexer lexer) { - super(lexer); - this.aggregateFunctions = AGGREGATE_FUNCTIONS; + public ElasticSqlExprParser(Lexer lexer) { + super(lexer); + this.aggregateFunctions = AGGREGATE_FUNCTIONS; + } + + public ElasticSqlExprParser(String sql) { + this(new ElasticLexer(sql)); + this.lexer.nextToken(); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + @Override + public void parseHints(List hints) { + while (lexer.token() == Token.HINT) { + hints.add(new SQLCommentHint(lexer.stringVal())); + lexer.nextToken(); } + } - public ElasticSqlExprParser(String sql) { - this(new ElasticLexer(sql)); - this.lexer.nextToken(); + @Override + protected SQLExpr methodRest(SQLExpr expr, boolean acceptLPAREN) { + if (acceptLPAREN) { + accept(Token.LPAREN); } - @SuppressWarnings({"unchecked", "rawtypes"}) - @Override - public void parseHints(List hints) { - while (lexer.token() == Token.HINT) { - hints.add(new SQLCommentHint(lexer.stringVal())); - lexer.nextToken(); - } + if (expr instanceof SQLName || expr instanceof SQLDefaultExpr) { + String methodName; + + SQLMethodInvokeExpr methodInvokeExpr; + if (expr instanceof SQLPropertyExpr) { + methodName = ((SQLPropertyExpr) expr).getName(); + methodInvokeExpr = new SQLMethodInvokeExpr(methodName); + methodInvokeExpr.setOwner(((SQLPropertyExpr) expr).getOwner()); + } else { + methodName = expr.toString(); + methodInvokeExpr = new SQLMethodInvokeExpr(methodName); + } + + if (isAggreateFunction(methodName)) { + SQLAggregateExpr aggregateExpr = parseAggregateExpr(methodName); + + return aggregateExpr; + } + + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } + + accept(Token.RPAREN); + + return primaryRest(methodInvokeExpr); } - @Override - protected SQLExpr methodRest(SQLExpr expr, boolean acceptLPAREN) { - if (acceptLPAREN) { - accept(Token.LPAREN); + throw new ParserException("Syntax error: " + 
lexer.token()); + } + + public SQLExpr primary() { + + if (lexer.token() == Token.LBRACE) { + lexer.nextToken(); + boolean foundRBrace = false; + if (lexer.stringVal().equals("ts")) { + String current = lexer.stringVal(); + do { + if (current.equals(lexer.token().RBRACE.name())) { + foundRBrace = true; + break; + } + lexer.nextToken(); + current = lexer.token().name(); + } while (!foundRBrace && !current.trim().equals("")); + + if (foundRBrace) { + SQLOdbcExpr sdle = new SQLOdbcExpr(lexer.stringVal()); + + accept(Token.RBRACE); + return sdle; + } else { + throw new ParserException("Error. Unable to find closing RBRACE"); } + } else { + throw new ParserException("Error. Unable to parse ODBC Literal Timestamp"); + } + } else if (lexer.token() == Token.LBRACKET) { + StringBuilder identifier = new StringBuilder(); + lexer.nextToken(); + String prefix = ""; + while (lexer.token() != Token.RBRACKET) { + if (lexer.token() != Token.IDENTIFIER + && lexer.token() != Token.INDEX + && lexer.token() != Token.LITERAL_CHARS) { + throw new ParserException( + "All items between Brackets should be identifiers , got:" + lexer.token()); + } + identifier.append(prefix); + identifier.append(lexer.stringVal()); + prefix = " "; + lexer.nextToken(); + } + + accept(Token.RBRACKET); + return new SQLIdentifierExpr(identifier.toString()); + } else if (lexer.token() == Token.NOT) { + lexer.nextToken(); + SQLExpr sqlExpr; + if (lexer.token() == Token.EXISTS) { + lexer.nextToken(); + accept(Token.LPAREN); + sqlExpr = new SQLExistsExpr(createSelectParser().select(), true); + accept(Token.RPAREN); + } else if (lexer.token() == Token.LPAREN) { + lexer.nextToken(); - if (expr instanceof SQLName || expr instanceof SQLDefaultExpr) { - String methodName; - - SQLMethodInvokeExpr methodInvokeExpr; - if (expr instanceof SQLPropertyExpr) { - methodName = ((SQLPropertyExpr) expr).getName(); - methodInvokeExpr = new SQLMethodInvokeExpr(methodName); - methodInvokeExpr.setOwner(((SQLPropertyExpr) 
expr).getOwner()); - } else { - methodName = expr.toString(); - methodInvokeExpr = new SQLMethodInvokeExpr(methodName); - } + SQLExpr notTarget = expr(); - if (isAggreateFunction(methodName)) { - SQLAggregateExpr aggregateExpr = parseAggregateExpr(methodName); + accept(Token.RPAREN); - return aggregateExpr; - } + sqlExpr = new SQLNotExpr(notTarget); - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } + return primaryRest(sqlExpr); + } else { + SQLExpr restExpr = relational(); + sqlExpr = new SQLNotExpr(restExpr); + } + return sqlExpr; + } - accept(Token.RPAREN); + boolean parenWrapped = lexer.token() == Token.LPAREN; - return primaryRest(methodInvokeExpr); - } + SQLExpr expr = primary2(); - throw new ParserException("Syntax error: " + lexer.token()); + // keep track of if the identifier is wrapped in parens + if (parenWrapped && expr instanceof SQLIdentifierExpr) { + expr = new SQLParensIdentifierExpr((SQLIdentifierExpr) expr); } + return expr; + } - public SQLExpr primary() { + public static String[] AGGREGATE_FUNCTIONS = { + "AVG", "COUNT", "GROUP_CONCAT", "MAX", "MIN", "STDDEV", "SUM" + }; - if (lexer.token() == Token.LBRACE) { - lexer.nextToken(); - boolean foundRBrace = false; - if (lexer.stringVal().equals("ts")) { - String current = lexer.stringVal(); - do { - if (current.equals(lexer.token().RBRACE.name())) { - foundRBrace = true; - break; - } - lexer.nextToken(); - current = lexer.token().name(); - } while (!foundRBrace && !current.trim().equals("")); - - if (foundRBrace) { - SQLOdbcExpr sdle = new SQLOdbcExpr(lexer.stringVal()); - - accept(Token.RBRACE); - return sdle; - } else { - throw new ParserException("Error. Unable to find closing RBRACE"); - } - } else { - throw new ParserException("Error. 
Unable to parse ODBC Literal Timestamp"); - } - } else if (lexer.token() == Token.LBRACKET) { - StringBuilder identifier = new StringBuilder(); - lexer.nextToken(); - String prefix = ""; - while (lexer.token() != Token.RBRACKET) { - if (lexer.token() != Token.IDENTIFIER && lexer.token() != Token.INDEX - && lexer.token() != Token.LITERAL_CHARS) { - throw new ParserException("All items between Brackets should be identifiers , got:" - + lexer.token()); - } - identifier.append(prefix); - identifier.append(lexer.stringVal()); - prefix = " "; - lexer.nextToken(); - } + public SQLExpr relationalRest(SQLExpr expr) { + if (identifierEquals("REGEXP")) { + lexer.nextToken(); + SQLExpr rightExp = equality(); - accept(Token.RBRACKET); - return new SQLIdentifierExpr(identifier.toString()); - } else if (lexer.token() == Token.NOT) { - lexer.nextToken(); - SQLExpr sqlExpr; - if (lexer.token() == Token.EXISTS) { - lexer.nextToken(); - accept(Token.LPAREN); - sqlExpr = new SQLExistsExpr(createSelectParser().select(), true); - accept(Token.RPAREN); - } else if (lexer.token() == Token.LPAREN) { - lexer.nextToken(); + rightExp = relationalRest(rightExp); - SQLExpr notTarget = expr(); + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.RegExp, rightExp, JdbcConstants.MYSQL); + } - accept(Token.RPAREN); + return super.relationalRest(expr); + } - sqlExpr = new SQLNotExpr(notTarget); + public SQLExpr multiplicativeRest(SQLExpr expr) { + if (lexer.token() == Token.IDENTIFIER && "MOD".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); + SQLExpr rightExp = primary(); - return primaryRest(sqlExpr); - } else { - SQLExpr restExpr = relational(); - sqlExpr = new SQLNotExpr(restExpr); - } - return sqlExpr; - } + rightExp = relationalRest(rightExp); + + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, JdbcConstants.MYSQL); + } - boolean parenWrapped = lexer.token() == Token.LPAREN; + return super.multiplicativeRest(expr); + } - SQLExpr expr = primary2(); + public 
SQLExpr notRationalRest(SQLExpr expr) { + if (identifierEquals("REGEXP")) { + lexer.nextToken(); + SQLExpr rightExp = primary(); - // keep track of if the identifier is wrapped in parens - if (parenWrapped && expr instanceof SQLIdentifierExpr) { - expr = new SQLParensIdentifierExpr((SQLIdentifierExpr) expr); - } + rightExp = relationalRest(rightExp); - return expr; + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRegExp, rightExp, JdbcConstants.MYSQL); } - public static String[] AGGREGATE_FUNCTIONS = {"AVG", "COUNT", "GROUP_CONCAT", "MAX", "MIN", "STDDEV", "SUM"}; + return super.notRationalRest(expr); + } + public SQLExpr primary2() { + final Token tok = lexer.token(); - public SQLExpr relationalRest(SQLExpr expr) { - if (identifierEquals("REGEXP")) { - lexer.nextToken(); - SQLExpr rightExp = equality(); + if (identifierEquals("outfile")) { + lexer.nextToken(); + SQLExpr file = primary(); + SQLExpr expr = new MySqlOutFileExpr(file); - rightExp = relationalRest(rightExp); + return primaryRest(expr); + } - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.RegExp, rightExp, JdbcConstants.MYSQL); + switch (tok) { + case LITERAL_ALIAS: + String aliasValue = lexer.stringVal(); + lexer.nextToken(); + return primaryRest(new SQLCharExpr(aliasValue)); + case VARIANT: + SQLVariantRefExpr varRefExpr = new SQLVariantRefExpr(lexer.stringVal()); + lexer.nextToken(); + if (varRefExpr.getName().equalsIgnoreCase("@@global")) { + accept(Token.DOT); + varRefExpr = new SQLVariantRefExpr(lexer.stringVal(), true); + lexer.nextToken(); + } else if (varRefExpr.getName().equals("@") && lexer.token() == Token.LITERAL_CHARS) { + varRefExpr.setName("@'" + lexer.stringVal() + "'"); + lexer.nextToken(); + } else if (varRefExpr.getName().equals("@@") && lexer.token() == Token.LITERAL_CHARS) { + varRefExpr.setName("@@'" + lexer.stringVal() + "'"); + lexer.nextToken(); + } + return primaryRest(varRefExpr); + case VALUES: + lexer.nextToken(); + if (lexer.token() != Token.LPAREN) { + throw 
new ParserException("Syntax error: " + lexer.token()); + } + return this.methodRest(new SQLIdentifierExpr("VALUES"), true); + case BINARY: + lexer.nextToken(); + if (lexer.token() == Token.COMMA + || lexer.token() == Token.SEMI + || lexer.token() == Token.EOF) { + return new SQLIdentifierExpr("BINARY"); + } else { + SQLUnaryExpr binaryExpr = new SQLUnaryExpr(SQLUnaryOperator.BINARY, expr()); + return primaryRest(binaryExpr); } + case CACHE: + case GROUP: + lexer.nextToken(); + return primaryRest(new SQLIdentifierExpr(lexer.stringVal())); + case DOT: + lexer.nextToken(); + return primaryRest(new SQLIdentifierExpr("." + lexer.stringVal())); + default: + return super.primary(); + } + } - return super.relationalRest(expr); + public final SQLExpr primaryRest(SQLExpr expr) { + if (expr == null) { + throw new IllegalArgumentException("Illegal expression: NULL"); } - public SQLExpr multiplicativeRest(SQLExpr expr) { - if (lexer.token() == Token.IDENTIFIER && "MOD".equalsIgnoreCase(lexer.stringVal())) { + if (lexer.token() == Token.LITERAL_CHARS) { + if (expr instanceof SQLIdentifierExpr) { + SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; + String ident = identExpr.getName(); + + if (ident.equalsIgnoreCase("x")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + expr = new SQLHexExpr(charValue); + + return primaryRest(expr); + } else if (ident.equalsIgnoreCase("b")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + expr = new SQLBinaryExpr(charValue); + + return primaryRest(expr); + } else if (ident.startsWith("_")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + + MySqlCharExpr mysqlCharExpr = new MySqlCharExpr(charValue); + mysqlCharExpr.setCharset(identExpr.getName()); + if (identifierEquals("COLLATE")) { lexer.nextToken(); - SQLExpr rightExp = primary(); - rightExp = relationalRest(rightExp); + String collate = lexer.stringVal(); + mysqlCharExpr.setCollate(collate); + accept(Token.IDENTIFIER); + } + + expr = 
mysqlCharExpr; - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, JdbcConstants.MYSQL); + return primaryRest(expr); } + } else if (expr instanceof SQLCharExpr) { + SQLMethodInvokeExpr concat = new SQLMethodInvokeExpr("CONCAT"); + concat.addParameter(expr); + do { + String chars = lexer.stringVal(); + concat.addParameter(new SQLCharExpr(chars)); + lexer.nextToken(); + } while (lexer.token() == Token.LITERAL_CHARS || lexer.token() == Token.LITERAL_ALIAS); + expr = concat; + } + } else if (lexer.token() == Token.IDENTIFIER) { + if (expr instanceof SQLHexExpr) { + if ("USING".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + String charSet = lexer.stringVal(); + lexer.nextToken(); + expr.getAttributes().put("USING", charSet); + + return primaryRest(expr); + } + } else if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); - return super.multiplicativeRest(expr); - } + if (lexer.token() == Token.EQ) { + lexer.nextToken(); + } - public SQLExpr notRationalRest(SQLExpr expr) { - if (identifierEquals("REGEXP")) { - lexer.nextToken(); - SQLExpr rightExp = primary(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } - rightExp = relationalRest(rightExp); + String collate = lexer.stringVal(); + lexer.nextToken(); - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRegExp, rightExp, JdbcConstants.MYSQL); - } + expr = + new SQLBinaryOpExpr( + expr, + SQLBinaryOperator.COLLATE, + new SQLIdentifierExpr(collate), + JdbcConstants.MYSQL); - return super.notRationalRest(expr); - } + return primaryRest(expr); + } else if (expr instanceof SQLVariantRefExpr) { + if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); - public SQLExpr primary2() { - final Token tok = lexer.token(); + if (lexer.token() != Token.IDENTIFIER) { + throw new 
ParserException("Syntax error: " + lexer.token()); + } - if (identifierEquals("outfile")) { - lexer.nextToken(); - SQLExpr file = primary(); - SQLExpr expr = new MySqlOutFileExpr(file); + String collate = lexer.stringVal(); + lexer.nextToken(); - return primaryRest(expr); + expr.putAttribute("COLLATE", collate); + return primaryRest(expr); } - - switch (tok) { - case LITERAL_ALIAS: - String aliasValue = lexer.stringVal(); - lexer.nextToken(); - return primaryRest(new SQLCharExpr(aliasValue)); - case VARIANT: - SQLVariantRefExpr varRefExpr = new SQLVariantRefExpr(lexer.stringVal()); - lexer.nextToken(); - if (varRefExpr.getName().equalsIgnoreCase("@@global")) { - accept(Token.DOT); - varRefExpr = new SQLVariantRefExpr(lexer.stringVal(), true); - lexer.nextToken(); - } else if (varRefExpr.getName().equals("@") && lexer.token() == Token.LITERAL_CHARS) { - varRefExpr.setName("@'" + lexer.stringVal() + "'"); - lexer.nextToken(); - } else if (varRefExpr.getName().equals("@@") && lexer.token() == Token.LITERAL_CHARS) { - varRefExpr.setName("@@'" + lexer.stringVal() + "'"); - lexer.nextToken(); - } - return primaryRest(varRefExpr); - case VALUES: - lexer.nextToken(); - if (lexer.token() != Token.LPAREN) { - throw new ParserException("Syntax error: " + lexer.token()); - } - return this.methodRest(new SQLIdentifierExpr("VALUES"), true); - case BINARY: - lexer.nextToken(); - if (lexer.token() == Token.COMMA || lexer.token() == Token.SEMI || lexer.token() == Token.EOF) { - return new SQLIdentifierExpr("BINARY"); - } else { - SQLUnaryExpr binaryExpr = new SQLUnaryExpr(SQLUnaryOperator.BINARY, expr()); - return primaryRest(binaryExpr); - } - case CACHE: - case GROUP: - lexer.nextToken(); - return primaryRest(new SQLIdentifierExpr(lexer.stringVal())); - case DOT: - lexer.nextToken(); - return primaryRest(new SQLIdentifierExpr("." 
+ lexer.stringVal())); - default: - return super.primary(); + } else if (expr instanceof SQLIntegerExpr) { + SQLIntegerExpr intExpr = (SQLIntegerExpr) expr; + String binaryString = lexer.stringVal(); + if (intExpr.getNumber().intValue() == 0 && binaryString.startsWith("b")) { + lexer.nextToken(); + expr = new SQLBinaryExpr(binaryString.substring(1)); + + return primaryRest(expr); } - + } } + if (lexer.token() == Token.LPAREN && expr instanceof SQLIdentifierExpr) { + SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; + String ident = identExpr.getName(); - public final SQLExpr primaryRest(SQLExpr expr) { - if (expr == null) { - throw new IllegalArgumentException("Illegal expression: NULL"); - } + if ("EXTRACT".equalsIgnoreCase(ident)) { + lexer.nextToken(); - if (lexer.token() == Token.LITERAL_CHARS) { - if (expr instanceof SQLIdentifierExpr) { - SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; - String ident = identExpr.getName(); - - if (ident.equalsIgnoreCase("x")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - expr = new SQLHexExpr(charValue); - - return primaryRest(expr); - } else if (ident.equalsIgnoreCase("b")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - expr = new SQLBinaryExpr(charValue); - - return primaryRest(expr); - } else if (ident.startsWith("_")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - - MySqlCharExpr mysqlCharExpr = new MySqlCharExpr(charValue); - mysqlCharExpr.setCharset(identExpr.getName()); - if (identifierEquals("COLLATE")) { - lexer.nextToken(); - - String collate = lexer.stringVal(); - mysqlCharExpr.setCollate(collate); - accept(Token.IDENTIFIER); - } - - expr = mysqlCharExpr; - - return primaryRest(expr); - } - } else if (expr instanceof SQLCharExpr) { - SQLMethodInvokeExpr concat = new SQLMethodInvokeExpr("CONCAT"); - concat.addParameter(expr); - do { - String chars = lexer.stringVal(); - concat.addParameter(new SQLCharExpr(chars)); - lexer.nextToken(); - } while 
(lexer.token() == Token.LITERAL_CHARS || lexer.token() == Token.LITERAL_ALIAS); - expr = concat; - } - } else if (lexer.token() == Token.IDENTIFIER) { - if (expr instanceof SQLHexExpr) { - if ("USING".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - String charSet = lexer.stringVal(); - lexer.nextToken(); - expr.getAttributes().put("USING", charSet); - - return primaryRest(expr); - } - } else if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - - if (lexer.token() == Token.EQ) { - lexer.nextToken(); - } - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String collate = lexer.stringVal(); - lexer.nextToken(); - - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.COLLATE, - new SQLIdentifierExpr(collate), JdbcConstants.MYSQL); - - return primaryRest(expr); - } else if (expr instanceof SQLVariantRefExpr) { - if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String collate = lexer.stringVal(); - lexer.nextToken(); - - expr.putAttribute("COLLATE", collate); - - return primaryRest(expr); - } - } else if (expr instanceof SQLIntegerExpr) { - SQLIntegerExpr intExpr = (SQLIntegerExpr) expr; - String binaryString = lexer.stringVal(); - if (intExpr.getNumber().intValue() == 0 && binaryString.startsWith("b")) { - lexer.nextToken(); - expr = new SQLBinaryExpr(binaryString.substring(1)); - - return primaryRest(expr); - } - } + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); } - if (lexer.token() == Token.LPAREN && expr instanceof SQLIdentifierExpr) { - SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; - String ident = identExpr.getName(); - - if 
("EXTRACT".equalsIgnoreCase(ident)) { - lexer.nextToken(); - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String unitVal = lexer.stringVal(); - MySqlIntervalUnit unit = MySqlIntervalUnit.valueOf(unitVal.toUpperCase()); - lexer.nextToken(); - - accept(Token.FROM); - - SQLExpr value = expr(); - - MySqlExtractExpr extract = new MySqlExtractExpr(); - extract.setValue(value); - extract.setUnit(unit); - accept(Token.RPAREN); - - expr = extract; - - return primaryRest(expr); - } else if ("SUBSTRING".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - for (; ; ) { - SQLExpr param = expr(); - methodInvokeExpr.addParameter(param); - - if (lexer.token() == Token.COMMA) { - lexer.nextToken(); - continue; - } else if (lexer.token() == Token.FROM) { - lexer.nextToken(); - SQLExpr from = expr(); - methodInvokeExpr.addParameter(from); - - if (lexer.token() == Token.FOR) { - lexer.nextToken(); - SQLExpr forExpr = expr(); - methodInvokeExpr.addParameter(forExpr); - } - break; - } else if (lexer.token() == Token.RPAREN) { - break; - } else { - throw new ParserException("Syntax error: " + lexer.token()); - } - } - - accept(Token.RPAREN); - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("TRIM".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - - if (lexer.token() == Token.IDENTIFIER) { - String flagVal = lexer.stringVal(); - if ("LEADING".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.getAttributes().put("TRIM_TYPE", "LEADING"); - } else if ("BOTH".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.getAttributes().put("TRIM_TYPE", "BOTH"); - } else if ("TRAILING".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.putAttribute("TRIM_TYPE", "TRAILING"); - } - } - - SQLExpr param = expr(); - 
methodInvokeExpr.addParameter(param); - - if (lexer.token() == Token.FROM) { - lexer.nextToken(); - SQLExpr from = expr(); - methodInvokeExpr.putAttribute("FROM", from); - } - - accept(Token.RPAREN); - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("MATCH".equalsIgnoreCase(ident)) { - lexer.nextToken(); - MySqlMatchAgainstExpr matchAgainstExpr = new MySqlMatchAgainstExpr(); - - if (lexer.token() == Token.RPAREN) { - lexer.nextToken(); - } else { - exprList(matchAgainstExpr.getColumns(), matchAgainstExpr); - accept(Token.RPAREN); - } - - acceptIdentifier("AGAINST"); - - accept(Token.LPAREN); - SQLExpr against = primary(); - matchAgainstExpr.setAgainst(against); - - if (lexer.token() == Token.IN) { - lexer.nextToken(); - if (identifierEquals("NATURAL")) { - lexer.nextToken(); - acceptIdentifier("LANGUAGE"); - acceptIdentifier("MODE"); - if (lexer.token() == Token.WITH) { - lexer.nextToken(); - acceptIdentifier("QUERY"); - acceptIdentifier("EXPANSION"); - matchAgainstExpr.setSearchModifier( - MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE_WITH_QUERY_EXPANSION); - } else { - matchAgainstExpr.setSearchModifier( - MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE); - } - } else if (identifierEquals("BOOLEAN")) { - lexer.nextToken(); - acceptIdentifier("MODE"); - matchAgainstExpr.setSearchModifier(MySqlMatchAgainstExpr.SearchModifier.IN_BOOLEAN_MODE); - } else { - throw new ParserException("Syntax error: " + lexer.token()); - } - } else if (lexer.token() == Token.WITH) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - accept(Token.RPAREN); - - expr = matchAgainstExpr; - - return primaryRest(expr); - } else if ("CONVERT".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } - - if (identifierEquals("USING")) { - lexer.nextToken(); 
- if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - String charset = lexer.stringVal(); - lexer.nextToken(); - methodInvokeExpr.putAttribute("USING", charset); - } - - accept(Token.RPAREN); - - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("POSITION".equalsIgnoreCase(ident)) { - accept(Token.LPAREN); - SQLExpr subStr = this.primary(); - accept(Token.IN); - SQLExpr str = this.expr(); - accept(Token.RPAREN); - - SQLMethodInvokeExpr locate = new SQLMethodInvokeExpr("LOCATE"); - locate.addParameter(subStr); - locate.addParameter(str); - - expr = locate; - return primaryRest(expr); - } - } + String unitVal = lexer.stringVal(); + MySqlIntervalUnit unit = MySqlIntervalUnit.valueOf(unitVal.toUpperCase()); + lexer.nextToken(); + + accept(Token.FROM); + + SQLExpr value = expr(); + + MySqlExtractExpr extract = new MySqlExtractExpr(); + extract.setValue(value); + extract.setUnit(unit); + accept(Token.RPAREN); + + expr = extract; + + return primaryRest(expr); + } else if ("SUBSTRING".equalsIgnoreCase(ident)) { + lexer.nextToken(); + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); + for (; ; ) { + SQLExpr param = expr(); + methodInvokeExpr.addParameter(param); - if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + if (lexer.token() == Token.COMMA) { lexer.nextToken(); - MySqlUserName userName = new MySqlUserName(); - if (expr instanceof SQLCharExpr) { - userName.setUserName(((SQLCharExpr) expr).toString()); - } else { - userName.setUserName(((SQLIdentifierExpr) expr).getName()); - } + continue; + } else if (lexer.token() == Token.FROM) { + lexer.nextToken(); + SQLExpr from = expr(); + methodInvokeExpr.addParameter(from); - if (lexer.token() == Token.LITERAL_CHARS) { - userName.setHost("'" + lexer.stringVal() + "'"); - } else { - userName.setHost(lexer.stringVal()); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + SQLExpr forExpr = expr(); + 
methodInvokeExpr.addParameter(forExpr); } - lexer.nextToken(); - return userName; + break; + } else if (lexer.token() == Token.RPAREN) { + break; + } else { + throw new ParserException("Syntax error: " + lexer.token()); + } } - // - if (expr instanceof SQLMethodInvokeExpr && lexer.token() == Token.LBRACKET) { + accept(Token.RPAREN); + expr = methodInvokeExpr; + + return primaryRest(expr); + } else if ("TRIM".equalsIgnoreCase(ident)) { + lexer.nextToken(); + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); + + if (lexer.token() == Token.IDENTIFIER) { + String flagVal = lexer.stringVal(); + if ("LEADING".equalsIgnoreCase(flagVal)) { + lexer.nextToken(); + methodInvokeExpr.getAttributes().put("TRIM_TYPE", "LEADING"); + } else if ("BOTH".equalsIgnoreCase(flagVal)) { + lexer.nextToken(); + methodInvokeExpr.getAttributes().put("TRIM_TYPE", "BOTH"); + } else if ("TRAILING".equalsIgnoreCase(flagVal)) { lexer.nextToken(); - expr = bracketRest(expr); - return primaryRest(expr); + methodInvokeExpr.putAttribute("TRIM_TYPE", "TRAILING"); + } } - if (lexer.token() == Token.ERROR) { - throw new ParserException("Syntax error, token: " + lexer.token() + " " + lexer.stringVal() + ", pos: " - + lexer.pos()); - } + SQLExpr param = expr(); + methodInvokeExpr.addParameter(param); - /** - * When the druid parser parses the quoted field in SELECT clause, e.g. SELECT `b`.`lastname` FROM bank AS `b`, - * "`b`" is recognized as an identifier expr, and the token is DOT, then the next identifier "`lastname`" would - * be recognized as the property name of "`b`". The parser creates a SQLPropertyExpr with owner of "`b`" and - * property name of "`lastname`". - * - * The following block of code prevents this specific case to generate SQLPropertyExpr, but corrects the parser - * to generate a SQLIdentifierExpr with expr = "`b`.`lastname`". 
- */ - if (lexer.token() == Token.DOT && expr instanceof SQLIdentifierExpr) { - if (isQuoted(((SQLIdentifierExpr) expr).getName(), "`")) { - lexer.nextToken(); - ((SQLIdentifierExpr) expr).setName(((SQLIdentifierExpr) expr).getName() + "." + lexer.stringVal()); - lexer.nextToken(); - } + if (lexer.token() == Token.FROM) { + lexer.nextToken(); + SQLExpr from = expr(); + methodInvokeExpr.putAttribute("FROM", from); } - return super.primaryRest(expr); - } + accept(Token.RPAREN); + expr = methodInvokeExpr; - protected SQLExpr bracketRest(SQLExpr expr) { - Number index; + return primaryRest(expr); + } else if ("MATCH".equalsIgnoreCase(ident)) { + lexer.nextToken(); + MySqlMatchAgainstExpr matchAgainstExpr = new MySqlMatchAgainstExpr(); - if (lexer.token() == Token.LITERAL_INT) { - index = lexer.integerValue(); - lexer.nextToken(); + if (lexer.token() == Token.RPAREN) { + lexer.nextToken(); } else { - throw new ParserException("Syntax error : " + lexer.stringVal()); + exprList(matchAgainstExpr.getColumns(), matchAgainstExpr); + accept(Token.RPAREN); } - if (expr instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr; - methodInvokeExpr.getParameters().add(new SQLIntegerExpr(index)); + acceptIdentifier("AGAINST"); + + accept(Token.LPAREN); + SQLExpr against = primary(); + matchAgainstExpr.setAgainst(against); + + if (lexer.token() == Token.IN) { + lexer.nextToken(); + if (identifierEquals("NATURAL")) { + lexer.nextToken(); + acceptIdentifier("LANGUAGE"); + acceptIdentifier("MODE"); + if (lexer.token() == Token.WITH) { + lexer.nextToken(); + acceptIdentifier("QUERY"); + acceptIdentifier("EXPANSION"); + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier + .IN_NATURAL_LANGUAGE_MODE_WITH_QUERY_EXPANSION); + } else { + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE); + } + } else if (identifierEquals("BOOLEAN")) { + lexer.nextToken(); + 
acceptIdentifier("MODE"); + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier.IN_BOOLEAN_MODE); + } else { + throw new ParserException("Syntax error: " + lexer.token()); + } + } else if (lexer.token() == Token.WITH) { + throw new ParserException("Syntax error: " + lexer.token()); } + + accept(Token.RPAREN); + + expr = matchAgainstExpr; + + return primaryRest(expr); + } else if ("CONVERT".equalsIgnoreCase(ident)) { lexer.nextToken(); - expr = primaryRest(expr); - return expr; - } + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - public SQLSelectParser createSelectParser() { - return new ElasticSqlSelectParser(this); - } + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } - protected SQLExpr parseInterval() { - accept(Token.INTERVAL); + if (identifierEquals("USING")) { + lexer.nextToken(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + String charset = lexer.stringVal(); + lexer.nextToken(); + methodInvokeExpr.putAttribute("USING", charset); + } - if (lexer.token() == Token.LPAREN) { - lexer.nextToken(); + accept(Token.RPAREN); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("INTERVAL"); - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } + expr = methodInvokeExpr; - accept(Token.RPAREN); + return primaryRest(expr); + } else if ("POSITION".equalsIgnoreCase(ident)) { + accept(Token.LPAREN); + SQLExpr subStr = this.primary(); + accept(Token.IN); + SQLExpr str = this.expr(); + accept(Token.RPAREN); - return primaryRest(methodInvokeExpr); - } else { - SQLExpr value = expr(); + SQLMethodInvokeExpr locate = new SQLMethodInvokeExpr("LOCATE"); + locate.addParameter(subStr); + locate.addParameter(str); - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } + expr = locate; + return 
primaryRest(expr); + } + } - String unit = lexer.stringVal(); - lexer.nextToken(); + if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + lexer.nextToken(); + MySqlUserName userName = new MySqlUserName(); + if (expr instanceof SQLCharExpr) { + userName.setUserName(((SQLCharExpr) expr).toString()); + } else { + userName.setUserName(((SQLIdentifierExpr) expr).getName()); + } + + if (lexer.token() == Token.LITERAL_CHARS) { + userName.setHost("'" + lexer.stringVal() + "'"); + } else { + userName.setHost(lexer.stringVal()); + } + lexer.nextToken(); + return userName; + } - MySqlIntervalExpr intervalExpr = new MySqlIntervalExpr(); - intervalExpr.setValue(value); - intervalExpr.setUnit(MySqlIntervalUnit.valueOf(unit.toUpperCase())); + // + if (expr instanceof SQLMethodInvokeExpr && lexer.token() == Token.LBRACKET) { + lexer.nextToken(); + expr = bracketRest(expr); + return primaryRest(expr); + } - return intervalExpr; - } + if (lexer.token() == Token.ERROR) { + throw new ParserException( + "Syntax error, token: " + + lexer.token() + + " " + + lexer.stringVal() + + ", pos: " + + lexer.pos()); + } + + /** + * When the druid parser parses the quoted field in SELECT clause, e.g. SELECT `b`.`lastname` + * FROM bank AS `b`, "`b`" is recognized as an identifier expr, and the token is DOT, then the + * next identifier "`lastname`" would be recognized as the property name of "`b`". The parser + * creates a SQLPropertyExpr with owner of "`b`" and property name of "`lastname`". + * + *

The following block of code prevents this specific case to generate SQLPropertyExpr, but + * corrects the parser to generate a SQLIdentifierExpr with expr = "`b`.`lastname`". + */ + if (lexer.token() == Token.DOT && expr instanceof SQLIdentifierExpr) { + if (isQuoted(((SQLIdentifierExpr) expr).getName(), "`")) { + lexer.nextToken(); + ((SQLIdentifierExpr) expr) + .setName(((SQLIdentifierExpr) expr).getName() + "." + lexer.stringVal()); + lexer.nextToken(); + } } - public SQLColumnDefinition parseColumn() { - MySqlSQLColumnDefinition column = new MySqlSQLColumnDefinition(); - column.setName(name()); - column.setDataType(parseDataType()); + return super.primaryRest(expr); + } + + protected SQLExpr bracketRest(SQLExpr expr) { + Number index; - return parseColumnRest(column); + if (lexer.token() == Token.LITERAL_INT) { + index = lexer.integerValue(); + lexer.nextToken(); + } else { + throw new ParserException("Syntax error : " + lexer.stringVal()); } - public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) { - if (lexer.token() == Token.ON) { - lexer.nextToken(); - accept(Token.UPDATE); - SQLExpr expr = this.expr(); - ((MySqlSQLColumnDefinition) column).setOnUpdate(expr); - } + if (expr instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr; + methodInvokeExpr.getParameters().add(new SQLIntegerExpr(index)); + } + lexer.nextToken(); + expr = primaryRest(expr); + return expr; + } - if (identifierEquals("AUTO_INCREMENT")) { - lexer.nextToken(); - if (column instanceof MySqlSQLColumnDefinition) { - ((MySqlSQLColumnDefinition) column).setAutoIncrement(true); - } - return parseColumnRest(column); - } + public SQLSelectParser createSelectParser() { + return new ElasticSqlSelectParser(this); + } - if (identifierEquals("precision") && column.getDataType().getName().equalsIgnoreCase("double")) { - lexer.nextToken(); - } + protected SQLExpr parseInterval() { + accept(Token.INTERVAL); - if 
(identifierEquals("PARTITION")) { - throw new ParserException("syntax error " + lexer.token() + " " + lexer.stringVal()); - } + if (lexer.token() == Token.LPAREN) { + lexer.nextToken(); - if (identifierEquals("STORAGE")) { - lexer.nextToken(); - SQLExpr expr = expr(); - if (column instanceof MySqlSQLColumnDefinition) { - ((MySqlSQLColumnDefinition) column).setStorage(expr); - } - } + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("INTERVAL"); + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } - super.parseColumnRest(column); + accept(Token.RPAREN); - return column; + return primaryRest(methodInvokeExpr); + } else { + SQLExpr value = expr(); + + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + + String unit = lexer.stringVal(); + lexer.nextToken(); + + MySqlIntervalExpr intervalExpr = new MySqlIntervalExpr(); + intervalExpr.setValue(value); + intervalExpr.setUnit(MySqlIntervalUnit.valueOf(unit.toUpperCase())); + + return intervalExpr; + } + } + + public SQLColumnDefinition parseColumn() { + MySqlSQLColumnDefinition column = new MySqlSQLColumnDefinition(); + column.setName(name()); + column.setDataType(parseDataType()); + + return parseColumnRest(column); + } + + public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) { + if (lexer.token() == Token.ON) { + lexer.nextToken(); + accept(Token.UPDATE); + SQLExpr expr = this.expr(); + ((MySqlSQLColumnDefinition) column).setOnUpdate(expr); } - protected SQLDataType parseDataTypeRest(SQLDataType dataType) { - super.parseDataTypeRest(dataType); + if (identifierEquals("AUTO_INCREMENT")) { + lexer.nextToken(); + if (column instanceof MySqlSQLColumnDefinition) { + ((MySqlSQLColumnDefinition) column).setAutoIncrement(true); + } + return parseColumnRest(column); + } - if (identifierEquals("UNSIGNED")) { - lexer.nextToken(); - dataType.getAttributes().put("UNSIGNED", true); - } + if 
(identifierEquals("precision") + && column.getDataType().getName().equalsIgnoreCase("double")) { + lexer.nextToken(); + } - if (identifierEquals("ZEROFILL")) { - lexer.nextToken(); - dataType.getAttributes().put("ZEROFILL", true); - } + if (identifierEquals("PARTITION")) { + throw new ParserException("syntax error " + lexer.token() + " " + lexer.stringVal()); + } - return dataType; + if (identifierEquals("STORAGE")) { + lexer.nextToken(); + SQLExpr expr = expr(); + if (column instanceof MySqlSQLColumnDefinition) { + ((MySqlSQLColumnDefinition) column).setStorage(expr); + } } - public SQLExpr orRest(SQLExpr expr) { + super.parseColumnRest(column); - for (; ; ) { - if (lexer.token() == Token.OR || lexer.token() == Token.BARBAR) { - lexer.nextToken(); - SQLExpr rightExp = and(); + return column; + } - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanOr, rightExp, JdbcConstants.MYSQL); - } else if (lexer.token() == Token.XOR) { - lexer.nextToken(); - SQLExpr rightExp = and(); + protected SQLDataType parseDataTypeRest(SQLDataType dataType) { + super.parseDataTypeRest(dataType); - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanXor, rightExp, JdbcConstants.MYSQL); - } else { - break; - } - } + if (identifierEquals("UNSIGNED")) { + lexer.nextToken(); + dataType.getAttributes().put("UNSIGNED", true); + } - return expr; + if (identifierEquals("ZEROFILL")) { + lexer.nextToken(); + dataType.getAttributes().put("ZEROFILL", true); } - public SQLExpr additiveRest(SQLExpr expr) { - if (lexer.token() == Token.PLUS) { - lexer.nextToken(); - SQLExpr rightExp = multiplicative(); + return dataType; + } - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Add, rightExp, JdbcConstants.MYSQL); - expr = additiveRest(expr); - } else if (lexer.token() == Token.SUB) { - lexer.nextToken(); - SQLExpr rightExp = multiplicative(); + public SQLExpr orRest(SQLExpr expr) { - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Subtract, rightExp, JdbcConstants.MYSQL); - expr = 
additiveRest(expr); - } + for (; ; ) { + if (lexer.token() == Token.OR || lexer.token() == Token.BARBAR) { + lexer.nextToken(); + SQLExpr rightExp = and(); + + expr = + new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanOr, rightExp, JdbcConstants.MYSQL); + } else if (lexer.token() == Token.XOR) { + lexer.nextToken(); + SQLExpr rightExp = and(); - return expr; + expr = + new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanXor, rightExp, JdbcConstants.MYSQL); + } else { + break; + } } - public SQLAssignItem parseAssignItem() { - SQLAssignItem item = new SQLAssignItem(); + return expr; + } - SQLExpr var = primary(); + public SQLExpr additiveRest(SQLExpr expr) { + if (lexer.token() == Token.PLUS) { + lexer.nextToken(); + SQLExpr rightExp = multiplicative(); - String ident = null; - if (var instanceof SQLIdentifierExpr) { - ident = ((SQLIdentifierExpr) var).getName(); + expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Add, rightExp, JdbcConstants.MYSQL); + expr = additiveRest(expr); + } else if (lexer.token() == Token.SUB) { + lexer.nextToken(); + SQLExpr rightExp = multiplicative(); - if ("GLOBAL".equalsIgnoreCase(ident)) { - ident = lexer.stringVal(); - lexer.nextToken(); - var = new SQLVariantRefExpr(ident, true); - } else if ("SESSION".equalsIgnoreCase(ident)) { - ident = lexer.stringVal(); - lexer.nextToken(); - var = new SQLVariantRefExpr(ident, false); - } else { - var = new SQLVariantRefExpr(ident); - } - } + expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Subtract, rightExp, JdbcConstants.MYSQL); + expr = additiveRest(expr); + } - if ("NAMES".equalsIgnoreCase(ident)) { - // skip - } else if ("CHARACTER".equalsIgnoreCase(ident)) { - var = new SQLIdentifierExpr("CHARACTER SET"); - accept(Token.SET); - if (lexer.token() == Token.EQ) { - lexer.nextToken(); - } - } else { - if (lexer.token() == Token.COLONEQ) { - lexer.nextToken(); - } else { - accept(Token.EQ); - } - } + return expr; + } + + public SQLAssignItem parseAssignItem() { + SQLAssignItem item = new 
SQLAssignItem(); - item.setValue(this.expr()); + SQLExpr var = primary(); - item.setTarget(var); - return item; + String ident = null; + if (var instanceof SQLIdentifierExpr) { + ident = ((SQLIdentifierExpr) var).getName(); + + if ("GLOBAL".equalsIgnoreCase(ident)) { + ident = lexer.stringVal(); + lexer.nextToken(); + var = new SQLVariantRefExpr(ident, true); + } else if ("SESSION".equalsIgnoreCase(ident)) { + ident = lexer.stringVal(); + lexer.nextToken(); + var = new SQLVariantRefExpr(ident, false); + } else { + var = new SQLVariantRefExpr(ident); + } } - public SQLName nameRest(SQLName name) { - if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { - lexer.nextToken(); - MySqlUserName userName = new MySqlUserName(); - userName.setUserName(((SQLIdentifierExpr) name).getName()); + if ("NAMES".equalsIgnoreCase(ident)) { + // skip + } else if ("CHARACTER".equalsIgnoreCase(ident)) { + var = new SQLIdentifierExpr("CHARACTER SET"); + accept(Token.SET); + if (lexer.token() == Token.EQ) { + lexer.nextToken(); + } + } else { + if (lexer.token() == Token.COLONEQ) { + lexer.nextToken(); + } else { + accept(Token.EQ); + } + } - if (lexer.token() == Token.LITERAL_CHARS) { - userName.setHost("'" + lexer.stringVal() + "'"); - } else { - userName.setHost(lexer.stringVal()); - } - lexer.nextToken(); - return userName; - } - return super.nameRest(name); + item.setValue(this.expr()); + + item.setTarget(var); + return item; + } + + public SQLName nameRest(SQLName name) { + if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + lexer.nextToken(); + MySqlUserName userName = new MySqlUserName(); + userName.setUserName(((SQLIdentifierExpr) name).getName()); + + if (lexer.token() == Token.LITERAL_CHARS) { + userName.setHost("'" + lexer.stringVal() + "'"); + } else { + userName.setHost(lexer.stringVal()); + } + lexer.nextToken(); + return userName; } + return super.nameRest(name); + } - public MySqlSelectQueryBlock.Limit parseLimit() { - if 
(lexer.token() == Token.LIMIT) { - lexer.nextToken(); + public MySqlSelectQueryBlock.Limit parseLimit() { + if (lexer.token() == Token.LIMIT) { + lexer.nextToken(); - MySqlSelectQueryBlock.Limit limit = new MySqlSelectQueryBlock.Limit(); - - SQLExpr temp = this.expr(); - if (lexer.token() == (Token.COMMA)) { - limit.setOffset(temp); - lexer.nextToken(); - limit.setRowCount(this.expr()); - } else if (identifierEquals("OFFSET")) { - limit.setRowCount(temp); - lexer.nextToken(); - limit.setOffset(this.expr()); - } else { - limit.setRowCount(temp); - } - return limit; - } + MySqlSelectQueryBlock.Limit limit = new MySqlSelectQueryBlock.Limit(); - return null; + SQLExpr temp = this.expr(); + if (lexer.token() == (Token.COMMA)) { + limit.setOffset(temp); + lexer.nextToken(); + limit.setRowCount(this.expr()); + } else if (identifierEquals("OFFSET")) { + limit.setRowCount(temp); + lexer.nextToken(); + limit.setOffset(this.expr()); + } else { + limit.setRowCount(temp); + } + return limit; } - @Override - public MySqlPrimaryKey parsePrimaryKey() { - accept(Token.PRIMARY); - accept(Token.KEY); + return null; + } - MySqlPrimaryKey primaryKey = new MySqlPrimaryKey(); + @Override + public MySqlPrimaryKey parsePrimaryKey() { + accept(Token.PRIMARY); + accept(Token.KEY); - if (identifierEquals("USING")) { - lexer.nextToken(); - primaryKey.setIndexType(lexer.stringVal()); - lexer.nextToken(); - } + MySqlPrimaryKey primaryKey = new MySqlPrimaryKey(); - accept(Token.LPAREN); - for (; ; ) { - primaryKey.getColumns().add(this.expr()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } else { - lexer.nextToken(); - } - } - accept(Token.RPAREN); + if (identifierEquals("USING")) { + lexer.nextToken(); + primaryKey.setIndexType(lexer.stringVal()); + lexer.nextToken(); + } - return primaryKey; + accept(Token.LPAREN); + for (; ; ) { + primaryKey.getColumns().add(this.expr()); + if (!(lexer.token() == (Token.COMMA))) { + break; + } else { + lexer.nextToken(); + } } + accept(Token.RPAREN); 
- public MySqlUnique parseUnique() { - accept(Token.UNIQUE); + return primaryKey; + } - if (lexer.token() == Token.KEY) { - lexer.nextToken(); - } + public MySqlUnique parseUnique() { + accept(Token.UNIQUE); - if (lexer.token() == Token.INDEX) { - lexer.nextToken(); - } + if (lexer.token() == Token.KEY) { + lexer.nextToken(); + } - MySqlUnique unique = new MySqlUnique(); + if (lexer.token() == Token.INDEX) { + lexer.nextToken(); + } - if (lexer.token() != Token.LPAREN) { - SQLName indexName = name(); - unique.setIndexName(indexName); - } + MySqlUnique unique = new MySqlUnique(); - accept(Token.LPAREN); - for (; ; ) { - unique.getColumns().add(this.expr()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } else { - lexer.nextToken(); - } - } - accept(Token.RPAREN); + if (lexer.token() != Token.LPAREN) { + SQLName indexName = name(); + unique.setIndexName(indexName); + } - if (identifierEquals("USING")) { - lexer.nextToken(); - unique.setIndexType(lexer.stringVal()); - lexer.nextToken(); - } + accept(Token.LPAREN); + for (; ; ) { + unique.getColumns().add(this.expr()); + if (!(lexer.token() == (Token.COMMA))) { + break; + } else { + lexer.nextToken(); + } + } + accept(Token.RPAREN); - return unique; + if (identifierEquals("USING")) { + lexer.nextToken(); + unique.setIndexType(lexer.stringVal()); + lexer.nextToken(); } - public MysqlForeignKey parseForeignKey() { - accept(Token.FOREIGN); - accept(Token.KEY); + return unique; + } - MysqlForeignKey fk = new MysqlForeignKey(); + public MysqlForeignKey parseForeignKey() { + accept(Token.FOREIGN); + accept(Token.KEY); - if (lexer.token() != Token.LPAREN) { - SQLName indexName = name(); - fk.setIndexName(indexName); - } + MysqlForeignKey fk = new MysqlForeignKey(); - accept(Token.LPAREN); - this.names(fk.getReferencingColumns()); - accept(Token.RPAREN); + if (lexer.token() != Token.LPAREN) { + SQLName indexName = name(); + fk.setIndexName(indexName); + } - accept(Token.REFERENCES); + accept(Token.LPAREN); + 
this.names(fk.getReferencingColumns()); + accept(Token.RPAREN); - fk.setReferencedTableName(this.name()); + accept(Token.REFERENCES); - accept(Token.LPAREN); - this.names(fk.getReferencedColumns()); - accept(Token.RPAREN); + fk.setReferencedTableName(this.name()); - if (identifierEquals("MATCH")) { - if (identifierEquals("FULL")) { - fk.setReferenceMatch(MysqlForeignKey.Match.FULL); - } else if (identifierEquals("PARTIAL")) { - fk.setReferenceMatch(MysqlForeignKey.Match.PARTIAL); - } else if (identifierEquals("SIMPLE")) { - fk.setReferenceMatch(MysqlForeignKey.Match.SIMPLE); - } - } + accept(Token.LPAREN); + this.names(fk.getReferencedColumns()); + accept(Token.RPAREN); - if (lexer.token() == Token.ON) { - lexer.nextToken(); - if (lexer.token() == Token.DELETE) { - fk.setReferenceOn(MysqlForeignKey.On.DELETE); - } else if (lexer.token() == Token.UPDATE) { - fk.setReferenceOn(MysqlForeignKey.On.UPDATE); - } else { - throw new ParserException("Syntax error, expect DELETE or UPDATE, actual " + lexer.token() + " " - + lexer.stringVal()); - } - lexer.nextToken(); + if (identifierEquals("MATCH")) { + if (identifierEquals("FULL")) { + fk.setReferenceMatch(MysqlForeignKey.Match.FULL); + } else if (identifierEquals("PARTIAL")) { + fk.setReferenceMatch(MysqlForeignKey.Match.PARTIAL); + } else if (identifierEquals("SIMPLE")) { + fk.setReferenceMatch(MysqlForeignKey.Match.SIMPLE); + } + } - if (lexer.token() == Token.RESTRICT) { - fk.setReferenceOption(MysqlForeignKey.Option.RESTRICT); - } else if (identifierEquals("CASCADE")) { - fk.setReferenceOption(MysqlForeignKey.Option.CASCADE); - } else if (lexer.token() == Token.SET) { - accept(Token.NULL); - fk.setReferenceOption(MysqlForeignKey.Option.SET_NULL); - } else if (identifierEquals("ON")) { - lexer.nextToken(); - if (identifierEquals("ACTION")) { - fk.setReferenceOption(MysqlForeignKey.Option.NO_ACTION); - } else { - throw new ParserException("Syntax error, expect ACTION, actual " + lexer.token() + " " - + 
lexer.stringVal()); - } - } - lexer.nextToken(); + if (lexer.token() == Token.ON) { + lexer.nextToken(); + if (lexer.token() == Token.DELETE) { + fk.setReferenceOn(MysqlForeignKey.On.DELETE); + } else if (lexer.token() == Token.UPDATE) { + fk.setReferenceOn(MysqlForeignKey.On.UPDATE); + } else { + throw new ParserException( + "Syntax error, expect DELETE or UPDATE, actual " + + lexer.token() + + " " + + lexer.stringVal()); + } + lexer.nextToken(); + + if (lexer.token() == Token.RESTRICT) { + fk.setReferenceOption(MysqlForeignKey.Option.RESTRICT); + } else if (identifierEquals("CASCADE")) { + fk.setReferenceOption(MysqlForeignKey.Option.CASCADE); + } else if (lexer.token() == Token.SET) { + accept(Token.NULL); + fk.setReferenceOption(MysqlForeignKey.Option.SET_NULL); + } else if (identifierEquals("ON")) { + lexer.nextToken(); + if (identifierEquals("ACTION")) { + fk.setReferenceOption(MysqlForeignKey.Option.NO_ACTION); + } else { + throw new ParserException( + "Syntax error, expect ACTION, actual " + lexer.token() + " " + lexer.stringVal()); } - return fk; + } + lexer.nextToken(); } + return fk; + } - protected SQLAggregateExpr parseAggregateExprRest(SQLAggregateExpr aggregateExpr) { - if (lexer.token() == Token.ORDER) { - SQLOrderBy orderBy = this.parseOrderBy(); - aggregateExpr.putAttribute("ORDER BY", orderBy); - } - if (identifierEquals("SEPARATOR")) { - lexer.nextToken(); + protected SQLAggregateExpr parseAggregateExprRest(SQLAggregateExpr aggregateExpr) { + if (lexer.token() == Token.ORDER) { + SQLOrderBy orderBy = this.parseOrderBy(); + aggregateExpr.putAttribute("ORDER BY", orderBy); + } + if (identifierEquals("SEPARATOR")) { + lexer.nextToken(); - SQLExpr seperator = this.primary(); + SQLExpr seperator = this.primary(); - aggregateExpr.putAttribute("SEPARATOR", seperator); - } - return aggregateExpr; + aggregateExpr.putAttribute("SEPARATOR", seperator); } + return aggregateExpr; + } - public MySqlSelectGroupByExpr parseSelectGroupByItem() { - 
MySqlSelectGroupByExpr item = new MySqlSelectGroupByExpr(); - - item.setExpr(expr()); + public MySqlSelectGroupByExpr parseSelectGroupByItem() { + MySqlSelectGroupByExpr item = new MySqlSelectGroupByExpr(); - if (lexer.token() == Token.ASC) { - lexer.nextToken(); - item.setType(SQLOrderingSpecification.ASC); - } else if (lexer.token() == Token.DESC) { - lexer.nextToken(); - item.setType(SQLOrderingSpecification.DESC); - } + item.setExpr(expr()); - return item; + if (lexer.token() == Token.ASC) { + lexer.nextToken(); + item.setType(SQLOrderingSpecification.ASC); + } else if (lexer.token() == Token.DESC) { + lexer.nextToken(); + item.setType(SQLOrderingSpecification.DESC); } + return item; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java index 2038aa54ef..c405d90878 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -29,313 +28,311 @@ import com.alibaba.druid.sql.parser.Token; import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; -/** - * Created by allwefantasy on 8/19/16. - */ +/** Created by allwefantasy on 8/19/16. 
*/ public class ElasticSqlSelectParser extends SQLSelectParser { - public ElasticSqlSelectParser(SQLExprParser exprParser) { - super(exprParser); + public ElasticSqlSelectParser(SQLExprParser exprParser) { + super(exprParser); + } + + @Override + public SQLSelectQuery query() { + if (lexer.token() == (Token.LPAREN)) { + lexer.nextToken(); + + SQLSelectQuery select = query(); + accept(Token.RPAREN); + + return queryRest(select); } - @Override - public SQLSelectQuery query() { - if (lexer.token() == (Token.LPAREN)) { - lexer.nextToken(); + MySqlSelectQueryBlock queryBlock = new MySqlSelectQueryBlock(); + + if (lexer.token() == Token.SELECT) { + lexer.nextToken(); + + if (lexer.token() == Token.HINT) { + this.exprParser.parseHints(queryBlock.getHints()); + } + + if (lexer.token() == Token.COMMENT) { + lexer.nextToken(); + } + + if (lexer.token() == (Token.DISTINCT)) { + queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT); + lexer.nextToken(); + } else if (identifierEquals("DISTINCTROW")) { + queryBlock.setDistionOption(SQLSetQuantifier.DISTINCTROW); + lexer.nextToken(); + } else if (lexer.token() == (Token.ALL)) { + queryBlock.setDistionOption(SQLSetQuantifier.ALL); + lexer.nextToken(); + } + + if (identifierEquals("HIGH_PRIORITY")) { + queryBlock.setHignPriority(true); + lexer.nextToken(); + } + + if (identifierEquals("STRAIGHT_JOIN")) { + queryBlock.setStraightJoin(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_SMALL_RESULT")) { + queryBlock.setSmallResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_BIG_RESULT")) { + queryBlock.setBigResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_BUFFER_RESULT")) { + queryBlock.setBufferResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_CACHE")) { + queryBlock.setCache(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_NO_CACHE")) { + queryBlock.setCache(false); + lexer.nextToken(); + } + + if (identifierEquals("SQL_CALC_FOUND_ROWS")) { + 
queryBlock.setCalcFoundRows(true); + lexer.nextToken(); + } + + parseSelectList(queryBlock); + + parseInto(queryBlock); + } - SQLSelectQuery select = query(); - accept(Token.RPAREN); + parseFrom(queryBlock); - return queryRest(select); - } + parseWhere(queryBlock); - MySqlSelectQueryBlock queryBlock = new MySqlSelectQueryBlock(); + parseGroupBy(queryBlock); - if (lexer.token() == Token.SELECT) { - lexer.nextToken(); + queryBlock.setOrderBy(this.exprParser.parseOrderBy()); - if (lexer.token() == Token.HINT) { - this.exprParser.parseHints(queryBlock.getHints()); - } - - if (lexer.token() == Token.COMMENT) { - lexer.nextToken(); - } - - if (lexer.token() == (Token.DISTINCT)) { - queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT); - lexer.nextToken(); - } else if (identifierEquals("DISTINCTROW")) { - queryBlock.setDistionOption(SQLSetQuantifier.DISTINCTROW); - lexer.nextToken(); - } else if (lexer.token() == (Token.ALL)) { - queryBlock.setDistionOption(SQLSetQuantifier.ALL); - lexer.nextToken(); - } - - if (identifierEquals("HIGH_PRIORITY")) { - queryBlock.setHignPriority(true); - lexer.nextToken(); - } - - if (identifierEquals("STRAIGHT_JOIN")) { - queryBlock.setStraightJoin(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_SMALL_RESULT")) { - queryBlock.setSmallResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_BIG_RESULT")) { - queryBlock.setBigResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_BUFFER_RESULT")) { - queryBlock.setBufferResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_CACHE")) { - queryBlock.setCache(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_NO_CACHE")) { - queryBlock.setCache(false); - lexer.nextToken(); - } - - if (identifierEquals("SQL_CALC_FOUND_ROWS")) { - queryBlock.setCalcFoundRows(true); - lexer.nextToken(); - } - - parseSelectList(queryBlock); - - parseInto(queryBlock); - } + if (lexer.token() == Token.LIMIT) { + queryBlock.setLimit(parseLimit()); + } 
- parseFrom(queryBlock); + if (lexer.token() == Token.PROCEDURE) { + lexer.nextToken(); + throw new SqlFeatureNotImplementedException("Unsupported feature: " + Token.PROCEDURE.name); + } - parseWhere(queryBlock); + parseInto(queryBlock); - parseGroupBy(queryBlock); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + accept(Token.UPDATE); - queryBlock.setOrderBy(this.exprParser.parseOrderBy()); + queryBlock.setForUpdate(true); + } - if (lexer.token() == Token.LIMIT) { - queryBlock.setLimit(parseLimit()); - } + if (lexer.token() == Token.LOCK) { + lexer.nextToken(); + accept(Token.IN); + acceptIdentifier("SHARE"); + acceptIdentifier("MODE"); + queryBlock.setLockInShareMode(true); + } - if (lexer.token() == Token.PROCEDURE) { - lexer.nextToken(); - throw new SqlFeatureNotImplementedException("Unsupported feature: " + Token.PROCEDURE.name); - } + return queryRest(queryBlock); + } + + protected void parseInto(SQLSelectQueryBlock queryBlock) { + if (lexer.token() == (Token.INTO)) { + lexer.nextToken(); + + if (identifierEquals("OUTFILE")) { + lexer.nextToken(); + + MySqlOutFileExpr outFile = new MySqlOutFileExpr(); + outFile.setFile(expr()); + + queryBlock.setInto(outFile); + + if (identifierEquals("FIELDS") || identifierEquals("COLUMNS")) { + lexer.nextToken(); - parseInto(queryBlock); + if (identifierEquals("TERMINATED")) { + lexer.nextToken(); + accept(Token.BY); + } + outFile.setColumnsTerminatedBy((SQLLiteralExpr) expr()); - if (lexer.token() == Token.FOR) { + if (identifierEquals("OPTIONALLY")) { lexer.nextToken(); - accept(Token.UPDATE); + outFile.setColumnsEnclosedOptionally(true); + } - queryBlock.setForUpdate(true); - } + if (identifierEquals("ENCLOSED")) { + lexer.nextToken(); + accept(Token.BY); + outFile.setColumnsEnclosedBy((SQLLiteralExpr) expr()); + } - if (lexer.token() == Token.LOCK) { + if (identifierEquals("ESCAPED")) { lexer.nextToken(); - accept(Token.IN); - acceptIdentifier("SHARE"); - acceptIdentifier("MODE"); - 
queryBlock.setLockInShareMode(true); + accept(Token.BY); + outFile.setColumnsEscaped((SQLLiteralExpr) expr()); + } } - return queryRest(queryBlock); - } + if (identifierEquals("LINES")) { + lexer.nextToken(); - protected void parseInto(SQLSelectQueryBlock queryBlock) { - if (lexer.token() == (Token.INTO)) { + if (identifierEquals("STARTING")) { lexer.nextToken(); - - if (identifierEquals("OUTFILE")) { - lexer.nextToken(); - - MySqlOutFileExpr outFile = new MySqlOutFileExpr(); - outFile.setFile(expr()); - - queryBlock.setInto(outFile); - - if (identifierEquals("FIELDS") || identifierEquals("COLUMNS")) { - lexer.nextToken(); - - if (identifierEquals("TERMINATED")) { - lexer.nextToken(); - accept(Token.BY); - } - outFile.setColumnsTerminatedBy((SQLLiteralExpr) expr()); - - if (identifierEquals("OPTIONALLY")) { - lexer.nextToken(); - outFile.setColumnsEnclosedOptionally(true); - } - - if (identifierEquals("ENCLOSED")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setColumnsEnclosedBy((SQLLiteralExpr) expr()); - } - - if (identifierEquals("ESCAPED")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setColumnsEscaped((SQLLiteralExpr) expr()); - } - } - - if (identifierEquals("LINES")) { - lexer.nextToken(); - - if (identifierEquals("STARTING")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setLinesStartingBy((SQLLiteralExpr) expr()); - } else { - identifierEquals("TERMINATED"); - lexer.nextToken(); - accept(Token.BY); - outFile.setLinesTerminatedBy((SQLLiteralExpr) expr()); - } - } - } else { - queryBlock.setInto(this.exprParser.name()); - } + accept(Token.BY); + outFile.setLinesStartingBy((SQLLiteralExpr) expr()); + } else { + identifierEquals("TERMINATED"); + lexer.nextToken(); + accept(Token.BY); + outFile.setLinesTerminatedBy((SQLLiteralExpr) expr()); + } } + } else { + queryBlock.setInto(this.exprParser.name()); + } } + } - protected void parseGroupBy(SQLSelectQueryBlock queryBlock) { - SQLSelectGroupByClause groupBy = null; + protected void 
parseGroupBy(SQLSelectQueryBlock queryBlock) { + SQLSelectGroupByClause groupBy = null; - if (lexer.token() == Token.GROUP) { - groupBy = new SQLSelectGroupByClause(); + if (lexer.token() == Token.GROUP) { + groupBy = new SQLSelectGroupByClause(); - lexer.nextToken(); - accept(Token.BY); + lexer.nextToken(); + accept(Token.BY); - while (true) { - groupBy.addItem(this.getExprParser().parseSelectGroupByItem()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } - lexer.nextToken(); - } - - if (lexer.token() == Token.WITH) { - lexer.nextToken(); - acceptIdentifier("ROLLUP"); - - MySqlSelectGroupBy mySqlGroupBy = new MySqlSelectGroupBy(); - for (SQLExpr sqlExpr : groupBy.getItems()) { - mySqlGroupBy.addItem(sqlExpr); - } - mySqlGroupBy.setRollUp(true); - - groupBy = mySqlGroupBy; - } + while (true) { + groupBy.addItem(this.getExprParser().parseSelectGroupByItem()); + if (!(lexer.token() == (Token.COMMA))) { + break; } + lexer.nextToken(); + } - if (lexer.token() == Token.HAVING) { - lexer.nextToken(); + if (lexer.token() == Token.WITH) { + lexer.nextToken(); + acceptIdentifier("ROLLUP"); - if (groupBy == null) { - groupBy = new SQLSelectGroupByClause(); - } - groupBy.setHaving(this.exprParser.expr()); + MySqlSelectGroupBy mySqlGroupBy = new MySqlSelectGroupBy(); + for (SQLExpr sqlExpr : groupBy.getItems()) { + mySqlGroupBy.addItem(sqlExpr); } + mySqlGroupBy.setRollUp(true); - queryBlock.setGroupBy(groupBy); + groupBy = mySqlGroupBy; + } } - protected SQLTableSource parseTableSourceRest(SQLTableSource tableSource) { - if (identifierEquals("USING")) { - return tableSource; - } + if (lexer.token() == Token.HAVING) { + lexer.nextToken(); - if (lexer.token() == Token.USE) { - lexer.nextToken(); - MySqlUseIndexHint hint = new MySqlUseIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + if (groupBy == null) { + groupBy = new SQLSelectGroupByClause(); + } + groupBy.setHaving(this.exprParser.expr()); + } - if (identifierEquals("IGNORE")) { - 
lexer.nextToken(); - MySqlIgnoreIndexHint hint = new MySqlIgnoreIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + queryBlock.setGroupBy(groupBy); + } - if (identifierEquals("FORCE")) { - lexer.nextToken(); - MySqlForceIndexHint hint = new MySqlForceIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + protected SQLTableSource parseTableSourceRest(SQLTableSource tableSource) { + if (identifierEquals("USING")) { + return tableSource; + } - return super.parseTableSourceRest(tableSource); + if (lexer.token() == Token.USE) { + lexer.nextToken(); + MySqlUseIndexHint hint = new MySqlUseIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); } - private void parseIndexHint(MySqlIndexHintImpl hint) { - if (lexer.token() == Token.INDEX) { - lexer.nextToken(); - } else { - accept(Token.KEY); - } + if (identifierEquals("IGNORE")) { + lexer.nextToken(); + MySqlIgnoreIndexHint hint = new MySqlIgnoreIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); + } - if (lexer.token() == Token.FOR) { - lexer.nextToken(); + if (identifierEquals("FORCE")) { + lexer.nextToken(); + MySqlForceIndexHint hint = new MySqlForceIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); + } - if (lexer.token() == Token.JOIN) { - lexer.nextToken(); - hint.setOption(MySqlIndexHint.Option.JOIN); - } else if (lexer.token() == Token.ORDER) { - lexer.nextToken(); - accept(Token.BY); - hint.setOption(MySqlIndexHint.Option.ORDER_BY); - } else { - accept(Token.GROUP); - accept(Token.BY); - hint.setOption(MySqlIndexHint.Option.GROUP_BY); - } - } + return super.parseTableSourceRest(tableSource); + } - accept(Token.LPAREN); - if (lexer.token() == Token.PRIMARY) { - lexer.nextToken(); - hint.getIndexList().add(new SQLIdentifierExpr("PRIMARY")); - } else { - this.exprParser.names(hint.getIndexList()); - } - accept(Token.RPAREN); + private void parseIndexHint(MySqlIndexHintImpl hint) { + if (lexer.token() == 
Token.INDEX) { + lexer.nextToken(); + } else { + accept(Token.KEY); } - protected MySqlUnionQuery createSQLUnionQuery() { - return new MySqlUnionQuery(); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + + if (lexer.token() == Token.JOIN) { + lexer.nextToken(); + hint.setOption(MySqlIndexHint.Option.JOIN); + } else if (lexer.token() == Token.ORDER) { + lexer.nextToken(); + accept(Token.BY); + hint.setOption(MySqlIndexHint.Option.ORDER_BY); + } else { + accept(Token.GROUP); + accept(Token.BY); + hint.setOption(MySqlIndexHint.Option.GROUP_BY); + } } - public SQLUnionQuery unionRest(SQLUnionQuery union) { - if (lexer.token() == Token.LIMIT) { - MySqlUnionQuery mysqlUnionQuery = (MySqlUnionQuery) union; - mysqlUnionQuery.setLimit(parseLimit()); - } - return super.unionRest(union); + accept(Token.LPAREN); + if (lexer.token() == Token.PRIMARY) { + lexer.nextToken(); + hint.getIndexList().add(new SQLIdentifierExpr("PRIMARY")); + } else { + this.exprParser.names(hint.getIndexList()); } + accept(Token.RPAREN); + } - public MySqlSelectQueryBlock.Limit parseLimit() { - return ((ElasticSqlExprParser) this.exprParser).parseLimit(); - } + protected MySqlUnionQuery createSQLUnionQuery() { + return new MySqlUnionQuery(); + } - public ElasticSqlExprParser getExprParser() { - return (ElasticSqlExprParser) exprParser; + public SQLUnionQuery unionRest(SQLUnionQuery union) { + if (lexer.token() == Token.LIMIT) { + MySqlUnionQuery mysqlUnionQuery = (MySqlUnionQuery) union; + mysqlUnionQuery.setLimit(parseLimit()); } + return super.unionRest(union); + } + + public MySqlSelectQueryBlock.Limit parseLimit() { + return ((ElasticSqlExprParser) this.exprParser).parseLimit(); + } + + public ElasticSqlExprParser getExprParser() { + return (ElasticSqlExprParser) exprParser; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java index 89e9a16d1c..da08f81453 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -41,369 +40,396 @@ import org.opensearch.sql.legacy.utils.Util; /** - * - * * @author ansj */ public class FieldMaker { - private SQLFunctions sqlFunctions = new SQLFunctions(); - - public Field makeField(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { - Field field = makeFieldImpl(expr, alias, tableAlias); - addGroupByForDistinctFieldsInSelect(expr, field); + private SQLFunctions sqlFunctions = new SQLFunctions(); - // why we may get null as a field??? - if (field != null) { - field.setExpression(expr); - } + public Field makeField(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { + Field field = makeFieldImpl(expr, alias, tableAlias); + addGroupByForDistinctFieldsInSelect(expr, field); - return field; + // why we may get null as a field??? 
+ if (field != null) { + field.setExpression(expr); } - private Field makeFieldImpl(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - return handleIdentifier(expr, alias, tableAlias); - } else if (expr instanceof SQLQueryExpr) { - throw new SqlParseException("unknown field name : " + expr); - } else if (expr instanceof SQLBinaryOpExpr) { - //make a SCRIPT method field; - return makeFieldImpl(makeBinaryMethodField((SQLBinaryOpExpr) expr, alias, true), alias, tableAlias); - } else if (expr instanceof SQLAllColumnExpr) { - return Field.STAR; - } else if (expr instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) expr; - - String methodName = mExpr.getMethodName(); - - if (methodName.equalsIgnoreCase("nested") || methodName.equalsIgnoreCase("reverse_nested")) { - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(mExpr)) { - return handleIdentifier(nestedType, alias, tableAlias); - } - } else if (methodName.equalsIgnoreCase("children")) { - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(mExpr)) { - return handleIdentifier(childrenType, alias, tableAlias); - } - } else if (methodName.equalsIgnoreCase("filter")) { - return makeFilterMethodField(mExpr, alias); - } - - if ((SQLFunctions.builtInFunctions.contains(methodName.toLowerCase())) && Strings.isNullOrEmpty(alias)) { - alias = mExpr.toString(); - } - return makeMethodField(methodName, mExpr.getParameters(), null, alias, tableAlias, true); - } else if (expr instanceof SQLAggregateExpr) { - SQLAggregateExpr sExpr = (SQLAggregateExpr) expr; - return makeMethodField(sExpr.getMethodName(), sExpr.getArguments(), sExpr.getOption(), - alias, tableAlias, true); - } else if (expr instanceof SQLCaseExpr) { - String scriptCode = new CaseWhenParser((SQLCaseExpr) expr, alias, tableAlias).parse(); - 
List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue(alias)); - methodParameters.add(new KVValue(scriptCode)); - return new MethodField("script", methodParameters, null, alias); - } else if (expr instanceof SQLCastExpr) { - SQLCastExpr castExpr = (SQLCastExpr) expr; - if (alias == null) { - alias = "cast_" + castExpr.getExpr().toString(); - } - ArrayList methodParameters = new ArrayList<>(); - methodParameters.add(((SQLCastExpr) expr).getExpr()); - return makeMethodField("CAST", methodParameters, null, alias, tableAlias, true); - } else if (expr instanceof SQLNumericLiteralExpr) { - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("assign", null); - methodInvokeExpr.addParameter(expr); - return makeMethodField(methodInvokeExpr.getMethodName(), methodInvokeExpr.getParameters(), - null, alias, tableAlias, true); - } else { - throw new SqlParseException("unknown field name : " + expr); + return field; + } + + private Field makeFieldImpl(SQLExpr expr, String alias, String tableAlias) + throws SqlParseException { + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + return handleIdentifier(expr, alias, tableAlias); + } else if (expr instanceof SQLQueryExpr) { + throw new SqlParseException("unknown field name : " + expr); + } else if (expr instanceof SQLBinaryOpExpr) { + // make a SCRIPT method field; + return makeFieldImpl( + makeBinaryMethodField((SQLBinaryOpExpr) expr, alias, true), alias, tableAlias); + } else if (expr instanceof SQLAllColumnExpr) { + return Field.STAR; + } else if (expr instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) expr; + + String methodName = mExpr.getMethodName(); + + if (methodName.equalsIgnoreCase("nested") || methodName.equalsIgnoreCase("reverse_nested")) { + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(mExpr)) { + return handleIdentifier(nestedType, alias, tableAlias); 
} - } - - private void addGroupByForDistinctFieldsInSelect(SQLExpr expr, Field field) { - if (expr.getParent() != null && expr.getParent() instanceof SQLSelectItem - && expr.getParent().getParent() != null - && expr.getParent().getParent() instanceof SQLSelectQueryBlock) { - SQLSelectQueryBlock queryBlock = (SQLSelectQueryBlock) expr.getParent().getParent(); - if (queryBlock.getDistionOption() == SQLSetQuantifier.DISTINCT) { - SQLAggregateOption option = SQLAggregateOption.DISTINCT; - field.setAggregationOption(option); - if (queryBlock.getGroupBy() == null) { - queryBlock.setGroupBy(new SQLSelectGroupByClause()); - } - SQLSelectGroupByClause groupByClause = queryBlock.getGroupBy(); - groupByClause.addItem(expr); - queryBlock.setGroupBy(groupByClause); - } + } else if (methodName.equalsIgnoreCase("children")) { + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(mExpr)) { + return handleIdentifier(childrenType, alias, tableAlias); } + } else if (methodName.equalsIgnoreCase("filter")) { + return makeFilterMethodField(mExpr, alias); + } + + if ((SQLFunctions.builtInFunctions.contains(methodName.toLowerCase())) + && Strings.isNullOrEmpty(alias)) { + alias = mExpr.toString(); + } + return makeMethodField(methodName, mExpr.getParameters(), null, alias, tableAlias, true); + } else if (expr instanceof SQLAggregateExpr) { + SQLAggregateExpr sExpr = (SQLAggregateExpr) expr; + return makeMethodField( + sExpr.getMethodName(), sExpr.getArguments(), sExpr.getOption(), alias, tableAlias, true); + } else if (expr instanceof SQLCaseExpr) { + String scriptCode = new CaseWhenParser((SQLCaseExpr) expr, alias, tableAlias).parse(); + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue(alias)); + methodParameters.add(new KVValue(scriptCode)); + return new MethodField("script", methodParameters, null, alias); + } else if (expr instanceof SQLCastExpr) { + SQLCastExpr castExpr = (SQLCastExpr) expr; + if (alias == null) { + alias = 
"cast_" + castExpr.getExpr().toString(); + } + ArrayList methodParameters = new ArrayList<>(); + methodParameters.add(((SQLCastExpr) expr).getExpr()); + return makeMethodField("CAST", methodParameters, null, alias, tableAlias, true); + } else if (expr instanceof SQLNumericLiteralExpr) { + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("assign", null); + methodInvokeExpr.addParameter(expr); + return makeMethodField( + methodInvokeExpr.getMethodName(), + methodInvokeExpr.getParameters(), + null, + alias, + tableAlias, + true); + } else { + throw new SqlParseException("unknown field name : " + expr); } - - private static Object getScriptValue(SQLExpr expr) throws SqlParseException { - return Util.getScriptValue(expr); - } - - private Field makeScriptMethodField(SQLBinaryOpExpr binaryExpr, String alias, String tableAlias) - throws SqlParseException { - List params = new ArrayList<>(); - - String scriptFieldAlias; - if (alias == null || alias.equals("")) { - scriptFieldAlias = binaryExpr.toString(); - } else { - scriptFieldAlias = alias; + } + + private void addGroupByForDistinctFieldsInSelect(SQLExpr expr, Field field) { + if (expr.getParent() != null + && expr.getParent() instanceof SQLSelectItem + && expr.getParent().getParent() != null + && expr.getParent().getParent() instanceof SQLSelectQueryBlock) { + SQLSelectQueryBlock queryBlock = (SQLSelectQueryBlock) expr.getParent().getParent(); + if (queryBlock.getDistionOption() == SQLSetQuantifier.DISTINCT) { + SQLAggregateOption option = SQLAggregateOption.DISTINCT; + field.setAggregationOption(option); + if (queryBlock.getGroupBy() == null) { + queryBlock.setGroupBy(new SQLSelectGroupByClause()); } - params.add(new SQLCharExpr(scriptFieldAlias)); + SQLSelectGroupByClause groupByClause = queryBlock.getGroupBy(); + groupByClause.addItem(expr); + queryBlock.setGroupBy(groupByClause); + } + } + } - Object left = getScriptValue(binaryExpr.getLeft()); - Object right = getScriptValue(binaryExpr.getRight()); - 
String script = String.format("%s %s %s", left, binaryExpr.getOperator().getName(), right); + private static Object getScriptValue(SQLExpr expr) throws SqlParseException { + return Util.getScriptValue(expr); + } - params.add(new SQLCharExpr(script)); + private Field makeScriptMethodField(SQLBinaryOpExpr binaryExpr, String alias, String tableAlias) + throws SqlParseException { + List params = new ArrayList<>(); - return makeMethodField("script", params, null, null, tableAlias, false); + String scriptFieldAlias; + if (alias == null || alias.equals("")) { + scriptFieldAlias = binaryExpr.toString(); + } else { + scriptFieldAlias = alias; } + params.add(new SQLCharExpr(scriptFieldAlias)); + Object left = getScriptValue(binaryExpr.getLeft()); + Object right = getScriptValue(binaryExpr.getRight()); + String script = String.format("%s %s %s", left, binaryExpr.getOperator().getName(), right); - private static Field makeFilterMethodField(SQLMethodInvokeExpr filterMethod, String alias) - throws SqlParseException { - List parameters = filterMethod.getParameters(); - int parametersSize = parameters.size(); - if (parametersSize != 1 && parametersSize != 2) { - throw new SqlParseException("filter group by field should only have one or 2 parameters" - + " filter(Expr) or filter(name,Expr)"); - } - String filterAlias = filterMethod.getMethodName(); - SQLExpr exprToCheck = null; - if (parametersSize == 1) { - exprToCheck = parameters.get(0); - filterAlias = "filter(" + exprToCheck.toString().replaceAll("\n", " ") + ")"; - } - if (parametersSize == 2) { - filterAlias = Util.extendedToString(parameters.get(0)); - exprToCheck = parameters.get(1); - } - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(exprToCheck, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue("where", where)); - methodParameters.add(new 
KVValue("alias", filterAlias + "@FILTER")); - return new MethodField("filter", methodParameters, null, alias); - } + params.add(new SQLCharExpr(script)); + return makeMethodField("script", params, null, null, tableAlias, false); + } - private static Field handleIdentifier(NestedType nestedType, String alias, String tableAlias) { - Field field = handleIdentifier(new SQLIdentifierExpr(nestedType.field), alias, tableAlias); - field.setNested(nestedType); - field.setChildren(null); - return field; + private static Field makeFilterMethodField(SQLMethodInvokeExpr filterMethod, String alias) + throws SqlParseException { + List parameters = filterMethod.getParameters(); + int parametersSize = parameters.size(); + if (parametersSize != 1 && parametersSize != 2) { + throw new SqlParseException( + "filter group by field should only have one or 2 parameters" + + " filter(Expr) or filter(name,Expr)"); } - - private static Field handleIdentifier(ChildrenType childrenType, String alias, String tableAlias) { - Field field = handleIdentifier(new SQLIdentifierExpr(childrenType.field), alias, tableAlias); - field.setNested(null); - field.setChildren(childrenType); - return field; + String filterAlias = filterMethod.getMethodName(); + SQLExpr exprToCheck = null; + if (parametersSize == 1) { + exprToCheck = parameters.get(0); + filterAlias = "filter(" + exprToCheck.toString().replaceAll("\n", " ") + ")"; } - - - //binary method can nested - public SQLMethodInvokeExpr makeBinaryMethodField(SQLBinaryOpExpr expr, String alias, boolean first) - throws SqlParseException { - List params = new ArrayList<>(); - - String scriptFieldAlias; - if (first && (alias == null || alias.equals(""))) { - scriptFieldAlias = sqlFunctions.nextId("field"); - } else { - scriptFieldAlias = alias; - } - params.add(new SQLCharExpr(scriptFieldAlias)); - - switch (expr.getOperator()) { - case Add: - return convertBinaryOperatorToMethod("add", expr); - case Multiply: - return 
convertBinaryOperatorToMethod("multiply", expr); - - case Divide: - return convertBinaryOperatorToMethod("divide", expr); - - case Modulus: - return convertBinaryOperatorToMethod("modulus", expr); - - case Subtract: - return convertBinaryOperatorToMethod("subtract", expr); - default: - throw new SqlParseException("Unsupported operator: " + expr.getOperator().getName()); - } + if (parametersSize == 2) { + filterAlias = Util.extendedToString(parameters.get(0)); + exprToCheck = parameters.get(1); } - - private static SQLMethodInvokeExpr convertBinaryOperatorToMethod(String operator, SQLBinaryOpExpr expr) { - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(operator, null); - methodInvokeExpr.addParameter(expr.getLeft()); - methodInvokeExpr.addParameter(expr.getRight()); - methodInvokeExpr.putAttribute("source", expr); - return methodInvokeExpr; + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(exprToCheck, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue("where", where)); + methodParameters.add(new KVValue("alias", filterAlias + "@FILTER")); + return new MethodField("filter", methodParameters, null, alias); + } + + private static Field handleIdentifier(NestedType nestedType, String alias, String tableAlias) { + Field field = handleIdentifier(new SQLIdentifierExpr(nestedType.field), alias, tableAlias); + field.setNested(nestedType); + field.setChildren(null); + return field; + } + + private static Field handleIdentifier( + ChildrenType childrenType, String alias, String tableAlias) { + Field field = handleIdentifier(new SQLIdentifierExpr(childrenType.field), alias, tableAlias); + field.setNested(null); + field.setChildren(childrenType); + return field; + } + + // binary method can nested + public SQLMethodInvokeExpr makeBinaryMethodField( + SQLBinaryOpExpr expr, String alias, 
boolean first) throws SqlParseException { + List params = new ArrayList<>(); + + String scriptFieldAlias; + if (first && (alias == null || alias.equals(""))) { + scriptFieldAlias = sqlFunctions.nextId("field"); + } else { + scriptFieldAlias = alias; } + params.add(new SQLCharExpr(scriptFieldAlias)); + switch (expr.getOperator()) { + case Add: + return convertBinaryOperatorToMethod("add", expr); + case Multiply: + return convertBinaryOperatorToMethod("multiply", expr); - private static Field handleIdentifier(SQLExpr expr, String alias, String tableAlias) { - String name = expr.toString().replace("`", ""); - String newFieldName = name; - Field field = null; - if (tableAlias != null) { - String aliasPrefix = tableAlias + "."; - if (name.startsWith(aliasPrefix)) { - newFieldName = name.replaceFirst(aliasPrefix, ""); - field = new Field(newFieldName, alias); - } - } + case Divide: + return convertBinaryOperatorToMethod("divide", expr); - if (tableAlias == null) { - field = new Field(newFieldName, alias); - } + case Modulus: + return convertBinaryOperatorToMethod("modulus", expr); - return field; + case Subtract: + return convertBinaryOperatorToMethod("subtract", expr); + default: + throw new SqlParseException("Unsupported operator: " + expr.getOperator().getName()); + } + } + + private static SQLMethodInvokeExpr convertBinaryOperatorToMethod( + String operator, SQLBinaryOpExpr expr) { + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(operator, null); + methodInvokeExpr.addParameter(expr.getLeft()); + methodInvokeExpr.addParameter(expr.getRight()); + methodInvokeExpr.putAttribute("source", expr); + return methodInvokeExpr; + } + + private static Field handleIdentifier(SQLExpr expr, String alias, String tableAlias) { + String name = expr.toString().replace("`", ""); + String newFieldName = name; + Field field = null; + if (tableAlias != null) { + String aliasPrefix = tableAlias + "."; + if (name.startsWith(aliasPrefix)) { + newFieldName = 
name.replaceFirst(aliasPrefix, ""); + field = new Field(newFieldName, alias); + } } - public MethodField makeMethodField(String name, List arguments, SQLAggregateOption option, - String alias, String tableAlias, boolean first) throws SqlParseException { - List paramers = new LinkedList<>(); - - for (SQLExpr object : arguments) { - - if (object instanceof SQLBinaryOpExpr) { - - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) object; - - if (SQLFunctions.isFunctionTranslatedToScript(binaryOpExpr.getOperator().toString())) { - SQLMethodInvokeExpr mExpr = makeBinaryMethodField(binaryOpExpr, alias, first); - MethodField abc = makeMethodField(mExpr.getMethodName(), mExpr.getParameters(), - null, null, tableAlias, false); - paramers.add(new KVValue(abc.getParams().get(0).toString(), - new SQLCharExpr(abc.getParams().get(1).toString()))); - } else { - if (!binaryOpExpr.getOperator().getName().equals("=")) { - paramers.add(new KVValue("script", makeScriptMethodField(binaryOpExpr, null, tableAlias))); - } else { - SQLExpr right = binaryOpExpr.getRight(); - Object value = Util.expr2Object(right); - paramers.add(new KVValue(binaryOpExpr.getLeft().toString(), value)); - } - } - - } else if (object instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) object; - String methodName = mExpr.getMethodName().toLowerCase(); - if (methodName.equals("script")) { - KVValue script = new KVValue("script", makeMethodField(mExpr.getMethodName(), mExpr.getParameters(), - null, alias, tableAlias, true)); - paramers.add(script); - } else if (methodName.equals("nested") || methodName.equals("reverse_nested")) { - NestedType nestedType = new NestedType(); - - if (!nestedType.tryFillFromExpr(object)) { - throw new SqlParseException("Failed to parse nested expression: " + object); - } - - // Fix bug: method name of reversed_nested() was set to "nested" wrongly - paramers.add(new KVValue(methodName, nestedType)); - } else if (methodName.equals("children")) { - 
ChildrenType childrenType = new ChildrenType(); - - if (!childrenType.tryFillFromExpr(object)) { - throw new SqlParseException("Failed to parse children expression: " + object); - } - - paramers.add(new KVValue("children", childrenType)); - } else if (SQLFunctions.isFunctionTranslatedToScript(methodName)) { - //throw new SqlParseException("only support script/nested as inner functions"); - MethodField abc = makeMethodField(methodName, mExpr.getParameters(), null, null, tableAlias, false); - paramers.add(new KVValue(abc.getParams().get(0).toString(), - new SQLCharExpr(abc.getParams().get(1).toString()))); - } else { - throw new SqlParseException("only support script/nested/children as inner functions"); - } - } else if (object instanceof SQLCaseExpr) { - String scriptCode = new CaseWhenParser((SQLCaseExpr) object, alias, tableAlias).parse(); - paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); - } else if (object instanceof SQLCastExpr) { - String castName = sqlFunctions.nextId("cast"); - List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue(((SQLCastExpr) object).getExpr().toString())); - String castType = ((SQLCastExpr) object).getDataType().getName(); - String scriptCode = sqlFunctions.getCastScriptStatement(castName, castType, methodParameters); - - // Parameter "first" indicates if return statement is required. Take CAST statement nested in - // aggregate function SUM(CAST...) for example, return statement is required in this case. - // Otherwise DSL with metric aggregation always returns 0 as result. And this works also because - // the caller makeFieldImpl(SQLExpr("SUM...")) does pass first=true to here. 
- if (first) { - scriptCode += "; return " + castName; - } - methodParameters.add(new KVValue(scriptCode)); - paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); - } else if (object instanceof SQLAggregateExpr) { - SQLObject parent = object.getParent(); - SQLExpr source = (SQLExpr) parent.getAttribute("source"); - - if (parent instanceof SQLMethodInvokeExpr && source == null) { - throw new SqlFeatureNotImplementedException( - "Function calls of form '" - + ((SQLMethodInvokeExpr) parent).getMethodName() - + "(" - + ((SQLAggregateExpr) object).getMethodName() - + "(...))' are not implemented yet"); - } - - throw new SqlFeatureNotImplementedException( - "The complex aggregate expressions are not implemented yet: " + source); - } else { - paramers.add(new KVValue(Util.removeTableAilasFromField(object, tableAlias))); - } + if (tableAlias == null) { + field = new Field(newFieldName, alias); + } + return field; + } + + public MethodField makeMethodField( + String name, + List arguments, + SQLAggregateOption option, + String alias, + String tableAlias, + boolean first) + throws SqlParseException { + List paramers = new LinkedList<>(); + + for (SQLExpr object : arguments) { + + if (object instanceof SQLBinaryOpExpr) { + + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) object; + + if (SQLFunctions.isFunctionTranslatedToScript(binaryOpExpr.getOperator().toString())) { + SQLMethodInvokeExpr mExpr = makeBinaryMethodField(binaryOpExpr, alias, first); + MethodField abc = + makeMethodField( + mExpr.getMethodName(), mExpr.getParameters(), null, null, tableAlias, false); + paramers.add( + new KVValue( + abc.getParams().get(0).toString(), + new SQLCharExpr(abc.getParams().get(1).toString()))); + } else { + if (!binaryOpExpr.getOperator().getName().equals("=")) { + paramers.add( + new KVValue("script", makeScriptMethodField(binaryOpExpr, null, tableAlias))); + } else { + SQLExpr right = binaryOpExpr.getRight(); + Object value = Util.expr2Object(right); + 
paramers.add(new KVValue(binaryOpExpr.getLeft().toString(), value)); + } } - //just check we can find the function - boolean builtInScriptFunction = SQLFunctions.isFunctionTranslatedToScript(name); - if (builtInScriptFunction) { - if (alias == null && first) { - alias = sqlFunctions.nextId(name); - } - //should check if field and first . - Tuple newFunctions = sqlFunctions.function(name.toLowerCase(), paramers, - paramers.isEmpty() ? null : paramers.get(0).key, first); - paramers.clear(); - if (!first) { - //variance - paramers.add(new KVValue(newFunctions.v1())); - } else { - paramers.add(new KVValue(alias)); - } - - paramers.add(new KVValue(newFunctions.v2())); + } else if (object instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) object; + String methodName = mExpr.getMethodName().toLowerCase(); + if (methodName.equals("script")) { + KVValue script = + new KVValue( + "script", + makeMethodField( + mExpr.getMethodName(), mExpr.getParameters(), null, alias, tableAlias, true)); + paramers.add(script); + } else if (methodName.equals("nested") || methodName.equals("reverse_nested")) { + NestedType nestedType = new NestedType(); + + if (!nestedType.tryFillFromExpr(object)) { + throw new SqlParseException("Failed to parse nested expression: " + object); + } + + // Fix bug: method name of reversed_nested() was set to "nested" wrongly + paramers.add(new KVValue(methodName, nestedType)); + } else if (methodName.equals("children")) { + ChildrenType childrenType = new ChildrenType(); + + if (!childrenType.tryFillFromExpr(object)) { + throw new SqlParseException("Failed to parse children expression: " + object); + } + + paramers.add(new KVValue("children", childrenType)); + } else if (SQLFunctions.isFunctionTranslatedToScript(methodName)) { + // throw new SqlParseException("only support script/nested as inner functions"); + MethodField abc = + makeMethodField(methodName, mExpr.getParameters(), null, null, tableAlias, false); + paramers.add( 
+ new KVValue( + abc.getParams().get(0).toString(), + new SQLCharExpr(abc.getParams().get(1).toString()))); + } else { + throw new SqlParseException("only support script/nested/children as inner functions"); } + } else if (object instanceof SQLCaseExpr) { + String scriptCode = new CaseWhenParser((SQLCaseExpr) object, alias, tableAlias).parse(); + paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); + } else if (object instanceof SQLCastExpr) { + String castName = sqlFunctions.nextId("cast"); + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue(((SQLCastExpr) object).getExpr().toString())); + String castType = ((SQLCastExpr) object).getDataType().getName(); + String scriptCode = + sqlFunctions.getCastScriptStatement(castName, castType, methodParameters); + + // Parameter "first" indicates if return statement is required. Take CAST statement nested + // in + // aggregate function SUM(CAST...) for example, return statement is required in this case. + // Otherwise DSL with metric aggregation always returns 0 as result. And this works also + // because + // the caller makeFieldImpl(SQLExpr("SUM...")) does pass first=true to here. 
if (first) { - List tempParamers = new LinkedList<>(); - for (KVValue temp : paramers) { - if (temp.value instanceof SQLExpr) { - tempParamers.add(new KVValue(temp.key, Util.expr2Object((SQLExpr) temp.value))); - } else { - tempParamers.add(new KVValue(temp.key, temp.value)); - } - } - paramers.clear(); - paramers.addAll(tempParamers); + scriptCode += "; return " + castName; + } + methodParameters.add(new KVValue(scriptCode)); + paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); + } else if (object instanceof SQLAggregateExpr) { + SQLObject parent = object.getParent(); + SQLExpr source = (SQLExpr) parent.getAttribute("source"); + + if (parent instanceof SQLMethodInvokeExpr && source == null) { + throw new SqlFeatureNotImplementedException( + "Function calls of form '" + + ((SQLMethodInvokeExpr) parent).getMethodName() + + "(" + + ((SQLAggregateExpr) object).getMethodName() + + "(...))' are not implemented yet"); } - if (builtInScriptFunction) { - return new ScriptMethodField(name, paramers, option, alias); + throw new SqlFeatureNotImplementedException( + "The complex aggregate expressions are not implemented yet: " + source); + } else { + paramers.add(new KVValue(Util.removeTableAilasFromField(object, tableAlias))); + } + } + + // just check we can find the function + boolean builtInScriptFunction = SQLFunctions.isFunctionTranslatedToScript(name); + if (builtInScriptFunction) { + if (alias == null && first) { + alias = sqlFunctions.nextId(name); + } + // should check if field and first . + Tuple newFunctions = + sqlFunctions.function( + name.toLowerCase(), paramers, paramers.isEmpty() ? 
null : paramers.get(0).key, first); + paramers.clear(); + if (!first) { + // variance + paramers.add(new KVValue(newFunctions.v1())); + } else { + paramers.add(new KVValue(alias)); + } + + paramers.add(new KVValue(newFunctions.v2())); + } + if (first) { + List tempParamers = new LinkedList<>(); + for (KVValue temp : paramers) { + if (temp.value instanceof SQLExpr) { + tempParamers.add(new KVValue(temp.key, Util.expr2Object((SQLExpr) temp.value))); } else { - return new MethodField(name, paramers, option, alias); + tempParamers.add(new KVValue(temp.key, temp.value)); } + } + paramers.clear(); + paramers.addAll(tempParamers); + } + + if (builtInScriptFunction) { + return new ScriptMethodField(name, paramers, option, alias); + } else { + return new MethodField(name, paramers, option, alias); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java index 307d87f6e8..e0d933a405 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -21,93 +20,89 @@ import org.opensearch.sql.legacy.domain.Where; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Parse expression in the Having clause. - */ +/** Parse expression in the Having clause. 
*/ public class HavingParser { - private final WhereParser whereParser; - private final List havingFields; - private final HavingConditionRewriter havingConditionRewriter; + private final WhereParser whereParser; + private final List havingFields; + private final HavingConditionRewriter havingConditionRewriter; - public HavingParser(WhereParser whereParser) { - this.whereParser = whereParser; - this.havingFields = new ArrayList<>(); - this.havingConditionRewriter = new HavingConditionRewriter(); - } + public HavingParser(WhereParser whereParser) { + this.whereParser = whereParser; + this.havingFields = new ArrayList<>(); + this.havingConditionRewriter = new HavingConditionRewriter(); + } - public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { - expr.accept(havingConditionRewriter); - whereParser.parseWhere(expr, where); - } + public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { + expr.accept(havingConditionRewriter); + whereParser.parseWhere(expr, where); + } - public List getHavingFields() { - return havingFields; - } - - private class HavingConditionRewriter extends MySqlASTVisitorAdapter { - private int aliasSuffix = 0; + public List getHavingFields() { + return havingFields; + } - @Override - public boolean visit(SQLAggregateExpr expr) { - SQLIdentifierExpr translatedExpr = translateAggExpr(expr); - SQLObject parent = expr.getParent(); - // Rewrite {@link SQLAggregateExpr} in {@link SQLBinaryOpExpr}, e.g. HAVING AVG(age) > 30) - if (parent instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr parentOpExpr = (SQLBinaryOpExpr) parent; - if (parentOpExpr.getLeft() == expr) { - parentOpExpr.setLeft(translatedExpr); - } else { - parentOpExpr.setRight(translatedExpr); - } - // Rewrite {@link SQLAggregateExpr} in {@link SQLNotExpr}, e.g. 
HAVING NOT (AVG(a) > 30) - } else if (parent instanceof SQLNotExpr) { - SQLNotExpr parentNotExpr = (SQLNotExpr) parent; - parentNotExpr.setExpr(translatedExpr); - // Rewrite {@link SQLAggregateExpr} in {@link SQLInListExpr}, e.g. HAVING AVG(a) IN (30, 40, 50) - } else if (parent instanceof SQLInListExpr) { - SQLInListExpr parentInListExpr = (SQLInListExpr) parent; - parentInListExpr.setExpr(translatedExpr); - // Rewrite {@link SQLAggregateExpr} in {@link SQLBetweenExpr}, e.g. HAVING AVG(a) BETWEEN 30, 40 - } else if (parent instanceof SQLBetweenExpr) { - SQLBetweenExpr parentBetweenExpr = (SQLBetweenExpr) parent; - parentBetweenExpr.setTestExpr(translatedExpr); - } else { - throw new IllegalStateException("Unsupported aggregation function in having clause " - + parent.getClass()); - } + private class HavingConditionRewriter extends MySqlASTVisitorAdapter { + private int aliasSuffix = 0; - return true; + @Override + public boolean visit(SQLAggregateExpr expr) { + SQLIdentifierExpr translatedExpr = translateAggExpr(expr); + SQLObject parent = expr.getParent(); + // Rewrite {@link SQLAggregateExpr} in {@link SQLBinaryOpExpr}, e.g. HAVING AVG(age) > 30) + if (parent instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr parentOpExpr = (SQLBinaryOpExpr) parent; + if (parentOpExpr.getLeft() == expr) { + parentOpExpr.setLeft(translatedExpr); + } else { + parentOpExpr.setRight(translatedExpr); } + // Rewrite {@link SQLAggregateExpr} in {@link SQLNotExpr}, e.g. HAVING NOT (AVG(a) > 30) + } else if (parent instanceof SQLNotExpr) { + SQLNotExpr parentNotExpr = (SQLNotExpr) parent; + parentNotExpr.setExpr(translatedExpr); + // Rewrite {@link SQLAggregateExpr} in {@link SQLInListExpr}, e.g. HAVING AVG(a) IN (30, 40, + // 50) + } else if (parent instanceof SQLInListExpr) { + SQLInListExpr parentInListExpr = (SQLInListExpr) parent; + parentInListExpr.setExpr(translatedExpr); + // Rewrite {@link SQLAggregateExpr} in {@link SQLBetweenExpr}, e.g. 
HAVING AVG(a) BETWEEN + // 30, 40 + } else if (parent instanceof SQLBetweenExpr) { + SQLBetweenExpr parentBetweenExpr = (SQLBetweenExpr) parent; + parentBetweenExpr.setTestExpr(translatedExpr); + } else { + throw new IllegalStateException( + "Unsupported aggregation function in having clause " + parent.getClass()); + } - /** - * If the expr is {@link SQLAggregateExpr} - * 1) rewrite as {@link SQLIdentifierExpr} - * 2) add the {@link SQLIdentifierExpr} to the havingFields - *

- * For example, the COUNT(age) is the {@link SQLAggregateExpr} in expression COUNT(age) > 1 - * 1) parsing COUNT(age) as {@link SQLIdentifierExpr} count_1 - * 2) return {@link SQLIdentifierExpr} count_1 to the havingFields - */ - private SQLIdentifierExpr translateAggExpr(SQLAggregateExpr expr) { - String methodAlias = methodAlias(expr.getMethodName()); - SQLIdentifierExpr sqlExpr = new SQLIdentifierExpr(methodAlias); - try { - havingFields.add(new FieldMaker().makeField( - expr, - methodAlias, - null)); - return sqlExpr; - } catch (SqlParseException e) { - throw new IllegalStateException(e); - } - } + return true; + } - private String methodAlias(String methodName) { - return String.format("%s_%d", methodName.toLowerCase(), nextAlias()); - } + /** + * If the expr is {@link SQLAggregateExpr} 1) rewrite as {@link SQLIdentifierExpr} 2) add the + * {@link SQLIdentifierExpr} to the havingFields + * + *

For example, the COUNT(age) is the {@link SQLAggregateExpr} in expression COUNT(age) > 1 + * 1) parsing COUNT(age) as {@link SQLIdentifierExpr} count_1 2) return {@link + * SQLIdentifierExpr} count_1 to the havingFields + */ + private SQLIdentifierExpr translateAggExpr(SQLAggregateExpr expr) { + String methodAlias = methodAlias(expr.getMethodName()); + SQLIdentifierExpr sqlExpr = new SQLIdentifierExpr(methodAlias); + try { + havingFields.add(new FieldMaker().makeField(expr, methodAlias, null)); + return sqlExpr; + } catch (SqlParseException e) { + throw new IllegalStateException(e); + } + } - private Integer nextAlias() { - return aliasSuffix++; - } + private String methodAlias(String methodName) { + return String.format("%s_%d", methodName.toLowerCase(), nextAlias()); + } + + private Integer nextAlias() { + return aliasSuffix++; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java index 3ab8c11ee0..c8b44e1bbb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java @@ -3,42 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; import java.util.Map; import org.opensearch.sql.legacy.domain.Field; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. 
*/ public class HashJoinElasticRequestBuilder extends JoinRequestBuilder { - private List>> t1ToT2FieldsComparison; - private boolean useTermFiltersOptimization; + private List>> t1ToT2FieldsComparison; + private boolean useTermFiltersOptimization; - public HashJoinElasticRequestBuilder() { - } + public HashJoinElasticRequestBuilder() {} - @Override - public String explain() { - return "HashJoin " + super.explain(); - } + @Override + public String explain() { + return "HashJoin " + super.explain(); + } - public List>> getT1ToT2FieldsComparison() { - return t1ToT2FieldsComparison; - } + public List>> getT1ToT2FieldsComparison() { + return t1ToT2FieldsComparison; + } - public void setT1ToT2FieldsComparison(List>> t1ToT2FieldsComparison) { - this.t1ToT2FieldsComparison = t1ToT2FieldsComparison; - } + public void setT1ToT2FieldsComparison( + List>> t1ToT2FieldsComparison) { + this.t1ToT2FieldsComparison = t1ToT2FieldsComparison; + } - public boolean isUseTermFiltersOptimization() { - return useTermFiltersOptimization; - } + public boolean isUseTermFiltersOptimization() { + return useTermFiltersOptimization; + } - public void setUseTermFiltersOptimization(boolean useTermFiltersOptimization) { - this.useTermFiltersOptimization = useTermFiltersOptimization; - } + public void setUseTermFiltersOptimization(boolean useTermFiltersOptimization) { + this.useTermFiltersOptimization = useTermFiltersOptimization; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java index 316d17a275..82ebd1b225 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -18,95 
+17,99 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public class JoinRequestBuilder implements SqlElasticRequestBuilder { - private MultiSearchRequest multi; - private TableInJoinRequestBuilder firstTable; - private TableInJoinRequestBuilder secondTable; - private SQLJoinTableSource.JoinType joinType; - private int totalLimit; - - public JoinRequestBuilder() { - firstTable = new TableInJoinRequestBuilder(); - secondTable = new TableInJoinRequestBuilder(); - } - - - @Override - public ActionRequest request() { - if (multi == null) { - buildMulti(); - } - return multi; - - } - - private void buildMulti() { - multi = new MultiSearchRequest(); - multi.add(firstTable.getRequestBuilder()); - multi.add(secondTable.getRequestBuilder()); - } - - @Override - public String explain() { - try { - XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); - firstTable.getRequestBuilder().request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); - - XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); - secondTable.getRequestBuilder().request().source().toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); - return String.format(" first query:\n%s\n second query:\n%s", - BytesReference.bytes(firstBuilder).utf8ToString(), - BytesReference.bytes(secondBuilder).utf8ToString()); - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { - return null; - } - - @Override - public ActionRequestBuilder getBuilder() { - return this.firstTable.getRequestBuilder(); + private MultiSearchRequest multi; + private TableInJoinRequestBuilder firstTable; + private TableInJoinRequestBuilder secondTable; + private SQLJoinTableSource.JoinType joinType; + private int totalLimit; + + public JoinRequestBuilder() { + firstTable = new 
TableInJoinRequestBuilder(); + secondTable = new TableInJoinRequestBuilder(); + } + + @Override + public ActionRequest request() { + if (multi == null) { + buildMulti(); } - - public MultiSearchRequest getMulti() { - return multi; + return multi; + } + + private void buildMulti() { + multi = new MultiSearchRequest(); + multi.add(firstTable.getRequestBuilder()); + multi.add(secondTable.getRequestBuilder()); + } + + @Override + public String explain() { + try { + XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); + firstTable + .getRequestBuilder() + .request() + .source() + .toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); + + XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); + secondTable + .getRequestBuilder() + .request() + .source() + .toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); + return String.format( + " first query:\n%s\n second query:\n%s", + BytesReference.bytes(firstBuilder).utf8ToString(), + BytesReference.bytes(secondBuilder).utf8ToString()); + } catch (IOException e) { + e.printStackTrace(); } - - public void setMulti(MultiSearchRequest multi) { - this.multi = multi; - } - - public SQLJoinTableSource.JoinType getJoinType() { - return joinType; - } - - public void setJoinType(SQLJoinTableSource.JoinType joinType) { - this.joinType = joinType; - } - - public TableInJoinRequestBuilder getFirstTable() { - return firstTable; - } - - public TableInJoinRequestBuilder getSecondTable() { - return secondTable; - } - - public int getTotalLimit() { - return totalLimit; - } - - public void setTotalLimit(int totalLimit) { - this.totalLimit = totalLimit; - } - + return null; + } + + @Override + public ActionResponse get() { + return null; + } + + @Override + public ActionRequestBuilder getBuilder() { + return this.firstTable.getRequestBuilder(); + } + + public MultiSearchRequest getMulti() { + return multi; + } + + public void setMulti(MultiSearchRequest multi) { + this.multi = multi; + } + + public 
SQLJoinTableSource.JoinType getJoinType() { + return joinType; + } + + public void setJoinType(SQLJoinTableSource.JoinType joinType) { + this.joinType = joinType; + } + + public TableInJoinRequestBuilder getFirstTable() { + return firstTable; + } + + public TableInJoinRequestBuilder getSecondTable() { + return secondTable; + } + + public int getTotalLimit() { + return totalLimit; + } + + public void setTotalLimit(int totalLimit) { + this.totalLimit = totalLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java index 08018d94de..302af70ea8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; import static org.opensearch.sql.legacy.parser.WhereParser.getConditionForMethod; @@ -66,460 +65,472 @@ public abstract class Maker { - /** - * UTC. - */ - private static final ZoneId UTC = ZoneId.of("UTC"); - - public static final Object NONE = new Object(); - - public static final Set queryFunctions = Sets.newHashSet( - "query", - "matchquery", "match_query", // match - "multimatchquery", "multi_match", "multimatch", // multi-match - "score", "scorequery", "score_query", // score - "wildcardquery", "wildcard_query", // wildcard - "matchphrasequery", "match_phrase", "matchphrase" // match-phrase - ); - - private static final Set NOT_OPERATOR_SET = ImmutableSet.of( - Condition.OPERATOR.N, Condition.OPERATOR.NIN, Condition.OPERATOR.ISN, Condition.OPERATOR.NBETWEEN, - Condition.OPERATOR.NLIKE, Condition.OPERATOR.NIN_TERMS, Condition.OPERATOR.NTERM, - Condition.OPERATOR.NOT_EXISTS_NESTED_COMPLEX, Condition.OPERATOR.NREGEXP - ); - - protected Maker(Boolean isQuery) { - + /** UTC. 
*/ + private static final ZoneId UTC = ZoneId.of("UTC"); + + public static final Object NONE = new Object(); + + public static final Set queryFunctions = + Sets.newHashSet( + "query", + "matchquery", + "match_query", // match + "multimatchquery", + "multi_match", + "multimatch", // multi-match + "score", + "scorequery", + "score_query", // score + "wildcardquery", + "wildcard_query", // wildcard + "matchphrasequery", + "match_phrase", + "matchphrase" // match-phrase + ); + + private static final Set NOT_OPERATOR_SET = + ImmutableSet.of( + Condition.OPERATOR.N, + Condition.OPERATOR.NIN, + Condition.OPERATOR.ISN, + Condition.OPERATOR.NBETWEEN, + Condition.OPERATOR.NLIKE, + Condition.OPERATOR.NIN_TERMS, + Condition.OPERATOR.NTERM, + Condition.OPERATOR.NOT_EXISTS_NESTED_COMPLEX, + Condition.OPERATOR.NREGEXP); + + protected Maker(Boolean isQuery) {} + + /** + * @param cond + * @return + * @throws SqlParseException + */ + protected ToXContent make(Condition cond) throws SqlParseException { + + String name = cond.getName(); + Object value = cond.getValue(); + + ToXContent toXContent = null; + + if (value instanceof SQLMethodInvokeExpr) { + toXContent = make(cond, name, (SQLMethodInvokeExpr) value); + } else if (value instanceof SubQueryExpression) { + toXContent = make(cond, name, ((SubQueryExpression) value).getValues()); + } else { + if (cond.getValue() == NONE) { + toXContent = new MatchNoneQueryBuilder(); + } else { + toXContent = make(cond, name, value); + } } - /** - * - * - * @param cond - * @return - * @throws SqlParseException - */ - protected ToXContent make(Condition cond) throws SqlParseException { - - String name = cond.getName(); - Object value = cond.getValue(); - - ToXContent toXContent = null; - - if (value instanceof SQLMethodInvokeExpr) { - toXContent = make(cond, name, (SQLMethodInvokeExpr) value); - } else if (value instanceof SubQueryExpression) { - toXContent = make(cond, name, ((SubQueryExpression) value).getValues()); - } else { - if 
(cond.getValue() == NONE) { - toXContent = new MatchNoneQueryBuilder(); - } else { - toXContent = make(cond, name, value); - } + return toXContent; + } + + private ToXContent make(Condition cond, String name, SQLMethodInvokeExpr value) + throws SqlParseException { + ToXContent bqb = null; + Paramer paramer = null; + switch (value.getMethodName().toLowerCase()) { + case "query": + paramer = Paramer.parseParamer(value); + QueryStringQueryBuilder queryString = QueryBuilders.queryStringQuery(paramer.value); + bqb = Paramer.fullParamer(queryString, paramer); + bqb = applyNot(cond.getOPERATOR(), bqb); + break; + case "matchquery": + case "match_query": + paramer = Paramer.parseParamer(value); + MatchQueryBuilder matchQuery = QueryBuilders.matchQuery(name, paramer.value); + bqb = Paramer.fullParamer(matchQuery, paramer); + bqb = applyNot(cond.getOPERATOR(), bqb); + break; + case "score": + case "scorequery": + case "score_query": + Float boost = Float.parseFloat(value.getParameters().get(1).toString()); + Condition subCond = getConditionForMethod(value.getParameters().get(0), cond.getConn()); + QueryBuilder subQuery = (QueryBuilder) make(subCond); + if (subCond.isNested()) { + subQuery = QueryBuilders.nestedQuery(subCond.getNestedPath(), subQuery, ScoreMode.None); } - - return toXContent; + bqb = QueryBuilders.constantScoreQuery(subQuery).boost(boost); + break; + case "wildcardquery": + case "wildcard_query": + paramer = Paramer.parseParamer(value); + WildcardQueryBuilder wildcardQuery = QueryBuilders.wildcardQuery(name, paramer.value); + bqb = Paramer.fullParamer(wildcardQuery, paramer); + break; + + case "matchphrasequery": + case "match_phrase": + case "matchphrase": + paramer = Paramer.parseParamer(value); + MatchPhraseQueryBuilder matchPhraseQuery = + QueryBuilders.matchPhraseQuery(name, paramer.value); + bqb = Paramer.fullParamer(matchPhraseQuery, paramer); + break; + + case "multimatchquery": + case "multi_match": + case "multimatch": + paramer = 
Paramer.parseParamer(value); + MultiMatchQueryBuilder multiMatchQuery = + QueryBuilders.multiMatchQuery(paramer.value).fields(paramer.fieldsBoosts); + bqb = Paramer.fullParamer(multiMatchQuery, paramer); + break; + default: + throw new SqlParseException( + "The following query method is not supported: " + value.getMethodName()); } - private ToXContent make(Condition cond, String name, SQLMethodInvokeExpr value) throws SqlParseException { - ToXContent bqb = null; - Paramer paramer = null; - switch (value.getMethodName().toLowerCase()) { - case "query": - paramer = Paramer.parseParamer(value); - QueryStringQueryBuilder queryString = QueryBuilders.queryStringQuery(paramer.value); - bqb = Paramer.fullParamer(queryString, paramer); - bqb = applyNot(cond.getOPERATOR(), bqb); - break; - case "matchquery": - case "match_query": - paramer = Paramer.parseParamer(value); - MatchQueryBuilder matchQuery = QueryBuilders.matchQuery(name, paramer.value); - bqb = Paramer.fullParamer(matchQuery, paramer); - bqb = applyNot(cond.getOPERATOR(), bqb); - break; - case "score": - case "scorequery": - case "score_query": - Float boost = Float.parseFloat(value.getParameters().get(1).toString()); - Condition subCond = getConditionForMethod(value.getParameters().get(0), cond.getConn()); - QueryBuilder subQuery = (QueryBuilder) make(subCond); - if (subCond.isNested()) { - subQuery = QueryBuilders.nestedQuery(subCond.getNestedPath(), subQuery, ScoreMode.None); - } - bqb = QueryBuilders.constantScoreQuery(subQuery).boost(boost); - break; - case "wildcardquery": - case "wildcard_query": - paramer = Paramer.parseParamer(value); - WildcardQueryBuilder wildcardQuery = QueryBuilders.wildcardQuery(name, paramer.value); - bqb = Paramer.fullParamer(wildcardQuery, paramer); - break; - - case "matchphrasequery": - case "match_phrase": - case "matchphrase": - paramer = Paramer.parseParamer(value); - MatchPhraseQueryBuilder matchPhraseQuery = QueryBuilders.matchPhraseQuery(name, paramer.value); - bqb = 
Paramer.fullParamer(matchPhraseQuery, paramer); - break; - - case "multimatchquery": - case "multi_match": - case "multimatch": - paramer = Paramer.parseParamer(value); - MultiMatchQueryBuilder multiMatchQuery = QueryBuilders.multiMatchQuery(paramer.value) - .fields(paramer.fieldsBoosts); - bqb = Paramer.fullParamer(multiMatchQuery, paramer); - break; - default: - throw new SqlParseException("The following query method is not supported: " + value.getMethodName()); + return bqb; + } + + private ToXContent make(Condition cond, String name, Object value) throws SqlParseException { + ToXContent toXContent = null; + switch (cond.getOPERATOR()) { + case ISN: + case IS: + case N: + case EQ: + if (value == null || value instanceof SQLIdentifierExpr) { + // todo: change to exists + if (value == null || ((SQLIdentifierExpr) value).getName().equalsIgnoreCase("missing")) { + toXContent = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(name)); + } else { + throw new SqlParseException( + String.format( + "Cannot recoginze Sql identifer %s", ((SQLIdentifierExpr) value).getName())); + } + break; + } else { + toXContent = QueryBuilders.termQuery(name, value); + break; } - - return bqb; - } - - private ToXContent make(Condition cond, String name, Object value) throws SqlParseException { - ToXContent toXContent = null; - switch (cond.getOPERATOR()) { - case ISN: - case IS: - case N: - case EQ: - if (value == null || value instanceof SQLIdentifierExpr) { - //todo: change to exists - if (value == null || ((SQLIdentifierExpr) value).getName().equalsIgnoreCase("missing")) { - toXContent = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(name)); - } else { - throw new SqlParseException(String.format("Cannot recoginze Sql identifer %s", - ((SQLIdentifierExpr) value).getName())); - } - break; - } else { - toXContent = QueryBuilders.termQuery(name, value); - break; - } - case LIKE: - case NLIKE: - String queryStr = ((String) value); - queryStr = queryStr.replace('%', 
'*').replace('_', '?'); - queryStr = queryStr.replace("&PERCENT", "%").replace("&UNDERSCORE", "_"); - toXContent = QueryBuilders.wildcardQuery(name, queryStr); - break; - case REGEXP: - case NREGEXP: - Object[] values = (Object[]) value; - RegexpQueryBuilder regexpQuery = QueryBuilders.regexpQuery(name, values[0].toString()); - if (1 < values.length) { - String[] flags = values[1].toString().split("\\|"); - RegexpFlag[] regexpFlags = new RegexpFlag[flags.length]; - for (int i = 0; i < flags.length; ++i) { - regexpFlags[i] = RegexpFlag.valueOf(flags[i]); - } - regexpQuery.flags(regexpFlags); - } - if (2 < values.length) { - regexpQuery.maxDeterminizedStates(Integer.parseInt(values[2].toString())); - } - toXContent = regexpQuery; - break; - case GT: - toXContent = QueryBuilders.rangeQuery(name).gt(value); - break; - case GTE: - toXContent = QueryBuilders.rangeQuery(name).gte(value); - break; - case LT: - toXContent = QueryBuilders.rangeQuery(name).lt(value); - break; - case LTE: - toXContent = QueryBuilders.rangeQuery(name).lte(value); - break; - case NIN: - case IN: - //todo: value is subquery? 
here or before - values = (Object[]) value; - TermQueryBuilder[] termQueries = new TermQueryBuilder[values.length]; - for (int i = 0; i < values.length; i++) { - termQueries[i] = QueryBuilders.termQuery(name, values[i]); - } - - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); - for (TermQueryBuilder termQuery : termQueries) { - boolQuery.should(termQuery); - } - toXContent = boolQuery; - break; - case BETWEEN: - case NBETWEEN: - toXContent = QueryBuilders.rangeQuery(name).gte(((Object[]) value)[0]).lte(((Object[]) value)[1]); - break; - case GEO_INTERSECTS: - String wkt = cond.getValue().toString(); - try { - ShapeBuilder shapeBuilder = getShapeBuilderFromString(wkt); - toXContent = QueryBuilders.geoShapeQuery(cond.getName(), shapeBuilder); - } catch (IOException e) { - e.printStackTrace(); - throw new SqlParseException(StringUtils.format("Failed to create shapeBuilder from [%s]", wkt)); - } - break; - case GEO_BOUNDING_BOX: - BoundingBoxFilterParams boxFilterParams = (BoundingBoxFilterParams) cond.getValue(); - Point topLeft = boxFilterParams.getTopLeft(); - Point bottomRight = boxFilterParams.getBottomRight(); - toXContent = QueryBuilders.geoBoundingBoxQuery(cond.getName()).setCorners(topLeft.getLat(), - topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon()); - break; - case GEO_DISTANCE: - DistanceFilterParams distanceFilterParams = (DistanceFilterParams) cond.getValue(); - Point fromPoint = distanceFilterParams.getFrom(); - String distance = trimApostrophes(distanceFilterParams.getDistance()); - toXContent = QueryBuilders.geoDistanceQuery(cond.getName()).distance(distance) - .point(fromPoint.getLat(), fromPoint.getLon()); - break; - case GEO_POLYGON: - PolygonFilterParams polygonFilterParams = (PolygonFilterParams) cond.getValue(); - ArrayList geoPoints = new ArrayList(); - for (Point p : polygonFilterParams.getPolygon()) { - geoPoints.add(new GeoPoint(p.getLat(), p.getLon())); - } - GeoPolygonQueryBuilder polygonFilterBuilder = 
QueryBuilders.geoPolygonQuery(cond.getName(), geoPoints); - toXContent = polygonFilterBuilder; - break; - case NIN_TERMS: - case IN_TERMS: - Object[] termValues = (Object[]) value; - if (termValues.length == 1 && termValues[0] instanceof SubQueryExpression) { - termValues = ((SubQueryExpression) termValues[0]).getValues(); - } - Object[] termValuesObjects = new Object[termValues.length]; - for (int i = 0; i < termValues.length; i++) { - termValuesObjects[i] = parseTermValue(termValues[i]); - } - toXContent = QueryBuilders.termsQuery(name, termValuesObjects); - break; - case NTERM: - case TERM: - Object term = ((Object[]) value)[0]; - toXContent = QueryBuilders.termQuery(name, parseTermValue(term)); - break; - case IDS_QUERY: - Object[] idsParameters = (Object[]) value; - String[] ids; - if (idsParameters.length == 2 && idsParameters[1] instanceof SubQueryExpression) { - Object[] idsFromSubQuery = ((SubQueryExpression) idsParameters[1]).getValues(); - ids = arrayOfObjectsToStringArray(idsFromSubQuery, 0, idsFromSubQuery.length - 1); - } else { - ids = arrayOfObjectsToStringArray(idsParameters, 1, idsParameters.length - 1); - } - toXContent = QueryBuilders.idsQuery().addIds(ids); - break; - case NESTED_COMPLEX: - case NOT_EXISTS_NESTED_COMPLEX: - if (value == null || !(value instanceof Where)) { - throw new SqlParseException("unsupported nested condition"); - } - - Where whereNested = (Where) value; - BoolQueryBuilder nestedFilter = QueryMaker.explain(whereNested); - - toXContent = QueryBuilders.nestedQuery(name, nestedFilter, ScoreMode.None); - break; - case CHILDREN_COMPLEX: - if (value == null || !(value instanceof Where)) { - throw new SqlParseException("unsupported nested condition"); - } - - Where whereChildren = (Where) value; - BoolQueryBuilder childrenFilter = QueryMaker.explain(whereChildren); - //todo: pass score mode - toXContent = JoinQueryBuilders.hasChildQuery(name, childrenFilter, ScoreMode.None); - - break; - case SCRIPT: - ScriptFilter scriptFilter 
= (ScriptFilter) value; - Map params = new HashMap<>(); - if (scriptFilter.containsParameters()) { - params = scriptFilter.getArgs(); - } - - SQLExpr nameExpr = cond.getNameExpr(); - SQLExpr valueExpr = cond.getValueExpr(); - if (nameExpr instanceof SQLMethodInvokeExpr - && ((SQLMethodInvokeExpr) nameExpr).getMethodName().equalsIgnoreCase("date_format")) { - toXContent = makeForDateFormat((SQLMethodInvokeExpr) nameExpr, (SQLCharExpr) valueExpr); - } else { - toXContent = QueryBuilders.scriptQuery( - new Script( - scriptFilter.getScriptType(), - Script.DEFAULT_SCRIPT_LANG, - scriptFilter.getScript(), - params)); - } - break; - default: - throw new SqlParseException("Undefined condition: " + cond.getName()); + case LIKE: + case NLIKE: + String queryStr = ((String) value); + queryStr = queryStr.replace('%', '*').replace('_', '?'); + queryStr = queryStr.replace("&PERCENT", "%").replace("&UNDERSCORE", "_"); + toXContent = QueryBuilders.wildcardQuery(name, queryStr); + break; + case REGEXP: + case NREGEXP: + Object[] values = (Object[]) value; + RegexpQueryBuilder regexpQuery = QueryBuilders.regexpQuery(name, values[0].toString()); + if (1 < values.length) { + String[] flags = values[1].toString().split("\\|"); + RegexpFlag[] regexpFlags = new RegexpFlag[flags.length]; + for (int i = 0; i < flags.length; ++i) { + regexpFlags[i] = RegexpFlag.valueOf(flags[i]); + } + regexpQuery.flags(regexpFlags); + } + if (2 < values.length) { + regexpQuery.maxDeterminizedStates(Integer.parseInt(values[2].toString())); + } + toXContent = regexpQuery; + break; + case GT: + toXContent = QueryBuilders.rangeQuery(name).gt(value); + break; + case GTE: + toXContent = QueryBuilders.rangeQuery(name).gte(value); + break; + case LT: + toXContent = QueryBuilders.rangeQuery(name).lt(value); + break; + case LTE: + toXContent = QueryBuilders.rangeQuery(name).lte(value); + break; + case NIN: + case IN: + // todo: value is subquery? 
here or before + values = (Object[]) value; + TermQueryBuilder[] termQueries = new TermQueryBuilder[values.length]; + for (int i = 0; i < values.length; i++) { + termQueries[i] = QueryBuilders.termQuery(name, values[i]); } - toXContent = applyNot(cond.getOPERATOR(), toXContent); - return toXContent; - } - - public static boolean isQueryFunction(String methodName) { - return queryFunctions.contains(methodName.toLowerCase()); - } - - /** - * Helper method used to form a range query object for the date_format function. - *

- * Example: WHERE date_format(dateField, "YYYY-MM-dd") > "2012-01-01" - * Expected range query: - * "range": { - * "dateField": { - * "from": "2012-01-01", - * "to": null, - * "include_lower": false, - * "include_upper": true, - * "time_zone": "America/Los_Angeles", - * "format": "YYYY-MM-dd", - * "boost": 1 - * } - * } - * - * @param nameExpr SQL method expression (ex. date_format(dateField, "YYYY-MM-dd")) - * @param valueExpr Value expression being compared to the SQL method result (ex. "2012-01-01") - * @throws SqlParseException - */ - private ToXContent makeForDateFormat(SQLMethodInvokeExpr nameExpr, SQLCharExpr valueExpr) throws SqlParseException { - ToXContent toXContent = null; - List params = nameExpr.getParameters(); - - String field = params.get(0).toString(); - String format = removeSingleQuote(params.get(1).toString()); - String dateToCompare = valueExpr.getText(); - String oper = ((SQLBinaryOpExpr) nameExpr.getParent()).getOperator().name; - - String zoneId; - if (params.size() > 2) { - zoneId = ZoneId.of(removeSingleQuote(params.get(2).toString())).toString(); + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + for (TermQueryBuilder termQuery : termQueries) { + boolQuery.should(termQuery); + } + toXContent = boolQuery; + break; + case BETWEEN: + case NBETWEEN: + toXContent = + QueryBuilders.rangeQuery(name).gte(((Object[]) value)[0]).lte(((Object[]) value)[1]); + break; + case GEO_INTERSECTS: + String wkt = cond.getValue().toString(); + try { + ShapeBuilder shapeBuilder = getShapeBuilderFromString(wkt); + toXContent = QueryBuilders.geoShapeQuery(cond.getName(), shapeBuilder); + } catch (IOException e) { + e.printStackTrace(); + throw new SqlParseException( + StringUtils.format("Failed to create shapeBuilder from [%s]", wkt)); + } + break; + case GEO_BOUNDING_BOX: + BoundingBoxFilterParams boxFilterParams = (BoundingBoxFilterParams) cond.getValue(); + Point topLeft = boxFilterParams.getTopLeft(); + Point bottomRight = 
boxFilterParams.getBottomRight(); + toXContent = + QueryBuilders.geoBoundingBoxQuery(cond.getName()) + .setCorners( + topLeft.getLat(), topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon()); + break; + case GEO_DISTANCE: + DistanceFilterParams distanceFilterParams = (DistanceFilterParams) cond.getValue(); + Point fromPoint = distanceFilterParams.getFrom(); + String distance = trimApostrophes(distanceFilterParams.getDistance()); + toXContent = + QueryBuilders.geoDistanceQuery(cond.getName()) + .distance(distance) + .point(fromPoint.getLat(), fromPoint.getLon()); + break; + case GEO_POLYGON: + PolygonFilterParams polygonFilterParams = (PolygonFilterParams) cond.getValue(); + ArrayList geoPoints = new ArrayList(); + for (Point p : polygonFilterParams.getPolygon()) { + geoPoints.add(new GeoPoint(p.getLat(), p.getLon())); + } + GeoPolygonQueryBuilder polygonFilterBuilder = + QueryBuilders.geoPolygonQuery(cond.getName(), geoPoints); + toXContent = polygonFilterBuilder; + break; + case NIN_TERMS: + case IN_TERMS: + Object[] termValues = (Object[]) value; + if (termValues.length == 1 && termValues[0] instanceof SubQueryExpression) { + termValues = ((SubQueryExpression) termValues[0]).getValues(); + } + Object[] termValuesObjects = new Object[termValues.length]; + for (int i = 0; i < termValues.length; i++) { + termValuesObjects[i] = parseTermValue(termValues[i]); + } + toXContent = QueryBuilders.termsQuery(name, termValuesObjects); + break; + case NTERM: + case TERM: + Object term = ((Object[]) value)[0]; + toXContent = QueryBuilders.termQuery(name, parseTermValue(term)); + break; + case IDS_QUERY: + Object[] idsParameters = (Object[]) value; + String[] ids; + if (idsParameters.length == 2 && idsParameters[1] instanceof SubQueryExpression) { + Object[] idsFromSubQuery = ((SubQueryExpression) idsParameters[1]).getValues(); + ids = arrayOfObjectsToStringArray(idsFromSubQuery, 0, idsFromSubQuery.length - 1); } else { - // Using UTC, if there is no Zone provided. 
- zoneId = UTC.getId(); + ids = arrayOfObjectsToStringArray(idsParameters, 1, idsParameters.length - 1); } - - RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(field).format(format).timeZone(zoneId); - switch (oper) { - case "<>": - case "=": - toXContent = rangeQuery.gte(dateToCompare).lte(dateToCompare); - break; - case ">": - toXContent = rangeQuery.gt(dateToCompare); - break; - case "<": - toXContent = rangeQuery.lt(dateToCompare); - break; - case ">=": - toXContent = rangeQuery.gte(dateToCompare); - break; - case "<=": - toXContent = rangeQuery.lte(dateToCompare); - break; - case "BETWEEN": - case "NOT BETWEEN": - //todo: Add support for BETWEEN - break; - default: - throw new SqlParseException("date_format does not support the operation " + oper); + toXContent = QueryBuilders.idsQuery().addIds(ids); + break; + case NESTED_COMPLEX: + case NOT_EXISTS_NESTED_COMPLEX: + if (value == null || !(value instanceof Where)) { + throw new SqlParseException("unsupported nested condition"); } - toXContent = applyNot(Condition.OPERATOR.operStringToOpear.get(oper), toXContent); - return toXContent; - } + Where whereNested = (Where) value; + BoolQueryBuilder nestedFilter = QueryMaker.explain(whereNested); - private String removeSingleQuote(String param) { - return param.replaceAll("\'", ""); - } + toXContent = QueryBuilders.nestedQuery(name, nestedFilter, ScoreMode.None); + break; + case CHILDREN_COMPLEX: + if (value == null || !(value instanceof Where)) { + throw new SqlParseException("unsupported nested condition"); + } - private String[] arrayOfObjectsToStringArray(Object[] values, int from, int to) { - String[] strings = new String[to - from + 1]; - int counter = 0; - for (int i = from; i <= to; i++) { - strings[counter] = values[i].toString(); - counter++; + Where whereChildren = (Where) value; + BoolQueryBuilder childrenFilter = QueryMaker.explain(whereChildren); + // todo: pass score mode + toXContent = JoinQueryBuilders.hasChildQuery(name, childrenFilter, 
ScoreMode.None); + + break; + case SCRIPT: + ScriptFilter scriptFilter = (ScriptFilter) value; + Map params = new HashMap<>(); + if (scriptFilter.containsParameters()) { + params = scriptFilter.getArgs(); } - return strings; - } - private ShapeBuilder getShapeBuilderFromString(String str) throws IOException, SqlParseException { - String json; - if (str.contains("{")) { - json = fixJsonFromElastic(str); + SQLExpr nameExpr = cond.getNameExpr(); + SQLExpr valueExpr = cond.getValueExpr(); + if (nameExpr instanceof SQLMethodInvokeExpr + && ((SQLMethodInvokeExpr) nameExpr).getMethodName().equalsIgnoreCase("date_format")) { + toXContent = makeForDateFormat((SQLMethodInvokeExpr) nameExpr, (SQLCharExpr) valueExpr); } else { - json = WktToGeoJsonConverter.toGeoJson(trimApostrophes(str)); + toXContent = + QueryBuilders.scriptQuery( + new Script( + scriptFilter.getScriptType(), + Script.DEFAULT_SCRIPT_LANG, + scriptFilter.getScript(), + params)); } - - return getShapeBuilderFromJson(json); + break; + default: + throw new SqlParseException("Undefined condition: " + cond.getName()); } - /* - * elastic sends {coordinates=[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], - * type=Polygon} - * proper form is {"coordinates":[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], - * "type":"Polygon"} - * */ - private String fixJsonFromElastic(String elasticJson) { - String properJson = elasticJson.replaceAll("=", ":"); - properJson = properJson.replaceAll("(type)(:)([a-zA-Z]+)", "\"type\":\"$3\""); - properJson = properJson.replaceAll("coordinates", "\"coordinates\""); - return properJson; + toXContent = applyNot(cond.getOPERATOR(), toXContent); + return toXContent; + } + + public static boolean isQueryFunction(String methodName) { + return queryFunctions.contains(methodName.toLowerCase()); + } + + /** + * Helper method used to form a range query object for the date_format function. + * + *

Example: WHERE date_format(dateField, "YYYY-MM-dd") > "2012-01-01" Expected range query: + * "range": { "dateField": { "from": "2012-01-01", "to": null, "include_lower": false, + * "include_upper": true, "time_zone": "America/Los_Angeles", "format": "YYYY-MM-dd", "boost": 1 } + * } + * + * @param nameExpr SQL method expression (ex. date_format(dateField, "YYYY-MM-dd")) + * @param valueExpr Value expression being compared to the SQL method result (ex. "2012-01-01") + * @throws SqlParseException + */ + private ToXContent makeForDateFormat(SQLMethodInvokeExpr nameExpr, SQLCharExpr valueExpr) + throws SqlParseException { + ToXContent toXContent = null; + List params = nameExpr.getParameters(); + + String field = params.get(0).toString(); + String format = removeSingleQuote(params.get(1).toString()); + String dateToCompare = valueExpr.getText(); + String oper = ((SQLBinaryOpExpr) nameExpr.getParent()).getOperator().name; + + String zoneId; + if (params.size() > 2) { + zoneId = ZoneId.of(removeSingleQuote(params.get(2).toString())).toString(); + } else { + // Using UTC, if there is no Zone provided. 
+ zoneId = UTC.getId(); } - private ShapeBuilder getShapeBuilderFromJson(String json) throws IOException { - XContentParser parser = null; - parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, json); - parser.nextToken(); - return ShapeParser.parse(parser); + RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(field).format(format).timeZone(zoneId); + switch (oper) { + case "<>": + case "=": + toXContent = rangeQuery.gte(dateToCompare).lte(dateToCompare); + break; + case ">": + toXContent = rangeQuery.gt(dateToCompare); + break; + case "<": + toXContent = rangeQuery.lt(dateToCompare); + break; + case ">=": + toXContent = rangeQuery.gte(dateToCompare); + break; + case "<=": + toXContent = rangeQuery.lte(dateToCompare); + break; + case "BETWEEN": + case "NOT BETWEEN": + // todo: Add support for BETWEEN + break; + default: + throw new SqlParseException("date_format does not support the operation " + oper); } - private String trimApostrophes(String str) { - return str.substring(1, str.length() - 1); - } + toXContent = applyNot(Condition.OPERATOR.operStringToOpear.get(oper), toXContent); + return toXContent; + } - /** - * Applies negation to query builder if the operation is a "not" operation. 
- */ - private ToXContent applyNot(Condition.OPERATOR OPERATOR, ToXContent bqb) { - if (NOT_OPERATOR_SET.contains(OPERATOR)) { - bqb = QueryBuilders.boolQuery().mustNot((QueryBuilder) bqb); - } - return bqb; - } + private String removeSingleQuote(String param) { + return param.replaceAll("\'", ""); + } - private Object parseTermValue(Object termValue) { - if (termValue instanceof SQLNumericLiteralExpr) { - termValue = ((SQLNumericLiteralExpr) termValue).getNumber(); - if (termValue instanceof BigDecimal || termValue instanceof Double) { - termValue = ((Number) termValue).doubleValue(); - } else if (termValue instanceof Float) { - termValue = ((Number) termValue).floatValue(); - } else if (termValue instanceof BigInteger || termValue instanceof Long) { - termValue = ((Number) termValue).longValue(); - } else if (termValue instanceof Integer) { - termValue = ((Number) termValue).intValue(); - } else if (termValue instanceof Short) { - termValue = ((Number) termValue).shortValue(); - } else if (termValue instanceof Byte) { - termValue = ((Number) termValue).byteValue(); - } - } else if (termValue instanceof SQLBooleanExpr) { - termValue = ((SQLBooleanExpr) termValue).getValue(); - } else { - termValue = termValue.toString(); - } + private String[] arrayOfObjectsToStringArray(Object[] values, int from, int to) { + String[] strings = new String[to - from + 1]; + int counter = 0; + for (int i = from; i <= to; i++) { + strings[counter] = values[i].toString(); + counter++; + } + return strings; + } + + private ShapeBuilder getShapeBuilderFromString(String str) throws IOException, SqlParseException { + String json; + if (str.contains("{")) { + json = fixJsonFromElastic(str); + } else { + json = WktToGeoJsonConverter.toGeoJson(trimApostrophes(str)); + } - return termValue; + return getShapeBuilderFromJson(json); + } + + /* + * elastic sends {coordinates=[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], + * type=Polygon} + * proper form is 
{"coordinates":[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], + * "type":"Polygon"} + * */ + private String fixJsonFromElastic(String elasticJson) { + String properJson = elasticJson.replaceAll("=", ":"); + properJson = properJson.replaceAll("(type)(:)([a-zA-Z]+)", "\"type\":\"$3\""); + properJson = properJson.replaceAll("coordinates", "\"coordinates\""); + return properJson; + } + + private ShapeBuilder getShapeBuilderFromJson(String json) throws IOException { + XContentParser parser = null; + parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, json); + parser.nextToken(); + return ShapeParser.parse(parser); + } + + private String trimApostrophes(String str) { + return str.substring(1, str.length() - 1); + } + + /** Applies negation to query builder if the operation is a "not" operation. */ + private ToXContent applyNot(Condition.OPERATOR OPERATOR, ToXContent bqb) { + if (NOT_OPERATOR_SET.contains(OPERATOR)) { + bqb = QueryBuilders.boolQuery().mustNot((QueryBuilder) bqb); } + return bqb; + } + + private Object parseTermValue(Object termValue) { + if (termValue instanceof SQLNumericLiteralExpr) { + termValue = ((SQLNumericLiteralExpr) termValue).getNumber(); + if (termValue instanceof BigDecimal || termValue instanceof Double) { + termValue = ((Number) termValue).doubleValue(); + } else if (termValue instanceof Float) { + termValue = ((Number) termValue).floatValue(); + } else if (termValue instanceof BigInteger || termValue instanceof Long) { + termValue = ((Number) termValue).longValue(); + } else if (termValue instanceof Integer) { + termValue = ((Number) termValue).intValue(); + } else if (termValue instanceof Short) { + termValue = ((Number) termValue).shortValue(); + } else if (termValue instanceof Byte) { + termValue = ((Number) termValue).byteValue(); + } + } else if (termValue instanceof SQLBooleanExpr) { + termValue = ((SQLBooleanExpr) termValue).getValue(); + } 
else { + termValue = termValue.toString(); + } + + return termValue; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java index cd9b1f4030..a9eb6113f7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import java.util.HashSet; @@ -18,66 +17,68 @@ import org.opensearch.sql.legacy.query.QueryAction; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. */ public class MultiQueryAction extends QueryAction { - private MultiQuerySelect multiQuerySelect; + private MultiQuerySelect multiQuerySelect; + + public MultiQueryAction(Client client, MultiQuerySelect multiSelect) { + super(client, null); + this.multiQuerySelect = multiSelect; + } - public MultiQueryAction(Client client, MultiQuerySelect multiSelect) { - super(client, null); - this.multiQuerySelect = multiSelect; + @Override + public SqlElasticRequestBuilder explain() throws SqlParseException { + if (!isValidMultiSelectReturnFields()) { + throw new SqlParseException( + "on multi query fields/aliases of one table should be subset of other"); } + MultiQueryRequestBuilder requestBuilder = new MultiQueryRequestBuilder(this.multiQuerySelect); + requestBuilder.setFirstSearchRequest( + createRequestBuilder(this.multiQuerySelect.getFirstSelect())); + requestBuilder.setSecondSearchRequest( + createRequestBuilder(this.multiQuerySelect.getSecondSelect())); + requestBuilder.fillTableAliases( + this.multiQuerySelect.getFirstSelect().getFields(), + this.multiQuerySelect.getSecondSelect().getFields()); - @Override - public SqlElasticRequestBuilder explain() throws 
SqlParseException { - if (!isValidMultiSelectReturnFields()) { - throw new SqlParseException("on multi query fields/aliases of one table should be subset of other"); - } - MultiQueryRequestBuilder requestBuilder = new MultiQueryRequestBuilder(this.multiQuerySelect); - requestBuilder.setFirstSearchRequest(createRequestBuilder(this.multiQuerySelect.getFirstSelect())); - requestBuilder.setSecondSearchRequest(createRequestBuilder(this.multiQuerySelect.getSecondSelect())); - requestBuilder.fillTableAliases(this.multiQuerySelect.getFirstSelect().getFields(), - this.multiQuerySelect.getSecondSelect().getFields()); + return requestBuilder; + } - return requestBuilder; + private boolean isValidMultiSelectReturnFields() { + List firstQueryFields = multiQuerySelect.getFirstSelect().getFields(); + List secondQueryFields = multiQuerySelect.getSecondSelect().getFields(); + if (firstQueryFields.size() > secondQueryFields.size()) { + return isSubsetFields(firstQueryFields, secondQueryFields); } + return isSubsetFields(secondQueryFields, firstQueryFields); + } - private boolean isValidMultiSelectReturnFields() { - List firstQueryFields = multiQuerySelect.getFirstSelect().getFields(); - List secondQueryFields = multiQuerySelect.getSecondSelect().getFields(); - if (firstQueryFields.size() > secondQueryFields.size()) { - return isSubsetFields(firstQueryFields, secondQueryFields); - } - return isSubsetFields(secondQueryFields, firstQueryFields); + private boolean isSubsetFields(List bigGroup, List smallerGroup) { + Set biggerGroup = new HashSet<>(); + for (Field field : bigGroup) { + String fieldName = getNameOrAlias(field); + biggerGroup.add(fieldName); } - - private boolean isSubsetFields(List bigGroup, List smallerGroup) { - Set biggerGroup = new HashSet<>(); - for (Field field : bigGroup) { - String fieldName = getNameOrAlias(field); - biggerGroup.add(fieldName); - } - for (Field field : smallerGroup) { - String fieldName = getNameOrAlias(field); - if 
(!biggerGroup.contains(fieldName)) { - return false; - } - } - return true; + for (Field field : smallerGroup) { + String fieldName = getNameOrAlias(field); + if (!biggerGroup.contains(fieldName)) { + return false; + } } + return true; + } - private String getNameOrAlias(Field field) { - String fieldName = field.getName(); - if (field.getAlias() != null && !field.getAlias().isEmpty()) { - fieldName = field.getAlias(); - } - return fieldName; + private String getNameOrAlias(Field field) { + String fieldName = field.getName(); + if (field.getAlias() != null && !field.getAlias().isEmpty()) { + fieldName = field.getAlias(); } + return fieldName; + } - protected SearchRequestBuilder createRequestBuilder(Select select) throws SqlParseException { - DefaultQueryAction queryAction = new DefaultQueryAction(client, select); - queryAction.explain(); - return queryAction.getRequestBuilder(); - } + protected SearchRequestBuilder createRequestBuilder(Select select) throws SqlParseException { + DefaultQueryAction queryAction = new DefaultQueryAction(client, select); + queryAction.explain(); + return queryAction.getRequestBuilder(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java index 5340a701ed..b4e92a8de6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; @@ -23,108 +22,108 @@ import org.opensearch.sql.legacy.domain.Select; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. 
*/ public class MultiQueryRequestBuilder implements SqlElasticRequestBuilder { - private SearchRequestBuilder firstSearchRequest; - private SearchRequestBuilder secondSearchRequest; - private Map firstTableFieldToAlias; - private Map secondTableFieldToAlias; - private MultiQuerySelect multiQuerySelect; - private SQLUnionOperator relation; - - - public MultiQueryRequestBuilder(MultiQuerySelect multiQuerySelect) { - this.multiQuerySelect = multiQuerySelect; - this.relation = multiQuerySelect.getOperation(); - this.firstTableFieldToAlias = new HashMap<>(); - this.secondTableFieldToAlias = new HashMap<>(); - } - - @Override - public ActionRequest request() { - return null; - } - - - @Override - public String explain() { - - try { - XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); - this.firstSearchRequest.request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); - - XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); - this.secondSearchRequest.request().source().toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); - return String.format("performing %s on :\n left query:\n%s\n right query:\n%s", - this.relation.name, BytesReference.bytes(firstBuilder).utf8ToString(), - BytesReference.bytes(secondBuilder).utf8ToString()); - - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { - return null; - } - - @Override - public ActionRequestBuilder getBuilder() { - return null; + private SearchRequestBuilder firstSearchRequest; + private SearchRequestBuilder secondSearchRequest; + private Map firstTableFieldToAlias; + private Map secondTableFieldToAlias; + private MultiQuerySelect multiQuerySelect; + private SQLUnionOperator relation; + + public MultiQueryRequestBuilder(MultiQuerySelect multiQuerySelect) { + this.multiQuerySelect = multiQuerySelect; + this.relation = multiQuerySelect.getOperation(); + this.firstTableFieldToAlias = new HashMap<>(); + 
this.secondTableFieldToAlias = new HashMap<>(); + } + + @Override + public ActionRequest request() { + return null; + } + + @Override + public String explain() { + + try { + XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); + this.firstSearchRequest.request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); + + XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); + this.secondSearchRequest + .request() + .source() + .toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); + return String.format( + "performing %s on :\n left query:\n%s\n right query:\n%s", + this.relation.name, + BytesReference.bytes(firstBuilder).utf8ToString(), + BytesReference.bytes(secondBuilder).utf8ToString()); + + } catch (IOException e) { + e.printStackTrace(); } - - - public SearchRequestBuilder getFirstSearchRequest() { - return firstSearchRequest; - } - - public SearchRequestBuilder getSecondSearchRequest() { - return secondSearchRequest; - } - - public SQLUnionOperator getRelation() { - return relation; - } - - public void setFirstSearchRequest(SearchRequestBuilder firstSearchRequest) { - this.firstSearchRequest = firstSearchRequest; + return null; + } + + @Override + public ActionResponse get() { + return null; + } + + @Override + public ActionRequestBuilder getBuilder() { + return null; + } + + public SearchRequestBuilder getFirstSearchRequest() { + return firstSearchRequest; + } + + public SearchRequestBuilder getSecondSearchRequest() { + return secondSearchRequest; + } + + public SQLUnionOperator getRelation() { + return relation; + } + + public void setFirstSearchRequest(SearchRequestBuilder firstSearchRequest) { + this.firstSearchRequest = firstSearchRequest; + } + + public void setSecondSearchRequest(SearchRequestBuilder secondSearchRequest) { + this.secondSearchRequest = secondSearchRequest; + } + + public void fillTableAliases(List firstTableFields, List secondTableFields) { + fillTableToAlias(this.firstTableFieldToAlias, 
firstTableFields); + fillTableToAlias(this.secondTableFieldToAlias, secondTableFields); + } + + private void fillTableToAlias(Map fieldToAlias, List fields) { + for (Field field : fields) { + if (field.getAlias() != null && !field.getAlias().isEmpty()) { + fieldToAlias.put(field.getName(), field.getAlias()); + } } + } - public void setSecondSearchRequest(SearchRequestBuilder secondSearchRequest) { - this.secondSearchRequest = secondSearchRequest; - } - - public void fillTableAliases(List firstTableFields, List secondTableFields) { - fillTableToAlias(this.firstTableFieldToAlias, firstTableFields); - fillTableToAlias(this.secondTableFieldToAlias, secondTableFields); - } - - private void fillTableToAlias(Map fieldToAlias, List fields) { - for (Field field : fields) { - if (field.getAlias() != null && !field.getAlias().isEmpty()) { - fieldToAlias.put(field.getName(), field.getAlias()); - } - } - } - - public Map getFirstTableFieldToAlias() { - return firstTableFieldToAlias; - } + public Map getFirstTableFieldToAlias() { + return firstTableFieldToAlias; + } - public Map getSecondTableFieldToAlias() { - return secondTableFieldToAlias; - } + public Map getSecondTableFieldToAlias() { + return secondTableFieldToAlias; + } - public Select getOriginalSelect(boolean first) { - if (first) { - return this.multiQuerySelect.getFirstSelect(); - } else { - return this.multiQuerySelect.getSecondSelect(); - } + public Select getOriginalSelect(boolean first) { + if (first) { + return this.multiQuerySelect.getFirstSelect(); + } else { + return this.multiQuerySelect.getSecondSelect(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java index e5dd1716ed..72e7232a30 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java @@ -3,35 +3,32 
@@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; import org.opensearch.sql.legacy.domain.Select; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. */ public class MultiQuerySelect { - private SQLUnionOperator operation; - private Select firstSelect; - private Select secondSelect; - - public MultiQuerySelect(SQLUnionOperator operation, Select firstSelect, Select secondSelect) { - this.operation = operation; - this.firstSelect = firstSelect; - this.secondSelect = secondSelect; - } - - public SQLUnionOperator getOperation() { - return operation; - } - - public Select getFirstSelect() { - return firstSelect; - } - - public Select getSecondSelect() { - return secondSelect; - } + private SQLUnionOperator operation; + private Select firstSelect; + private Select secondSelect; + + public MultiQuerySelect(SQLUnionOperator operation, Select firstSelect, Select secondSelect) { + this.operation = operation; + this.firstSelect = firstSelect; + this.secondSelect = secondSelect; + } + + public SQLUnionOperator getOperation() { + return operation; + } + + public Select getFirstSelect() { + return firstSelect; + } + + public Select getSecondSelect() { + return secondSelect; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java index 312ade197a..25146294bc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner; import org.opensearch.client.Client; @@ -14,64 +13,50 @@ import org.opensearch.sql.legacy.request.SqlRequest; /** - 
* QueryPlanner builder for Hash Join query. In future, different queries could have its own builders to generate - * QueryPlanner. QueryPlanner would run all stages in its pipeline no matter how it be assembled. + * QueryPlanner builder for Hash Join query. In the future, different queries could have its own + * builders to generate QueryPlanner. QueryPlanner would run all stages in its pipeline no matter + * how it is assembled. */ public class HashJoinQueryPlanRequestBuilder extends HashJoinElasticRequestBuilder { - /** - * Client connection to OpenSearch cluster - */ - private final Client client; - - /** - * Query request - */ - private final SqlRequest request; - - /** - * Query planner configuration - */ - private final Config config; - - - public HashJoinQueryPlanRequestBuilder(Client client, SqlRequest request) { - this.client = client; - this.request = request; - this.config = new Config(); - } - - @Override - public String explain() { - return plan().explain(); - } - - /** - * Planning for the query and create planner for explain/execute later. 
- * - * @return query planner - */ - public QueryPlanner plan() { - config.configureLimit( - getTotalLimit(), - getFirstTable().getHintLimit(), - getSecondTable().getHintLimit() - ); - config.configureTermsFilterOptimization(isUseTermFiltersOptimization()); - - return new QueryPlanner( - client, - config, - new QueryParams( - getFirstTable(), - getSecondTable(), - getJoinType(), - getT1ToT2FieldsComparison() - ) - ); - } - - public Config getConfig() { - return config; - } + /** Client connection to OpenSearch cluster */ + private final Client client; + + /** Query request */ + private final SqlRequest request; + + /** Query planner configuration */ + private final Config config; + + public HashJoinQueryPlanRequestBuilder(Client client, SqlRequest request) { + this.client = client; + this.request = request; + this.config = new Config(); + } + + @Override + public String explain() { + return plan().explain(); + } + + /** + * Planning for the query and create planner for explain/execute later. 
+ * + * @return query planner + */ + public QueryPlanner plan() { + config.configureLimit( + getTotalLimit(), getFirstTable().getHintLimit(), getSecondTable().getHintLimit()); + config.configureTermsFilterOptimization(isUseTermFiltersOptimization()); + + return new QueryPlanner( + client, + config, + new QueryParams( + getFirstTable(), getSecondTable(), getJoinType(), getT1ToT2FieldsComparison())); + } + + public Config getConfig() { + return config; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java index dcb3c3b727..c5ed48a514 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java @@ -3,32 +3,29 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import java.util.EnumMap; -/** - * Parameters needed for physical operator execution. - */ +/** Parameters needed for physical operator execution. 
*/ public class ExecuteParams { - /** - * Mapping from type to parameters - */ - private EnumMap params = new EnumMap<>(ExecuteParamType.class); - - public void add(ExecuteParamType type, T param) { - params.put(type, param); - } + /** Mapping from type to parameters */ + private EnumMap params = new EnumMap<>(ExecuteParamType.class); - @SuppressWarnings("unchecked") - public T get(ExecuteParamType type) { - return (T) params.get(type); - } + public void add(ExecuteParamType type, T param) { + params.put(type, param); + } - public enum ExecuteParamType { - CLIENT, RESOURCE_MANAGER, EXTRA_QUERY_FILTER, TIMEOUT - } + @SuppressWarnings("unchecked") + public T get(ExecuteParamType type) { + return (T) params.get(type); + } + public enum ExecuteParamType { + CLIENT, + RESOURCE_MANAGER, + EXTRA_QUERY_FILTER, + TIMEOUT + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java index a22f2c5b7f..635ea3aace 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import com.google.common.collect.ImmutableMap; @@ -14,87 +13,71 @@ import org.opensearch.sql.legacy.query.planner.logical.node.Group; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Base class for different explanation implementation - */ +/** Base class for different explanation implementation */ public class Explanation implements Visitor { - /** - * Hard coding description to be consistent with old nested join explanation - */ - private static final String DESCRIPTION = - "Hash Join algorithm builds hash table based on result of first query, " - + "and then probes hash table to find matched rows for 
each row returned by second query"; - - /** - * Plans to be explained - */ - private final Plan logicalPlan; - private final Plan physicalPlan; - - /** - * Explanation format - */ - private final ExplanationFormat format; - - public Explanation(Plan logicalPlan, - Plan physicalPlan, - ExplanationFormat format) { - this.logicalPlan = logicalPlan; - this.physicalPlan = physicalPlan; - this.format = format; - } + /** Hard coding description to be consistent with old nested join explanation */ + private static final String DESCRIPTION = + "Hash Join algorithm builds hash table based on result of first query, " + + "and then probes hash table to find matched rows for each row returned by second query"; - @Override - public String toString() { - format.prepare(ImmutableMap.of("description", DESCRIPTION)); + /** Plans to be explained */ + private final Plan logicalPlan; - format.start("Logical Plan"); - logicalPlan.traverse(this); - format.end(); + private final Plan physicalPlan; - format.start("Physical Plan"); - physicalPlan.traverse(this); - format.end(); + /** Explanation format */ + private final ExplanationFormat format; - return format.toString(); - } + public Explanation(Plan logicalPlan, Plan physicalPlan, ExplanationFormat format) { + this.logicalPlan = logicalPlan; + this.physicalPlan = physicalPlan; + this.format = format; + } - @Override - public boolean visit(PlanNode node) { - if (isValidOp(node)) { - format.explain(node); - } - return true; - } + @Override + public String toString() { + format.prepare(ImmutableMap.of("description", DESCRIPTION)); - @Override - public void endVisit(PlanNode node) { - if (isValidOp(node)) { - format.end(); - } - } + format.start("Logical Plan"); + logicalPlan.traverse(this); + format.end(); - /** - * Check if node is a valid logical or physical operator - */ - private boolean isValidOp(PlanNode node) { - return isValidLogical(node) || isPhysical(node); - } + format.start("Physical Plan"); + physicalPlan.traverse(this); + 
format.end(); - /** - * Valid logical operator means it's Group OR NOT a no-op because Group clarify explanation - */ - private boolean isValidLogical(PlanNode node) { - return (node instanceof LogicalOperator) - && (node instanceof Group || !((LogicalOperator) node).isNoOp()); - } + return format.toString(); + } - /** - * Right now all physical operators are valid and non-no-op - */ - private boolean isPhysical(PlanNode node) { - return node instanceof PhysicalOperator; + @Override + public boolean visit(PlanNode node) { + if (isValidOp(node)) { + format.explain(node); } + return true; + } + @Override + public void endVisit(PlanNode node) { + if (isValidOp(node)) { + format.end(); + } + } + + /** Check if node is a valid logical or physical operator */ + private boolean isValidOp(PlanNode node) { + return isValidLogical(node) || isPhysical(node); + } + + /** Valid logical operator means it's Group OR NOT a no-op because Group clarify explanation */ + private boolean isValidLogical(PlanNode node) { + return (node instanceof LogicalOperator) + && (node instanceof Group || !((LogicalOperator) node).isNoOp()); + } + + /** Right now all physical operators are valid and non-no-op */ + private boolean isPhysical(PlanNode node) { + return node instanceof PhysicalOperator; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java index 23c8bb76fe..a349666221 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java @@ -3,42 +3,34 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import java.util.Map; -/** - * Explanation format - */ +/** Explanation format */ public interface ExplanationFormat { - /** - * Initialize internal data 
structure - * - * @param kvs key-value pairs - */ - void prepare(Map kvs); - - /** - * Start a new section in explanation. - * - * @param name section name - */ - void start(String name); - - - /** - * Explain and add to current section. - * - * @param object object to be added to explanation - */ - void explain(Object object); - - - /** - * End current section. - */ - void end(); - + /** + * Initialize internal data structure + * + * @param kvs key-value pairs + */ + void prepare(Map kvs); + + /** + * Start a new section in explanation. + * + * @param name section name + */ + void start(String name); + + /** + * Explain and add to current section. + * + * @param object object to be added to explanation + */ + void explain(Object object); + + /** End current section. */ + void end(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java index 404205d30b..7bf4f833de 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import java.util.ArrayDeque; @@ -13,89 +12,80 @@ import org.json.JSONException; import org.json.JSONObject; -/** - * Explain query plan in JSON format. - */ +/** Explain query plan in JSON format. 
*/ public class JsonExplanationFormat implements ExplanationFormat { - /** - * JSONObject stack to track the path from root to current ndoe - */ - private final Deque jsonObjStack = new ArrayDeque<>(); - - /** - * Indentation in final output string - */ - private final int indent; - - public JsonExplanationFormat(int indent) { - this.indent = indent; - } - - @Override - public void prepare(Map kvs) { - jsonObjStack.push(new JSONObject(kvs)); - } - - @Override - public void start(String name) { - JSONObject json = new JSONObject(); - jsonObjStack.peek().put(name, json); - jsonObjStack.push(json); - } - - @Override - public void explain(Object obj) { - JSONObject json = new JSONObject(obj); // JSONify using getter - jsonifyValueIfValidJson(json); - appendToArrayIfExist(nodeName(obj), json); - jsonObjStack.push(json); + /** JSONObject stack to track the path from root to current ndoe */ + private final Deque jsonObjStack = new ArrayDeque<>(); + + /** Indentation in final output string */ + private final int indent; + + public JsonExplanationFormat(int indent) { + this.indent = indent; + } + + @Override + public void prepare(Map kvs) { + jsonObjStack.push(new JSONObject(kvs)); + } + + @Override + public void start(String name) { + JSONObject json = new JSONObject(); + jsonObjStack.peek().put(name, json); + jsonObjStack.push(json); + } + + @Override + public void explain(Object obj) { + JSONObject json = new JSONObject(obj); // JSONify using getter + jsonifyValueIfValidJson(json); + appendToArrayIfExist(nodeName(obj), json); + jsonObjStack.push(json); + } + + @Override + public void end() { + jsonObjStack.pop(); + } + + @Override + public String toString() { + return jsonObjStack.pop().toString(indent); + } + + /** + * Trick to parse JSON in field getter due to missing support for custom processor in org.json. 
+ * And also because it's not appropriate to make getter aware of concrete format logic + */ + private void jsonifyValueIfValidJson(JSONObject json) { + for (String key : json.keySet()) { + try { + JSONObject jsonValue = new JSONObject(json.getString(key)); + json.put(key, jsonValue); + } catch (JSONException e) { + // Ignore value that is not a valid JSON. + } } - - @Override - public void end() { - jsonObjStack.pop(); - } - - @Override - public String toString() { - return jsonObjStack.pop().toString(indent); - } - - /** - * Trick to parse JSON in field getter due to missing support for custom processor - * in org.json. And also because it's not appropriate to make getter aware of concrete format logic - */ - private void jsonifyValueIfValidJson(JSONObject json) { - for (String key : json.keySet()) { - try { - JSONObject jsonValue = new JSONObject(json.getString(key)); - json.put(key, jsonValue); - } catch (JSONException e) { - // Ignore value that is not a valid JSON. - } - } + } + + private String nodeName(Object obj) { + return obj.toString(); // obj.getClass().getSimpleName(); + } + + /** Replace JSONObject by JSONArray if key is duplicate */ + private void appendToArrayIfExist(String name, JSONObject child) { + JSONObject parent = jsonObjStack.peek(); + Object otherChild = parent.opt(name); + if (otherChild == null) { + parent.put(name, child); + } else { + if (!(otherChild instanceof JSONArray)) { + parent.remove(name); + parent.append(name, otherChild); + } + parent.append(name, child); } - - private String nodeName(Object obj) { - return obj.toString(); //obj.getClass().getSimpleName(); - } - - /** - * Replace JSONObject by JSONArray if key is duplicate - */ - private void appendToArrayIfExist(String name, JSONObject child) { - JSONObject parent = jsonObjStack.peek(); - Object otherChild = parent.opt(name); - if (otherChild == null) { - parent.put(name, child); - } else { - if (!(otherChild instanceof JSONArray)) { - parent.remove(name); - 
parent.append(name, otherChild); - } - parent.append(name, child); - } - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java index 825af762f5..b814f1f563 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import java.util.Map; @@ -11,32 +10,29 @@ import org.opensearch.sql.legacy.query.planner.core.PlanNode; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Logical operator in logical plan tree. - */ +/** Logical operator in logical plan tree. */ public interface LogicalOperator extends PlanNode { - /** - * If current operator is no operation. It depends on specific internal state of operator - *

- * Ignore this field in explanation because all explainable operator are NOT no-op. - * - * @return true if NoOp - */ - @JSONPropertyIgnore - default boolean isNoOp() { - return false; - } - - /** - * Map logical operator to physical operators (possibly 1 to N mapping) - *

- * Note that generic type on PhysicalOperator[] would enforce all impl convert array to generic type array - * because generic type array is unable to be created directly. - * - * @param optimalOps optimal physical operators estimated so far - * @return list of physical operator - */ - PhysicalOperator[] toPhysical(Map> optimalOps); + /** + * If current operator is no operation. It depends on specific internal state of operator + * + *

Ignore this field in explanation because all explainable operator are NOT no-op. + * + * @return true if NoOp + */ + @JSONPropertyIgnore + default boolean isNoOp() { + return false; + } + /** + * Map logical operator to physical operators (possibly 1 to N mapping) + * + *

Note that generic type on PhysicalOperator[] would enforce all impl convert array to generic + * type array because generic type array is unable to be created directly. + * + * @param optimalOps optimal physical operators estimated so far + * @return list of physical operator + */ + PhysicalOperator[] toPhysical(Map> optimalOps); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java index 369da44e7f..05a797bbe0 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import static org.opensearch.sql.legacy.query.planner.logical.node.Project.Visitor; @@ -32,176 +31,154 @@ import org.opensearch.sql.legacy.query.planner.logical.rule.ProjectionPushDown; import org.opensearch.sql.legacy.query.planner.logical.rule.SelectionPushDown; -/** - * Logical query plan. - */ +/** Logical query plan. */ public class LogicalPlan implements Plan { - /** - * Planner configuration - */ - private final Config config; - - /** - * Parameters - */ - private final QueryParams params; - - /** - * Root node of logical query plan tree - */ - private final LogicalOperator root; - - /** - * Transformation rule - */ - private final List rules = Arrays.asList( - new SelectionPushDown(), //Enforce this run first to simplify Group. Avoid this order dependency in future. 
- new ProjectionPushDown() - ); - - public LogicalPlan(Config config, QueryParams params) { - this.config = config; - this.params = params; - this.root = buildPlanTree(); - } - - @Override - public void traverse(Visitor visitor) { - root.accept(visitor); - } - - @Override - public void optimize() { - for (LogicalPlanVisitor rule : rules) { - root.accept(rule); - } + /** Planner configuration */ + private final Config config; + + /** Parameters */ + private final QueryParams params; + + /** Root node of logical query plan tree */ + private final LogicalOperator root; + + /** Transformation rule */ + private final List rules = + Arrays.asList( + new SelectionPushDown(), // Enforce this run first to simplify Group. Avoid this order + // dependency in future. + new ProjectionPushDown()); + + public LogicalPlan(Config config, QueryParams params) { + this.config = config; + this.params = params; + this.root = buildPlanTree(); + } + + @Override + public void traverse(Visitor visitor) { + root.accept(visitor); + } + + @Override + public void optimize() { + for (LogicalPlanVisitor rule : rules) { + root.accept(rule); } - - /** - * Build logical plan tree - */ - private LogicalOperator buildPlanTree() { - return project( - top( - sort( - filter( - join( - top( - group(params.firstRequest(), config.scrollPageSize()[0]), - config.tableLimit1() - ), - top( - group(params.secondRequest(), config.scrollPageSize()[1]), - config.tableLimit2() - ) - ) - ) - ), config.totalLimit() - ) - ); - } - - /** - * Create projection operator - */ - private LogicalOperator project(LogicalOperator next) { - Project project = new Project(next); - for (TableInJoinRequestBuilder req : getRequests()) { - if (req.getOriginalSelect().isSelectAll()) { - project.projectAll(req.getAlias()); - } else { - project.project(req.getAlias(), req.getReturnedFields()); - } - } - return project; + } + + /** Build logical plan tree */ + private LogicalOperator buildPlanTree() { + return project( + top( + sort( + 
filter( + join( + top( + group(params.firstRequest(), config.scrollPageSize()[0]), + config.tableLimit1()), + top( + group(params.secondRequest(), config.scrollPageSize()[1]), + config.tableLimit2())))), + config.totalLimit())); + } + + /** Create projection operator */ + private LogicalOperator project(LogicalOperator next) { + Project project = new Project(next); + for (TableInJoinRequestBuilder req : getRequests()) { + if (req.getOriginalSelect().isSelectAll()) { + project.projectAll(req.getAlias()); + } else { + project.project(req.getAlias(), req.getReturnedFields()); + } } + return project; + } - private LogicalOperator top(LogicalOperator next, int limit) { - if (limit > 0) { - return new Top(next, limit); - } - return next; + private LogicalOperator top(LogicalOperator next, int limit) { + if (limit > 0) { + return new Top(next, limit); } - - private LogicalOperator sort(LogicalOperator next) { - List orderByColNames = new ArrayList<>(); - String orderByType = ""; - for (TableInJoinRequestBuilder request : getRequests()) { - List orderBys = request.getOriginalSelect().getOrderBys(); - if (orderBys != null) { - String tableAlias = request.getAlias() == null ? "" : request.getAlias() + "."; - for (Order orderBy : orderBys) { - orderByColNames.add(tableAlias + orderBy.getName()); - orderByType = orderBy.getType(); - } - } - } - - if (orderByColNames.isEmpty()) { - return next; + return next; + } + + private LogicalOperator sort(LogicalOperator next) { + List orderByColNames = new ArrayList<>(); + String orderByType = ""; + for (TableInJoinRequestBuilder request : getRequests()) { + List orderBys = request.getOriginalSelect().getOrderBys(); + if (orderBys != null) { + String tableAlias = request.getAlias() == null ? 
"" : request.getAlias() + "."; + for (Order orderBy : orderBys) { + orderByColNames.add(tableAlias + orderBy.getName()); + orderByType = orderBy.getType(); } - return new Sort(next, orderByColNames, orderByType); + } } - private LogicalOperator filter(LogicalOperator next) { - Filter filter = new Filter(next, getRequests()); - if (filter.isNoOp()) { - return next; - } - return filter; + if (orderByColNames.isEmpty()) { + return next; } + return new Sort(next, orderByColNames, orderByType); + } - private LogicalOperator join(LogicalOperator left, LogicalOperator right) { - return new Join( - left, right, - params.joinType(), - groupJoinConditionByOr(), - config.blockSize(), - config.isUseTermsFilterOptimization() - ); + private LogicalOperator filter(LogicalOperator next) { + Filter filter = new Filter(next, getRequests()); + if (filter.isNoOp()) { + return next; } - - /** - * Group conditions in ON by OR because it makes hash table group be required too - */ - private JoinCondition groupJoinConditionByOr() { - String leftTableAlias = params.firstRequest().getAlias(); - String rightTableAlias = params.secondRequest().getAlias(); - - JoinCondition orCond; - if (params.joinConditions().isEmpty()) { - orCond = new JoinCondition(leftTableAlias, rightTableAlias, 0); - } else { - orCond = new JoinCondition(leftTableAlias, rightTableAlias, params.joinConditions().size()); - for (int i = 0; i < params.joinConditions().size(); i++) { - List> andCond = params.joinConditions().get(i); - String[] leftColumnNames = new String[andCond.size()]; - String[] rightColumnNames = new String[andCond.size()]; - - for (int j = 0; j < andCond.size(); j++) { - Map.Entry cond = andCond.get(j); - leftColumnNames[j] = cond.getKey().getName(); - rightColumnNames[j] = cond.getValue().getName(); - } - - orCond.addLeftColumnNames(i, leftColumnNames); - orCond.addRightColumnNames(i, rightColumnNames); - } + return filter; + } + + private LogicalOperator join(LogicalOperator left, LogicalOperator 
right) { + return new Join( + left, + right, + params.joinType(), + groupJoinConditionByOr(), + config.blockSize(), + config.isUseTermsFilterOptimization()); + } + + /** Group conditions in ON by OR because it makes hash table group be required too */ + private JoinCondition groupJoinConditionByOr() { + String leftTableAlias = params.firstRequest().getAlias(); + String rightTableAlias = params.secondRequest().getAlias(); + + JoinCondition orCond; + if (params.joinConditions().isEmpty()) { + orCond = new JoinCondition(leftTableAlias, rightTableAlias, 0); + } else { + orCond = new JoinCondition(leftTableAlias, rightTableAlias, params.joinConditions().size()); + for (int i = 0; i < params.joinConditions().size(); i++) { + List> andCond = params.joinConditions().get(i); + String[] leftColumnNames = new String[andCond.size()]; + String[] rightColumnNames = new String[andCond.size()]; + + for (int j = 0; j < andCond.size(); j++) { + Map.Entry cond = andCond.get(j); + leftColumnNames[j] = cond.getKey().getName(); + rightColumnNames[j] = cond.getValue().getName(); } - return orCond; - } - private LogicalOperator group(TableInJoinRequestBuilder request, int pageSize) { - return new Group(new TableScan(request, pageSize)); + orCond.addLeftColumnNames(i, leftColumnNames); + orCond.addRightColumnNames(i, rightColumnNames); + } } + return orCond; + } - private List getRequests() { - return Arrays.asList(params.firstRequest(), params.secondRequest()); - } + private LogicalOperator group(TableInJoinRequestBuilder request, int pageSize) { + return new Group(new TableScan(request, pageSize)); + } - private List map(Collection source, Function func) { - return source.stream().map(func).collect(Collectors.toList()); - } + private List getRequests() { + return Arrays.asList(params.firstRequest(), params.secondRequest()); + } + private List map(Collection source, Function func) { + return source.stream().map(func).collect(Collectors.toList()); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java index b779242a09..ef9e1a8d93 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import org.opensearch.sql.legacy.query.planner.core.PlanNode; @@ -21,93 +20,86 @@ */ public interface LogicalPlanVisitor extends Visitor { - @Override - default boolean visit(PlanNode op) { - if (op instanceof Project) { - return visit((Project) op); - } else if (op instanceof Filter) { - return visit((Filter) op); - } else if (op instanceof Join) { - return visit((Join) op); - } else if (op instanceof Group) { - return visit((Group) op); - } else if (op instanceof TableScan) { - return visit((TableScan) op); - } else if (op instanceof Top) { - return visit((Top) op); - } else if (op instanceof Sort) { - return visit((Sort) op); - } - throw new IllegalArgumentException("Unknown operator type: " + op); - } - - @Override - default void endVisit(PlanNode op) { - if (op instanceof Project) { - endVisit((Project) op); - } else if (op instanceof Filter) { - endVisit((Filter) op); - } else if (op instanceof Join) { - endVisit((Join) op); - } else if (op instanceof Group) { - endVisit((Group) op); - } else if (op instanceof TableScan) { - endVisit((TableScan) op); - } else if (op instanceof Top) { - endVisit((Top) op); - } else if (op instanceof Sort) { - endVisit((Sort) op); - } else { - throw new IllegalArgumentException("Unknown operator type: " + op); - } - } - - default boolean visit(Project project) { - return true; - } - - default void endVisit(Project project) { - } - - default boolean visit(Filter filter) { - return true; - } - - 
default void endVisit(Filter filter) { - } - - default boolean visit(Join join) { - return true; - } - - default void endVisit(Join join) { - } - - default boolean visit(Group group) { - return true; - } - - default void endVisit(Group group) { - } - - default boolean visit(TableScan scan) { - return true; - } - - default void endVisit(TableScan scan) { - } - - default boolean visit(Top top) { - return true; - } - - default void endVisit(Top top) { - } - - default boolean visit(Sort sort) { - return true; - } - - default void endVisit(Sort sort) { - } + @Override + default boolean visit(PlanNode op) { + if (op instanceof Project) { + return visit((Project) op); + } else if (op instanceof Filter) { + return visit((Filter) op); + } else if (op instanceof Join) { + return visit((Join) op); + } else if (op instanceof Group) { + return visit((Group) op); + } else if (op instanceof TableScan) { + return visit((TableScan) op); + } else if (op instanceof Top) { + return visit((Top) op); + } else if (op instanceof Sort) { + return visit((Sort) op); + } + throw new IllegalArgumentException("Unknown operator type: " + op); + } + + @Override + default void endVisit(PlanNode op) { + if (op instanceof Project) { + endVisit((Project) op); + } else if (op instanceof Filter) { + endVisit((Filter) op); + } else if (op instanceof Join) { + endVisit((Join) op); + } else if (op instanceof Group) { + endVisit((Group) op); + } else if (op instanceof TableScan) { + endVisit((TableScan) op); + } else if (op instanceof Top) { + endVisit((Top) op); + } else if (op instanceof Sort) { + endVisit((Sort) op); + } else { + throw new IllegalArgumentException("Unknown operator type: " + op); + } + } + + default boolean visit(Project project) { + return true; + } + + default void endVisit(Project project) {} + + default boolean visit(Filter filter) { + return true; + } + + default void endVisit(Filter filter) {} + + default boolean visit(Join join) { + return true; + } + + default void endVisit(Join 
join) {} + + default boolean visit(Group group) { + return true; + } + + default void endVisit(Group group) {} + + default boolean visit(TableScan scan) { + return true; + } + + default void endVisit(TableScan scan) {} + + default boolean visit(Top top) { + return true; + } + + default void endVisit(Top top) {} + + default boolean visit(Sort sort) { + return true; + } + + default void endVisit(Sort sort) {} } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java index f5e3e40f2d..5d4423d67a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.HashMap; @@ -16,59 +15,54 @@ import org.opensearch.sql.legacy.query.planner.logical.LogicalOperator; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Selection expression - */ +/** Selection expression */ public class Filter implements LogicalOperator { - private final LogicalOperator next; + private final LogicalOperator next; - /** - * Alias to WHERE clause mapping - */ - private final Map aliasWhereMap = new HashMap<>(); + /** Alias to WHERE clause mapping */ + private final Map aliasWhereMap = new HashMap<>(); - public Filter(LogicalOperator next, List tables) { - this.next = next; - - for (TableInJoinRequestBuilder table : tables) { - Select select = table.getOriginalSelect(); - if (select.getWhere() != null) { - aliasWhereMap.put(table.getAlias(), select.getWhere()); - } - } - } + public Filter(LogicalOperator next, List tables) { + this.next = next; - public Filter(LogicalOperator next) { - this.next = next; + for (TableInJoinRequestBuilder table : tables) { + Select select = 
table.getOriginalSelect(); + if (select.getWhere() != null) { + aliasWhereMap.put(table.getAlias(), select.getWhere()); + } } + } - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + public Filter(LogicalOperator next) { + this.next = next; + } - @Override - public boolean isNoOp() { - return aliasWhereMap.isEmpty(); - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - // Always no-op after push down, skip it by returning next - return new PhysicalOperator[]{optimalOps.get(next)}; - } + @Override + public boolean isNoOp() { + return aliasWhereMap.isEmpty(); + } - public void pushDown(String tableAlias, Filter pushedDownFilter) { - Where pushedDownWhere = pushedDownFilter.aliasWhereMap.remove(tableAlias); - if (pushedDownWhere != null) { - aliasWhereMap.put(tableAlias, pushedDownWhere); - } - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + // Always no-op after push down, skip it by returning next + return new PhysicalOperator[] {optimalOps.get(next)}; + } - @Override - public String toString() { - return "Filter [ conditions=" + aliasWhereMap.values() + " ]"; + public void pushDown(String tableAlias, Filter pushedDownFilter) { + Where pushedDownWhere = pushedDownFilter.aliasWhereMap.remove(tableAlias); + if (pushedDownWhere != null) { + aliasWhereMap.put(tableAlias, pushedDownWhere); } + } + @Override + public String toString() { + return "Filter [ conditions=" + aliasWhereMap.values() + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java index 5ae9ddc0a2..da94ae74da 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java @@ -3,7 +3,6 
@@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.Map; @@ -11,66 +10,57 @@ import org.opensearch.sql.legacy.query.planner.logical.LogicalOperator; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Project-Filter-TableScan group for push down optimization convenience. - */ +/** Project-Filter-TableScan group for push down optimization convenience. */ public class Group implements LogicalOperator { - /** - * Optional pushed down projection - */ - private Project project; - - /** - * Optional pushed down filter (selection) - */ - private Filter filter; + /** Optional pushed down projection */ + private Project project; - /** - * Required table scan operator - */ - private final TableScan tableScan; + /** Optional pushed down filter (selection) */ + private Filter filter; + /** Required table scan operator */ + private final TableScan tableScan; - public Group(TableScan tableScan) { - this.tableScan = tableScan; - this.filter = new Filter(tableScan); - this.project = new Project<>(filter); - } + public Group(TableScan tableScan) { + this.tableScan = tableScan; + this.filter = new Filter(tableScan); + this.project = new Project<>(filter); + } - @Override - public boolean isNoOp() { - return true; - } + @Override + public boolean isNoOp() { + return true; + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - return tableScan.toPhysical(optimalOps); - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + return tableScan.toPhysical(optimalOps); + } - @Override - public PlanNode[] children() { - return new PlanNode[]{topNonNullNode()}; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {topNonNullNode()}; + } - private PlanNode topNonNullNode() { - return project != null ? project : (filter != null ? filter : tableScan); - } + private PlanNode topNonNullNode() { + return project != null ? 
project : (filter != null ? filter : tableScan); + } - public String id() { - return tableScan.getTableAlias(); - } + public String id() { + return tableScan.getTableAlias(); + } - public void pushDown(Project project) { - this.project.pushDown(id(), project); - } + public void pushDown(Project project) { + this.project.pushDown(id(), project); + } - public void pushDown(Filter filter) { - this.filter.pushDown(id(), filter); - } + public void pushDown(Filter filter) { + this.filter.pushDown(id(), filter); + } - @Override - public String toString() { - return "Group"; - } + @Override + public String toString() { + return "Group"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java index ae833ca580..405a8a9f72 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -15,76 +14,65 @@ import org.opensearch.sql.legacy.query.planner.physical.node.join.BlockHashJoin; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; -/** - * Join expression - */ +/** Join expression */ public class Join implements LogicalOperator { - private final LogicalOperator left; - private final LogicalOperator right; - - /** - * Join type, ex inner join, left join - */ - private final JoinType type; - - /** - * Joined columns in ON condition - */ - private final JoinCondition condition; - - /** - * Block size calculator - */ - private final BlockSize blockSize; - - /** - * Use terms filter optimization or not - */ - private final boolean isUseTermsFilterOptimization; - - - public Join(LogicalOperator left, - 
LogicalOperator right, - JoinType joinType, - JoinCondition condition, - BlockSize blockSize, - boolean isUseTermsFilterOptimization) { - this.left = left; - this.right = right; - this.type = joinType; - this.condition = condition; - this.blockSize = blockSize; - this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{left, right}; - } - - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - PhysicalOperator optimalLeft = optimalOps.get(left); - PhysicalOperator optimalRight = optimalOps.get(right); - return new PhysicalOperator[]{ - new BlockHashJoin<>( - optimalLeft, optimalRight, type, condition, - blockSize, isUseTermsFilterOptimization - ) - }; - } - - public JoinCondition conditions() { - return condition; - } - - @Override - public String toString() { - return "Join [ conditions=" + condition + " type=" + type + " ]"; - } + private final LogicalOperator left; + private final LogicalOperator right; + + /** Join type, ex inner join, left join */ + private final JoinType type; + + /** Joined columns in ON condition */ + private final JoinCondition condition; + + /** Block size calculator */ + private final BlockSize blockSize; + + /** Use terms filter optimization or not */ + private final boolean isUseTermsFilterOptimization; + + public Join( + LogicalOperator left, + LogicalOperator right, + JoinType joinType, + JoinCondition condition, + BlockSize blockSize, + boolean isUseTermsFilterOptimization) { + this.left = left; + this.right = right; + this.type = joinType; + this.condition = condition; + this.blockSize = blockSize; + this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {left, right}; + } + + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + PhysicalOperator optimalLeft = optimalOps.get(left); + PhysicalOperator optimalRight = 
optimalOps.get(right); + return new PhysicalOperator[] { + new BlockHashJoin<>( + optimalLeft, optimalRight, type, condition, blockSize, isUseTermsFilterOptimization) + }; + } + + public JoinCondition conditions() { + return condition; + } + + @Override + public String toString() { + return "Join [ conditions=" + condition + " type=" + type + " ]"; + } /** + *

      * Join condition in ON clause grouped by OR.
      * 

* For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address" @@ -97,80 +85,77 @@ public String toString() { * leftTableAlias: "a", rightTableAlias: "b" * leftColumnNames: [ ["name", "age"], ["location"] ] * rightColumnNames: [ ["id", "age"], ["address" ] ] + *

*/ public static class JoinCondition { - private final String leftTableAlias; - private final String rightTableAlias; + private final String leftTableAlias; + private final String rightTableAlias; - private final String[][] leftColumnNames; - private final String[][] rightColumnNames; + private final String[][] leftColumnNames; + private final String[][] rightColumnNames; - public JoinCondition(String leftTableAlias, - String rightTableAlias, - int groupSize) { - this.leftTableAlias = leftTableAlias; - this.rightTableAlias = rightTableAlias; - this.leftColumnNames = new String[groupSize][]; - this.rightColumnNames = new String[groupSize][]; - } + public JoinCondition(String leftTableAlias, String rightTableAlias, int groupSize) { + this.leftTableAlias = leftTableAlias; + this.rightTableAlias = rightTableAlias; + this.leftColumnNames = new String[groupSize][]; + this.rightColumnNames = new String[groupSize][]; + } - public void addLeftColumnNames(int groupNum, String[] colNames) { - leftColumnNames[groupNum] = colNames; - } + public void addLeftColumnNames(int groupNum, String[] colNames) { + leftColumnNames[groupNum] = colNames; + } - public void addRightColumnNames(int groupNum, String[] colNames) { - rightColumnNames[groupNum] = colNames; - } + public void addRightColumnNames(int groupNum, String[] colNames) { + rightColumnNames[groupNum] = colNames; + } - public int groupSize() { - return leftColumnNames.length; - } + public int groupSize() { + return leftColumnNames.length; + } - public String leftTableAlias() { - return leftTableAlias; - } + public String leftTableAlias() { + return leftTableAlias; + } - public String rightTableAlias() { - return rightTableAlias; - } + public String rightTableAlias() { + return rightTableAlias; + } - public String[] leftColumnNames(int groupNum) { - return leftColumnNames[groupNum]; - } + public String[] leftColumnNames(int groupNum) { + return leftColumnNames[groupNum]; + } - public String[] rightColumnNames(int groupNum) { - 
return rightColumnNames[groupNum]; - } + public String[] rightColumnNames(int groupNum) { + return rightColumnNames[groupNum]; + } - @Override - public String toString() { - StringBuilder str = new StringBuilder(); - int groupSize = leftColumnNames.length; - for (int i = 0; i < groupSize; i++) { - if (i > 0) { - str.append(" OR "); - } - - str.append("( "); - int condSize = leftColumnNames[i].length; - for (int j = 0; j < condSize; j++) { - if (j > 0) { - str.append(" AND "); - } - str.append(leftTableAlias). - append("."). - append(leftColumnNames[i][j]). - append(" = "). - append(rightTableAlias). - append("."). - append(rightColumnNames[i][j]); - } - str.append(" )"); - } - return str.toString(); + @Override + public String toString() { + StringBuilder str = new StringBuilder(); + int groupSize = leftColumnNames.length; + for (int i = 0; i < groupSize; i++) { + if (i > 0) { + str.append(" OR "); } + str.append("( "); + int condSize = leftColumnNames[i].length; + for (int j = 0; j < condSize; j++) { + if (j > 0) { + str.append(" AND "); + } + str.append(leftTableAlias) + .append(".") + .append(leftColumnNames[i][j]) + .append(" = ") + .append(rightTableAlias) + .append(".") + .append(rightColumnNames[i][j]); + } + str.append(" )"); + } + return str.toString(); } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java index 1648cf854d..72ffbd4652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.estimation; import static java.util.Comparator.comparing; @@ -18,39 +17,35 @@ import 
org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; /** - * Convert and estimate the cost of each operator and generate one optimal plan. - * Memorize cost of candidate physical operators in the bottom-up way to avoid duplicate computation. + * Convert and estimate the cost of each operator and generate one optimal plan. Memorize cost of + * candidate physical operators in the bottom-up way to avoid duplicate computation. */ public class Estimation implements LogicalPlanVisitor { - /** - * Optimal physical operator for logical operator based on completed estimation - */ - private Map> optimalOps = new IdentityHashMap<>(); - - /** - * Keep tracking of the operator that exit visit() - */ - private PhysicalOperator root; - - @Override - public boolean visit(Group group) { - return false; - } - - @SuppressWarnings("unchecked") - @Override - public void endVisit(PlanNode node) { - LogicalOperator op = (LogicalOperator) node; - PhysicalOperator optimal = Arrays.stream(op.toPhysical(optimalOps)). - min(comparing(PhysicalOperator::estimate)). 
- orElseThrow(() -> new IllegalStateException( - "No optimal operator found: " + op)); - optimalOps.put(op, optimal); - root = optimal; - } - - public PhysicalOperator optimalPlan() { - return root; - } + /** Optimal physical operator for logical operator based on completed estimation */ + private Map> optimalOps = new IdentityHashMap<>(); + + /** Keep tracking of the operator that exit visit() */ + private PhysicalOperator root; + + @Override + public boolean visit(Group group) { + return false; + } + + @SuppressWarnings("unchecked") + @Override + public void endVisit(PlanNode node) { + LogicalOperator op = (LogicalOperator) node; + PhysicalOperator optimal = + Arrays.stream(op.toPhysical(optimalOps)) + .min(comparing(PhysicalOperator::estimate)) + .orElseThrow(() -> new IllegalStateException("No optimal operator found: " + op)); + optimalOps.put(op, optimal); + root = optimal; + } + + public PhysicalOperator optimalPlan() { + return root; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java index 4a20b1833b..1811af5158 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.Collection; @@ -17,50 +16,42 @@ */ public interface HashTable { - /** - * Add one row to the hash table - * - * @param row row - */ - void add(Row row); - - - /** - * Find all matched row(s) in the hash table. 
- * - * @param row row to be matched - * @return all matches - */ - Collection> match(Row row); - - - /** - * Mapping from right field to value(s) of left size - * - * @return - */ - Map>[] rightFieldWithLeftValues(); - - - /** - * Get size of hash table - * - * @return size of hash table - */ - int size(); - - - /** - * Is hash table empty? - * - * @return true for yes - */ - boolean isEmpty(); - - - /** - * Clear internal data structure - */ - void clear(); - + /** + * Add one row to the hash table + * + * @param row row + */ + void add(Row row); + + /** + * Find all matched row(s) in the hash table. + * + * @param row row to be matched + * @return all matches + */ + Collection> match(Row row); + + /** + * Mapping from right field to value(s) of left size + * + * @return + */ + Map>[] rightFieldWithLeftValues(); + + /** + * Get size of hash table + * + * @return size of hash table + */ + int size(); + + /** + * Is hash table empty? + * + * @return true for yes + */ + boolean isEmpty(); + + /** Clear internal data structure */ + void clear(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java index c22eb9dc19..08867f8c5d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static org.opensearch.sql.legacy.query.planner.logical.node.Join.JoinCondition; @@ -15,75 +14,69 @@ import java.util.Set; import org.opensearch.sql.legacy.query.planner.physical.Row; -/** - * Hash table group with each hash table per AND join condition. - */ +/** Hash table group with each hash table per AND join condition. 
*/ public class HashTableGroup implements HashTable { - private final HashTable[] hashTables; + private final HashTable[] hashTables; - /** - * Number of rows stored in the hash table (in other words, = block size) - */ - private int numOfRows = 0; + /** Number of rows stored in the hash table (in other words, = block size) */ + private int numOfRows = 0; - @SuppressWarnings("unchecked") - public HashTableGroup(JoinCondition condition) { - int groupSize = condition.groupSize(); - if (groupSize == 0) { - // Create one hash table (degraded to list) for Cross Join - hashTables = new HashTable[]{new ListHashTable()}; - } else { - hashTables = new HashTable[groupSize]; - for (int i = 0; i < groupSize; i++) { - hashTables[i] = new DefaultHashTable<>( - condition.leftColumnNames(i), - condition.rightColumnNames(i) - ); - } - } + @SuppressWarnings("unchecked") + public HashTableGroup(JoinCondition condition) { + int groupSize = condition.groupSize(); + if (groupSize == 0) { + // Create one hash table (degraded to list) for Cross Join + hashTables = new HashTable[] {new ListHashTable()}; + } else { + hashTables = new HashTable[groupSize]; + for (int i = 0; i < groupSize; i++) { + hashTables[i] = + new DefaultHashTable<>(condition.leftColumnNames(i), condition.rightColumnNames(i)); + } } + } - @Override - public void add(Row row) { - for (HashTable hashTable : hashTables) { - hashTable.add(row); - } - numOfRows++; + @Override + public void add(Row row) { + for (HashTable hashTable : hashTables) { + hashTable.add(row); } + numOfRows++; + } - @Override - public Collection> match(Row row) { - Set> allMatched = Sets.newIdentityHashSet(); - for (HashTable hashTable : hashTables) { - allMatched.addAll(hashTable.match(row)); - } - return allMatched; + @Override + public Collection> match(Row row) { + Set> allMatched = Sets.newIdentityHashSet(); + for (HashTable hashTable : hashTables) { + allMatched.addAll(hashTable.match(row)); } + return allMatched; + } - 
@SuppressWarnings("unchecked") - public Map>[] rightFieldWithLeftValues() { - return Arrays.stream(hashTables). - map(hashTable -> hashTable.rightFieldWithLeftValues()[0]). // Make interface consistent - toArray(Map[]::new); - } + @SuppressWarnings("unchecked") + public Map>[] rightFieldWithLeftValues() { + return Arrays.stream(hashTables) + // Make interface consistent + .map(hashTable -> hashTable.rightFieldWithLeftValues()[0]) + .toArray(Map[]::new); + } - @Override - public boolean isEmpty() { - return numOfRows == 0; - } + @Override + public boolean isEmpty() { + return numOfRows == 0; + } - @Override - public int size() { - return numOfRows; - } + @Override + public int size() { + return numOfRows; + } - @Override - public void clear() { - for (HashTable hashTable : hashTables) { - hashTable.clear(); - } - numOfRows = 0; + @Override + public void clear() { + for (HashTable hashTable : hashTables) { + hashTable.clear(); } - + numOfRows = 0; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java index 07f008bea4..9fcb977beb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static java.util.Collections.emptyList; @@ -31,253 +30,232 @@ */ public abstract class JoinAlgorithm extends BatchPhysicalOperator { - protected static final Logger LOG = LogManager.getLogger(); - - /** - * Left child operator - */ - private final PhysicalOperator left; - - /** - * Right child operator handled by concrete join algorithm subclass - */ - protected final PhysicalOperator right; - - /** - * Join type ex. 
inner join, left join - */ - private final JoinType type; - - /** - * Joined columns in ON conditions - */ - private final JoinCondition condition; - - /** - * Block size calculator - */ - private final BlockSize blockSize; - - /** - * Bookkeeping unmatched rows in current block from left - */ - private final Set> leftMismatch; - - /** - * Hash table for right table probing - */ - protected HashTable hashTable; - - /** - * Execute params to reset right side for each left block - */ - protected ExecuteParams params; - - JoinAlgorithm(PhysicalOperator left, - PhysicalOperator right, - JoinType type, - JoinCondition condition, - BlockSize blockSize) { - this.left = left; - this.right = right; - this.type = type; - this.condition = condition; - this.blockSize = blockSize; - this.hashTable = new HashTableGroup<>(condition); - this.leftMismatch = Sets.newIdentityHashSet(); - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{left, right}; - } - - @Override - public void open(ExecuteParams params) throws Exception { - super.open(params); - left.open(params); - this.params = params; - } - - @Override - public void close() { - super.close(); - hashTable.clear(); - leftMismatch.clear(); - LOG.debug("Cleared all resources used by join"); - } + protected static final Logger LOG = LogManager.getLogger(); + + /** Left child operator */ + private final PhysicalOperator left; + + /** Right child operator handled by concrete join algorithm subclass */ + protected final PhysicalOperator right; + + /** Join type ex. 
inner join, left join */ + private final JoinType type; + + /** Joined columns in ON conditions */ + private final JoinCondition condition; + + /** Block size calculator */ + private final BlockSize blockSize; + + /** Bookkeeping unmatched rows in current block from left */ + private final Set> leftMismatch; + + /** Hash table for right table probing */ + protected HashTable hashTable; + + /** Execute params to reset right side for each left block */ + protected ExecuteParams params; + + JoinAlgorithm( + PhysicalOperator left, + PhysicalOperator right, + JoinType type, + JoinCondition condition, + BlockSize blockSize) { + this.left = left; + this.right = right; + this.type = type; + this.condition = condition; + this.blockSize = blockSize; + this.hashTable = new HashTableGroup<>(condition); + this.leftMismatch = Sets.newIdentityHashSet(); + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {left, right}; + } + + @Override + public void open(ExecuteParams params) throws Exception { + super.open(params); + left.open(params); + this.params = params; + } + + @Override + public void close() { + super.close(); + hashTable.clear(); + leftMismatch.clear(); + LOG.debug("Cleared all resources used by join"); + } /** * Build-probe left and right block by block to prefetch next matches (and mismatches if outer join). - *

- * 1) Build hash table and open right side. - * 2) Keep probing right to find matched rows (meanwhile update mismatched set) - * 3) Check if any row in mismatched set to return in the case of outer join. - * 4) Nothing remained now, move on to next block of left. Go back to step 1. - *

+ *

    + *
  1. Build hash table and open right side. + *
  2. Keep probing right to find matched rows (meanwhile update mismatched set) + *
  3. Check if any row in mismatched set to return in the case of outer join. + *
  4. Nothing remained now, move on to next block of left. Go back to step 1. + *
* This is a new run AND no block from left means algorithm should stop and return empty. */ @Override protected Collection> prefetch() throws Exception { while (!isNewRunButNoMoreBlockFromLeft()) { - // 1.Build hash table and (re-)open right side for the new run - if (isNewRun()) { - buildHashTableByNextBlock(); - reopenRight(); - } - - // 2.Keep probing right by the hash table and bookkeeping mismatch - while (isAnyMoreDataFromRight()) { - Collection> matched = probeMatchAndBookkeepMismatch(); - if (!matched.isEmpty()) { - return matched; - } - } - - // 3.You know it's a mismatch only after this run finished (left block + all right). - if (isAnyMismatchForOuterJoin()) { - return returnAndClearMismatch(); - } - - // 4.Clean up and close right - cleanUpAndCloseRight(); - } - return emptyList(); - } - - /** - * Probe right by hash table built from left. Handle matched and mismatched rows. - */ - private Collection> probeMatchAndBookkeepMismatch() { - if (hashTable.isEmpty()) { - throw new IllegalStateException("Hash table is NOT supposed to be empty"); + // 1.Build hash table and (re-)open right side for the new run + if (isNewRun()) { + buildHashTableByNextBlock(); + reopenRight(); + } + + // 2.Keep probing right by the hash table and bookkeeping mismatch + while (isAnyMoreDataFromRight()) { + Collection> matched = probeMatchAndBookkeepMismatch(); + if (!matched.isEmpty()) { + return matched; } + } - List> combinedRows = probe(); + // 3.You know it's a mismatch only after this run finished (left block + all right). 
+ if (isAnyMismatchForOuterJoin()) { + return returnAndClearMismatch(); + } - List> matchRows = new ArrayList<>(); - if (combinedRows.isEmpty()) { - LOG.debug("No matched row found"); - } else { - if (LOG.isTraceEnabled()) { - combinedRows.forEach(row -> LOG.trace("Matched row before combined: {}", row)); - } + // 4.Clean up and close right + cleanUpAndCloseRight(); + } + return emptyList(); + } - for (CombinedRow row : combinedRows) { - matchRows.addAll(row.combine()); - } + /** Probe right by hash table built from left. Handle matched and mismatched rows. */ + private Collection> probeMatchAndBookkeepMismatch() { + if (hashTable.isEmpty()) { + throw new IllegalStateException("Hash table is NOT supposed to be empty"); + } - if (LOG.isTraceEnabled()) { - matchRows.forEach(row -> LOG.trace("Matched row after combined: {}", row)); - } + List> combinedRows = probe(); - bookkeepMismatchedRows(combinedRows); - } - return matchRows; - } + List> matchRows = new ArrayList<>(); + if (combinedRows.isEmpty()) { + LOG.debug("No matched row found"); + } else { + if (LOG.isTraceEnabled()) { + combinedRows.forEach(row -> LOG.trace("Matched row before combined: {}", row)); + } - private boolean isNewRunButNoMoreBlockFromLeft() { - return isNewRun() && !isAnyMoreBlockFromLeft(); - } + for (CombinedRow row : combinedRows) { + matchRows.addAll(row.combine()); + } - private boolean isNewRun() { - return hashTable.isEmpty(); - } + if (LOG.isTraceEnabled()) { + matchRows.forEach(row -> LOG.trace("Matched row after combined: {}", row)); + } - private boolean isAnyMoreBlockFromLeft() { - return left.hasNext(); + bookkeepMismatchedRows(combinedRows); } - - private boolean isAnyMoreDataFromRight() { - return right.hasNext(); + return matchRows; + } + + private boolean isNewRunButNoMoreBlockFromLeft() { + return isNewRun() && !isAnyMoreBlockFromLeft(); + } + + private boolean isNewRun() { + return hashTable.isEmpty(); + } + + private boolean isAnyMoreBlockFromLeft() { + return 
left.hasNext(); + } + + private boolean isAnyMoreDataFromRight() { + return right.hasNext(); + } + + private boolean isAnyMismatchForOuterJoin() { + return !leftMismatch.isEmpty(); + } + + /** Clone mismatch list and clear it so that we won't return it forever */ + @SuppressWarnings("unchecked") + private Collection> returnAndClearMismatch() { + if (LOG.isTraceEnabled()) { + leftMismatch.forEach(row -> LOG.trace("Mismatched rows before combined: {}", row)); } - private boolean isAnyMismatchForOuterJoin() { - return !leftMismatch.isEmpty(); + List> result = new ArrayList<>(); + for (Row row : leftMismatch) { + result.add(row.combine(Row.NULL)); } - /** - * Clone mismatch list and clear it so that we won't return it forever - */ - @SuppressWarnings("unchecked") - private Collection> returnAndClearMismatch() { - if (LOG.isTraceEnabled()) { - leftMismatch.forEach(row -> LOG.trace("Mismatched rows before combined: {}", row)); - } - - List> result = new ArrayList<>(); - for (Row row : leftMismatch) { - result.add(row.combine(Row.NULL)); - } - - if (LOG.isTraceEnabled()) { - result.forEach(row -> LOG.trace("Mismatched rows after combined: {}", row)); - } - leftMismatch.clear(); - return result; + if (LOG.isTraceEnabled()) { + result.forEach(row -> LOG.trace("Mismatched rows after combined: {}", row)); } - - /** - * Building phase: - * Build hash table from data block. - */ - private void buildHashTableByNextBlock() { - List> block = loadNextBlockFromLeft(blockSize.size()); - if (LOG.isTraceEnabled()) { - LOG.trace("Build hash table on conditions with block: {}, {}", condition, block); - } - - for (Row data : block) { - hashTable.add(data); - } - - if (type == JoinType.LEFT_OUTER_JOIN) { - leftMismatch.addAll(block); - } + leftMismatch.clear(); + return result; + } + + /** Building phase: Build hash table from data block. 
*/ + private void buildHashTableByNextBlock() { + List> block = loadNextBlockFromLeft(blockSize.size()); + if (LOG.isTraceEnabled()) { + LOG.trace("Build hash table on conditions with block: {}, {}", condition, block); } - private void cleanUpAndCloseRight() { - LOG.debug("No more data from right. Clean up and close right."); - hashTable.clear(); - leftMismatch.clear(); - right.close(); + for (Row data : block) { + hashTable.add(data); } - private List> loadNextBlockFromLeft(int blockSize) { - List> block = new ArrayList<>(); - for (int i = 0; i < blockSize && left.hasNext(); i++) { - block.add(left.next()); - } - return block; + if (type == JoinType.LEFT_OUTER_JOIN) { + leftMismatch.addAll(block); } - - private void bookkeepMismatchedRows(List> combinedRows) { - if (type == JoinType.LEFT_OUTER_JOIN) { - for (CombinedRow row : combinedRows) { - leftMismatch.removeAll(row.leftMatchedRows()); - } - } + } + + private void cleanUpAndCloseRight() { + LOG.debug("No more data from right. Clean up and close right."); + hashTable.clear(); + leftMismatch.clear(); + right.close(); + } + + private List> loadNextBlockFromLeft(int blockSize) { + List> block = new ArrayList<>(); + for (int i = 0; i < blockSize && left.hasNext(); i++) { + block.add(left.next()); } - - /** - * (Re-)open right side by params. - */ - protected abstract void reopenRight() throws Exception; - - - /** - * Probing phase - * - * @return matched rows from left and right in - */ - protected abstract List> probe(); - - - @Override - public String toString() { - return getClass().getSimpleName() + "[ conditions=" + condition - + ", type=" + type + ", blockSize=[" + blockSize + "] ]"; + return block; + } + + private void bookkeepMismatchedRows(List> combinedRows) { + if (type == JoinType.LEFT_OUTER_JOIN) { + for (CombinedRow row : combinedRows) { + leftMismatch.removeAll(row.leftMatchedRows()); + } } - + } + + /** (Re-)open right side by params. 
*/ + protected abstract void reopenRight() throws Exception; + + /** + * Probing phase + * + * @return matched rows from left and right in + */ + protected abstract List> probe(); + + @Override + public String toString() { + return getClass().getSimpleName() + + "[ conditions=" + + condition + + ", type=" + + type + + ", blockSize=[" + + blockSize + + "] ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java index 5d39529632..baf0af8c86 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.ArrayList; @@ -13,41 +12,39 @@ import java.util.Map; import org.opensearch.sql.legacy.query.planner.physical.Row; -/** - * List implementation to avoid normal hash table degrading into linked list. - */ +/** List implementation to avoid normal hash table degrading into linked list. 
*/ public class ListHashTable implements HashTable { - private List> rows = new ArrayList<>(); - - @Override - public void add(Row row) { - rows.add(row); - } - - @Override - public Collection> match(Row row) { - return rows; - } - - @SuppressWarnings("unchecked") - @Override - public Map>[] rightFieldWithLeftValues() { - return new Map[]{new HashMap()}; - } - - @Override - public int size() { - return rows.size(); - } - - @Override - public boolean isEmpty() { - return rows.isEmpty(); - } - - @Override - public void clear() { - rows.clear(); - } + private List> rows = new ArrayList<>(); + + @Override + public void add(Row row) { + rows.add(row); + } + + @Override + public Collection> match(Row row) { + return rows; + } + + @SuppressWarnings("unchecked") + @Override + public Map>[] rightFieldWithLeftValues() { + return new Map[] {new HashMap()}; + } + + @Override + public int size() { + return rows.size(); + } + + @Override + public boolean isEmpty() { + return rows.isEmpty(); + } + + @Override + public void clear() { + rows.clear(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java index 7990b8c8d4..339e326cc3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java @@ -3,28 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.blocksize; -/** - * Adaptive block size calculator based on resource usage dynamically. - */ +/** Adaptive block size calculator based on resource usage dynamically. 
*/ public class AdaptiveBlockSize implements BlockSize { - private int upperLimit; + private int upperLimit; - public AdaptiveBlockSize(int upperLimit) { - this.upperLimit = upperLimit; - } + public AdaptiveBlockSize(int upperLimit) { + this.upperLimit = upperLimit; + } - @Override - public int size() { - //TODO: calculate dynamically on each call - return upperLimit; - } + @Override + public int size() { + // TODO: calculate dynamically on each call + return upperLimit; + } - @Override - public String toString() { - return "AdaptiveBlockSize with " + "upperLimit=" + upperLimit; - } + @Override + public String toString() { + return "AdaptiveBlockSize with " + "upperLimit=" + upperLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java index 10b36f2483..52bc42587f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java @@ -3,19 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.monitor; -/** - * Interface for different monitor component - */ +/** Interface for different monitor component */ public interface Monitor { - /** - * Is resource being monitored exhausted. - * - * @return true if yes - */ - boolean isHealthy(); - + /** + * Is resource being monitored exhausted. 
+ * + * @return true if yes + */ + boolean isHealthy(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java index 6c708b91b0..9863862af9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java @@ -3,42 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; -/** - * Util class for identifier expression parsing - */ +/** Util class for identifier expression parsing */ class Identifier { - private final SQLIdentifierExpr idExpr; + private final SQLIdentifierExpr idExpr; - Identifier(SQLIdentifierExpr idExpr) { - this.idExpr = idExpr; - } + Identifier(SQLIdentifierExpr idExpr) { + this.idExpr = idExpr; + } - String name() { - return idExpr.getName(); - } + String name() { + return idExpr.getName(); + } - boolean hasPrefix() { - return firstDotIndex() != -1; - } + boolean hasPrefix() { + return firstDotIndex() != -1; + } - /** Assumption: identifier has prefix */ - String prefix() { - return name().substring(0, firstDotIndex()); - } + /** Assumption: identifier has prefix */ + String prefix() { + return name().substring(0, firstDotIndex()); + } - /** Assumption: identifier has prefix */ - void removePrefix() { - String nameWithoutPrefix = name().substring(prefix().length() + 1); - idExpr.setName(nameWithoutPrefix); - } + /** Assumption: identifier has prefix */ + void removePrefix() { + String nameWithoutPrefix = name().substring(prefix().length() + 1); + idExpr.setName(nameWithoutPrefix); + } - private int firstDotIndex() { - return name().indexOf('.', 1); - } + private int firstDotIndex() { + return name().indexOf('.', 1); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java index b32803561e..69178b7e83 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.join; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; @@ -28,23 +27,21 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** + *
  *  Rewrite rule to add table alias to columnNames for JOIN queries without table alias.
  * 

* We use a map from columnName to tableName. This is required to remove any ambiguity * while mapping fields to right table. If there is no explicit alias we create one and use that * to prefix columnName. * - * Different tableName on either side of join: - * Case a: If columnName(without alias) present in both tables, throw error. - * Case b: If columnName already has some alias, and that alias is a table name, - * change it to explicit alias of that table. - * Case c: If columnName is unique to a table + *

Different tableName on either side of join: Case a: If columnName(without alias) present in + * both tables, throw error. Case b: If columnName already has some alias, and that alias is a table + * name, change it to explicit alias of that table. Case c: If columnName is unique to a table + * + *

Same tableName on either side of join: Case a: If neither has explicit alias, throw error. + * Case b: If any one table has explicit alias, use explicit alias of other table for columnNames + * with tableName as prefix. (See below example) * - * Same tableName on either side of join: - * Case a: If neither has explicit alias, throw error. - * Case b: If any one table has explicit alias, - * use explicit alias of other table for columnNames with tableName as prefix. (See below example) - *

  *       ex: SELECT table.field_a , a.field_b  | SELECT table.field_a , a.field_b
  *            FROM table a                     |  FROM table
  *             JOIN table                      |   JOIN table a
@@ -54,164 +51,172 @@
  *                            FROM table a
  *                             JOIN table table_0
  *                              ON table_0.field_c = a.field_d
- *
- *

+ *
*/ public class JoinRewriteRule implements RewriteRule { - private static final String DOT = "."; - private int aliasSuffix = 0; - private final LocalClusterState clusterState; + private static final String DOT = "."; + private int aliasSuffix = 0; + private final LocalClusterState clusterState; - public JoinRewriteRule(LocalClusterState clusterState) { - this.clusterState = clusterState; - } + public JoinRewriteRule(LocalClusterState clusterState) { + this.clusterState = clusterState; + } - @Override - public boolean match(SQLQueryExpr root) { - return isJoin(root); - } - - private boolean isJoin(SQLQueryExpr sqlExpr) { - SQLSelectQuery sqlSelectQuery = sqlExpr.getSubQuery().getQuery(); + @Override + public boolean match(SQLQueryExpr root) { + return isJoin(root); + } - if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { - return false; - } + private boolean isJoin(SQLQueryExpr sqlExpr) { + SQLSelectQuery sqlSelectQuery = sqlExpr.getSubQuery().getQuery(); - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; - return query.getFrom() instanceof SQLJoinTableSource - && ((SQLJoinTableSource) query.getFrom()).getJoinType() != SQLJoinTableSource.JoinType.COMMA; + if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { + return false; } - @Override - public void rewrite(SQLQueryExpr root) { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; + return query.getFrom() instanceof SQLJoinTableSource + && ((SQLJoinTableSource) query.getFrom()).getJoinType() + != SQLJoinTableSource.JoinType.COMMA; + } - final Multimap tableByFieldName = ArrayListMultimap.create(); - final Map tableNameToAlias = new HashMap<>(); + @Override + public void rewrite(SQLQueryExpr root) { - // Used to handle case of same tableNames in JOIN - final Set explicitAliases = new HashSet<>(); + final Multimap tableByFieldName = ArrayListMultimap.create(); + final Map tableNameToAlias = new HashMap<>(); - visitTable(root, tableExpr -> { - // Copied from 
SubqueryAliasRewriter ; Removes index type name if any - String tableName = tableExpr.getExpr().toString().replaceAll(" ", "").split("/")[0]; + // Used to handle case of same tableNames in JOIN + final Set explicitAliases = new HashSet<>(); - if (tableExpr.getAlias() == null) { - String alias = createAlias(tableName); - tableExpr.setAlias(alias); - explicitAliases.add(alias); - } + visitTable( + root, + tableExpr -> { + // Copied from SubqueryAliasRewriter ; Removes index type name if any + String tableName = tableExpr.getExpr().toString().replaceAll(" ", "").split("/")[0]; - Table table = new Table(tableName, tableExpr.getAlias()); + if (tableExpr.getAlias() == null) { + String alias = createAlias(tableName); + tableExpr.setAlias(alias); + explicitAliases.add(alias); + } - tableNameToAlias.put(table.getName(), table.getAlias()); + Table table = new Table(tableName, tableExpr.getAlias()); - FieldMappings fieldMappings = clusterState. getFieldMappings( - new String[]{tableName}).firstMapping(); - fieldMappings.flat((fieldName, type) -> tableByFieldName.put(fieldName, table)); - }); + tableNameToAlias.put(table.getName(), table.getAlias()); - //Handling cases for same tableName on either side of JOIN - if (tableNameToAlias.size() == 1) { - String tableName = tableNameToAlias.keySet().iterator().next(); - if (explicitAliases.size() == 2) { - // Neither table has explicit alias - throw new VerificationException(StringUtils.format("Not unique table/alias: [%s]", tableName)); - } else if (explicitAliases.size() == 1) { - // One table has explicit alias; use created alias for other table as alias to override fields - // starting with actual tableName as alias to explicit alias - tableNameToAlias.put(tableName, explicitAliases.iterator().next()); - } - } - - visitColumnName(root, idExpr -> { - String columnName = idExpr.getName(); - Collection
tables = tableByFieldName.get(columnName); - if (tables.size() > 1) { - // columnName without alias present in both tables - throw new VerificationException(StringUtils.format("Field name [%s] is ambiguous", columnName)); - } else if (tables.isEmpty()) { - // size() == 0? - // 1. Either the columnName does not exist (handled by SemanticAnalyzer [SemanticAnalysisException]) - // 2. Or column starts with tableName as alias or explicit alias - // If starts with tableName as alias change to explicit alias - tableNameToAlias.keySet().stream().forEach(tableName -> { - if (columnName.startsWith(tableName + DOT)) { - idExpr.setName(columnName.replace(tableName + DOT, tableNameToAlias.get(tableName) + DOT)); - } - }); - } else { - // columnName with any alias and unique to one table - Table table = tables.iterator().next(); - idExpr.setName(String.join(DOT, table.getAlias(), columnName)); - } + FieldMappings fieldMappings = + clusterState.getFieldMappings(new String[] {tableName}).firstMapping(); + fieldMappings.flat((fieldName, type) -> tableByFieldName.put(fieldName, table)); }); + + // Handling cases for same tableName on either side of JOIN + if (tableNameToAlias.size() == 1) { + String tableName = tableNameToAlias.keySet().iterator().next(); + if (explicitAliases.size() == 2) { + // Neither table has explicit alias + throw new VerificationException( + StringUtils.format("Not unique table/alias: [%s]", tableName)); + } else if (explicitAliases.size() == 1) { + // One table has explicit alias; use created alias for other table as alias to override + // fields + // starting with actual tableName as alias to explicit alias + tableNameToAlias.put(tableName, explicitAliases.iterator().next()); + } } - private void visitTable(SQLQueryExpr root, - Consumer visit) { - root.accept(new MySqlASTVisitorAdapter() { - @Override - public void endVisit(SQLExprTableSource tableExpr) { - visit.accept(tableExpr); - } + visitColumnName( + root, + idExpr -> { + String columnName = 
idExpr.getName(); + Collection
tables = tableByFieldName.get(columnName); + if (tables.size() > 1) { + // columnName without alias present in both tables + throw new VerificationException( + StringUtils.format("Field name [%s] is ambiguous", columnName)); + } else if (tables.isEmpty()) { + // size() == 0? + // 1. Either the columnName does not exist (handled by SemanticAnalyzer + // [SemanticAnalysisException]) + // 2. Or column starts with tableName as alias or explicit alias + // If starts with tableName as alias change to explicit alias + tableNameToAlias.keySet().stream() + .forEach( + tableName -> { + if (columnName.startsWith(tableName + DOT)) { + idExpr.setName( + columnName.replace( + tableName + DOT, tableNameToAlias.get(tableName) + DOT)); + } + }); + } else { + // columnName with any alias and unique to one table + Table table = tables.iterator().next(); + idExpr.setName(String.join(DOT, table.getAlias(), columnName)); + } }); - } + } + + private void visitTable(SQLQueryExpr root, Consumer visit) { + root.accept( + new MySqlASTVisitorAdapter() { + @Override + public void endVisit(SQLExprTableSource tableExpr) { + visit.accept(tableExpr); + } + }); + } + + private void visitColumnName(SQLQueryExpr expr, Consumer visit) { + expr.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLExprTableSource x) { + // Avoid rewriting identifier in table name + return false; + } - private void visitColumnName(SQLQueryExpr expr, - Consumer visit) { - expr.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLExprTableSource x) { - // Avoid rewriting identifier in table name - return false; - } - - @Override - public void endVisit(SQLIdentifierExpr idExpr) { - visit.accept(idExpr); - } + @Override + public void endVisit(SQLIdentifierExpr idExpr) { + visit.accept(idExpr); + } }); - } + } - private String createAlias(String alias) { - return String.format("%s_%d", alias, next()); - } + private String createAlias(String alias) { + return 
String.format("%s_%d", alias, next()); + } - private Integer next() { - return aliasSuffix++; - } + private Integer next() { + return aliasSuffix++; + } - private static class Table { + private static class Table { - public String getName() { - return name; - } + public String getName() { + return name; + } - public String getAlias() { - return alias; - } + public String getAlias() { + return alias; + } - /** - * Table Name. - */ - private String name; + /** Table Name. */ + private String name; - /** - * Table Alias. - */ - private String alias; + /** Table Alias. */ + private String alias; - Table(String name, String alias) { - this.name = name; - this.alias = alias; - } + Table(String name, String alias) { + this.name = name; + this.alias = alias; + } - // Added for debugging - @Override - public String toString() { - return this.name + "-->" + this.alias; - } + // Added for debugging + @Override + public String toString() { + return this.name + "-->" + this.alias; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java index 609d26f4a1..b39907366e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType.COMMA; @@ -14,68 +13,65 @@ import com.alibaba.druid.sql.ast.statement.SQLTableSource; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; -/** - * Table (OpenSearch Index) expression in FROM statement. - */ +/** Table (OpenSearch Index) expression in FROM statement. 
*/ class From extends SQLClause { - From(SQLTableSource expr) { - super(expr); + From(SQLTableSource expr) { + super(expr); + } + + /** + * Collect nested field(s) information and then remove them from FROM statement. Assumption: only + * 1 regular table in FROM (which is the first one) and nested field(s) has alias. + */ + @Override + void rewrite(Scope scope) { + if (!isJoin()) { + return; } - /** - * Collect nested field(s) information and then remove them from FROM statement. - * Assumption: only 1 regular table in FROM (which is the first one) and nested field(s) has alias. - */ - @Override - void rewrite(Scope scope) { - if (!isJoin()) { - return; - } - - // At this point, FROM expr is SQLJoinTableSource. - if (!isCommaJoin()) { - scope.setActualJoinType(((SQLJoinTableSource) expr).getJoinType()); - ((SQLJoinTableSource) expr).setJoinType(COMMA); - } - - if (parentAlias(scope).isEmpty()) { - // Could also be empty now since normal JOIN tables may not have alias - if (scope.getActualJoinType() != null) { - ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); - } - return; - } - - collectNestedFields(scope); - if (scope.isAnyNestedField()) { - eraseParentAlias(); - keepParentTableOnly(); - } else if (scope.getActualJoinType() != null){ - // set back the JoinType to original value if non COMMA JOIN on regular tables - ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); - } + // At this point, FROM expr is SQLJoinTableSource. 
+ if (!isCommaJoin()) { + scope.setActualJoinType(((SQLJoinTableSource) expr).getJoinType()); + ((SQLJoinTableSource) expr).setJoinType(COMMA); } - private String parentAlias(Scope scope) { - scope.setParentAlias(((SQLJoinTableSource) expr).getLeft().getAlias()); - return emptyIfNull(scope.getParentAlias()); + if (parentAlias(scope).isEmpty()) { + // Could also be empty now since normal JOIN tables may not have alias + if (scope.getActualJoinType() != null) { + ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); + } + return; } - /** - * Erase alias otherwise NLPchina has problem parsing nested field like 't.employees.name' - */ - private void eraseParentAlias() { - left().expr.setAlias(null); + collectNestedFields(scope); + if (scope.isAnyNestedField()) { + eraseParentAlias(); + keepParentTableOnly(); + } else if (scope.getActualJoinType() != null) { + // set back the JoinType to original value if non COMMA JOIN on regular tables + ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); } + } - private void keepParentTableOnly() { - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) expr.getParent(); - query.setFrom(left().expr); - left().expr.setParent(query); - } + private String parentAlias(Scope scope) { + scope.setParentAlias(((SQLJoinTableSource) expr).getLeft().getAlias()); + return emptyIfNull(scope.getParentAlias()); + } + + /** Erase alias otherwise NLPchina has problem parsing nested field like 't.employees.name' */ + private void eraseParentAlias() { + left().expr.setAlias(null); + } + + private void keepParentTableOnly() { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) expr.getParent(); + query.setFrom(left().expr); + left().expr.setParent(query); + } /** + *
      * Collect path alias and full path mapping of nested field in FROM clause.
      * Sample:
      * FROM team t, t.employees e ...
@@ -88,6 +84,7 @@ private void keepParentTableOnly() {
      * 

* t.employees is nested because path "t" == parentAlias "t" * Save path alias to full path name mapping {"e": "employees"} to Scope + *

*/ private void collectNestedFields(Scope scope) { From clause = this; @@ -97,36 +94,35 @@ private void collectNestedFields(Scope scope) { clause.addIfNestedField(scope); } - private boolean isCommaJoin() { - return expr instanceof SQLJoinTableSource && ((SQLJoinTableSource) expr).getJoinType() == COMMA; - } + private boolean isCommaJoin() { + return expr instanceof SQLJoinTableSource && ((SQLJoinTableSource) expr).getJoinType() == COMMA; + } - private boolean isJoin() { - return expr instanceof SQLJoinTableSource; - } + private boolean isJoin() { + return expr instanceof SQLJoinTableSource; + } - private From left() { - return new From(((SQLJoinTableSource) expr).getLeft()); - } + private From left() { + return new From(((SQLJoinTableSource) expr).getLeft()); + } - private From right() { - return new From(((SQLJoinTableSource) expr).getRight()); - } + private From right() { + return new From(((SQLJoinTableSource) expr).getRight()); + } - private void addIfNestedField(Scope scope) { - if (!(expr instanceof SQLExprTableSource - && ((SQLExprTableSource) expr).getExpr() instanceof SQLIdentifierExpr)) { - return; - } - - Identifier table = new Identifier((SQLIdentifierExpr) ((SQLExprTableSource) expr).getExpr()); - if (table.path().equals(scope.getParentAlias())) { - scope.addAliasFullPath(emptyIfNull(expr.getAlias()), table.name()); - } + private void addIfNestedField(Scope scope) { + if (!(expr instanceof SQLExprTableSource + && ((SQLExprTableSource) expr).getExpr() instanceof SQLIdentifierExpr)) { + return; } - private String emptyIfNull(String str) { - return str == null ? "" : str; + Identifier table = new Identifier((SQLIdentifierExpr) ((SQLExprTableSource) expr).getExpr()); + if (table.path().equals(scope.getParentAlias())) { + scope.addAliasFullPath(emptyIfNull(expr.getAlias()), table.name()); } + } + private String emptyIfNull(String str) { + return str == null ? 
"" : str; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java index 635cc63671..e3e1cfb7ce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java @@ -3,95 +3,95 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; /** + *
  * Identifier expression in SELECT, FROM, WHERE, GROUP BY, ORDER BY etc.
- * 

+ * * Ex. To make concepts clear, for "e.firstname AND t.region" in "FROM team t, t.employees e": * parent alias (to erase): 't' * path: 'e' (full path saved in Scope is 'employees') * name: 'firstname' + *

*/ class Identifier extends SQLClause { - private static final String SEPARATOR = "."; - - Identifier(SQLIdentifierExpr expr) { - super(expr); - } - - /** - * Erase parent alias for all identifiers but only rewrite those (nested field identifier) NOT in WHERE. - * For identifier in conditions in WHERE, use full path as tag and delay the rewrite in Where.rewrite(). - */ - @Override - void rewrite(Scope scope) { - eraseParentAlias(scope); - if (isNestedField(scope)) { - renameByFullPath(scope); - if (isInCondition()) { - useFullPathAsTag(scope); - } else { - replaceByNestedFunction(expr, pathFromIdentifier(expr)); - } - } - } - - /** - * return the path of the expr name. e.g. - * expecting p returned as path in both WHERE p.name = 'A' and WHERE p IS NULL cases, - * in which expr.name = p.name and p separately - */ - String path() { - return separatorIndex() == -1 ? expr.getName() : expr.getName().substring(0, separatorIndex()); - } - - String name() { - return expr.getName().substring(separatorIndex() + 1); + private static final String SEPARATOR = "."; + + Identifier(SQLIdentifierExpr expr) { + super(expr); + } + + /** + * Erase parent alias for all identifiers but only rewrite those (nested field identifier) NOT in + * WHERE. For identifier in conditions in WHERE, use full path as tag and delay the rewrite in + * Where.rewrite(). + */ + @Override + void rewrite(Scope scope) { + eraseParentAlias(scope); + if (isNestedField(scope)) { + renameByFullPath(scope); + if (isInCondition()) { + useFullPathAsTag(scope); + } else { + replaceByNestedFunction(expr, pathFromIdentifier(expr)); + } } - - private int separatorIndex() { - return expr.getName().indexOf(SEPARATOR); - } - - /** - * Erase parent alias otherwise it's required to specify it everywhere even on nested - * field (which NLPchina has problem with). 
- * Sample: "FROM team t, t.employees e WHERE t.region = 'US'" => "WHERE region = 'US'" - */ - private void eraseParentAlias(Scope scope) { - if (isStartWithParentAlias(scope)) { - expr.setName(name()); - } - } - - private boolean isStartWithParentAlias(Scope scope) { - return path().equals(scope.getParentAlias()); + } + + /** + * return the path of the expr name. e.g. expecting p returned as path in both WHERE p.name = 'A' + * and WHERE p IS NULL cases, in which expr.name = p.name and p separately + */ + String path() { + return separatorIndex() == -1 ? expr.getName() : expr.getName().substring(0, separatorIndex()); + } + + String name() { + return expr.getName().substring(separatorIndex() + 1); + } + + private int separatorIndex() { + return expr.getName().indexOf(SEPARATOR); + } + + /** + * Erase parent alias otherwise it's required to specify it everywhere even on nested field (which + * NLPchina has problem with). Sample: "FROM team t, t.employees e WHERE t.region = 'US'" => + * "WHERE region = 'US'" + */ + private void eraseParentAlias(Scope scope) { + if (isStartWithParentAlias(scope)) { + expr.setName(name()); } + } - private boolean isNestedField(Scope scope) { - return !scope.getFullPath(path()).isEmpty(); - } + private boolean isStartWithParentAlias(Scope scope) { + return path().equals(scope.getParentAlias()); + } - private void renameByFullPath(Scope scope) { - String fullPath = scope.getFullPath(path()); - if (fullPath.isEmpty()) { - throw new IllegalStateException("Full path not found for identifier:" + expr.getName()); - } - expr.setName(expr.getName().replaceFirst(path(), fullPath)); - } + private boolean isNestedField(Scope scope) { + return !scope.getFullPath(path()).isEmpty(); + } - private void useFullPathAsTag(Scope scope) { - scope.addConditionTag((SQLBinaryOpExpr) expr.getParent(), path()); + private void renameByFullPath(Scope scope) { + String fullPath = scope.getFullPath(path()); + if (fullPath.isEmpty()) { + throw new 
IllegalStateException("Full path not found for identifier:" + expr.getName()); } + expr.setName(expr.getName().replaceFirst(path(), fullPath)); + } - private boolean isInCondition() { - return expr.getParent() instanceof SQLBinaryOpExpr; - } + private void useFullPathAsTag(Scope scope) { + scope.addConditionTag((SQLBinaryOpExpr) expr.getParent(), path()); + } + private boolean isInCondition() { + return expr.getParent() instanceof SQLBinaryOpExpr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java index 99505e5e49..281918d52c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -17,28 +16,31 @@ import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; /** + *
  * IN Subquery Rewriter.
  * For example,
  * SELECT * FROM A WHERE a IN (SELECT b FROM B) and c > 10 should be rewritten to
  * SELECT A.* FROM A JOIN B ON A.a = B.b WHERE c > 10 and B.b IS NOT NULL.
+ * 
*/ public class InRewriter implements Rewriter { - private final SQLInSubQueryExpr inExpr; - private final RewriterContext ctx; - private final MySqlSelectQueryBlock queryBlock; + private final SQLInSubQueryExpr inExpr; + private final RewriterContext ctx; + private final MySqlSelectQueryBlock queryBlock; - public InRewriter(SQLInSubQueryExpr inExpr, RewriterContext ctx) { - this.inExpr = inExpr; - this.ctx = ctx; - this.queryBlock = (MySqlSelectQueryBlock) inExpr.getSubQuery().getQuery(); - } + public InRewriter(SQLInSubQueryExpr inExpr, RewriterContext ctx) { + this.inExpr = inExpr; + this.ctx = ctx; + this.queryBlock = (MySqlSelectQueryBlock) inExpr.getSubQuery().getQuery(); + } - @Override - public boolean canRewrite() { - return !inExpr.isNot(); - } + @Override + public boolean canRewrite() { + return !inExpr.isNot(); + } /** + *
      * Build Where clause from input query.
      * 

* With the input query. @@ -54,26 +56,27 @@ public boolean canRewrite() { * | | * b B *

- * + *

*/ @Override public void rewrite() { SQLTableSource from = queryBlock.getFrom(); addJoinTable(from); - SQLExpr where = queryBlock.getWhere(); - if (null == where) { - ctx.addWhere(generateNullOp()); - } else if (where instanceof SQLBinaryOpExpr) { - ctx.addWhere(and(generateNullOp(), (SQLBinaryOpExpr) where)); - } else { - throw new IllegalStateException("unsupported where class type " + where.getClass()); - } + SQLExpr where = queryBlock.getWhere(); + if (null == where) { + ctx.addWhere(generateNullOp()); + } else if (where instanceof SQLBinaryOpExpr) { + ctx.addWhere(and(generateNullOp(), (SQLBinaryOpExpr) where)); + } else { + throw new IllegalStateException("unsupported where class type " + where.getClass()); } + } /** - * Build the Null check expression. For example, - * SELECT * FROM A WHERE a IN (SELECT b FROM B), should return B.b IS NOT NULL + * Build the Null check expression. For example,
+ * SELECT * FROM A WHERE a IN (SELECT b FROM B)
+ * should return B.b IS NOT NULL */ private SQLBinaryOpExpr generateNullOp() { SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); @@ -81,23 +84,24 @@ private SQLBinaryOpExpr generateNullOp() { binaryOpExpr.setRight(new SQLNullExpr()); binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); - return binaryOpExpr; - } + return binaryOpExpr; + } - /** - * Add the {@link SQLTableSource} with {@link JoinType} and {@link SQLBinaryOpExpr} to the {@link RewriterContext}. - */ - private void addJoinTable(SQLTableSource right) { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(inExpr.getExpr(), - SQLBinaryOperator.Equality, - fetchJoinExpr()); - ctx.addJoin(right, JoinType.JOIN, binaryOpExpr); - } + /** + * Add the {@link SQLTableSource} with {@link JoinType} and {@link SQLBinaryOpExpr} to the {@link + * RewriterContext}. + */ + private void addJoinTable(SQLTableSource right) { + SQLBinaryOpExpr binaryOpExpr = + new SQLBinaryOpExpr(inExpr.getExpr(), SQLBinaryOperator.Equality, fetchJoinExpr()); + ctx.addJoin(right, JoinType.JOIN, binaryOpExpr); + } - private SQLExpr fetchJoinExpr() { - if (queryBlock.getSelectList().size() > 1) { - throw new IllegalStateException("Unsupported subquery with multiple select " + queryBlock.getSelectList()); - } - return queryBlock.getSelectList().get(0).getExpr(); + private SQLExpr fetchJoinExpr() { + if (queryBlock.getSelectList().size() > 1) { + throw new IllegalStateException( + "Unsupported subquery with multiple select " + queryBlock.getSelectList()); } + return queryBlock.getSelectList().get(0).getExpr(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java index c7656e420f..26684f4f61 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,9 +17,9 @@ import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; /** - * Nested EXISTS SQL Rewriter. - * The EXISTS clause will be remove from the SQL. The translated SQL will use ElasticSearch's nested query logic. - * + * Nested EXISTS SQL Rewriter. The EXISTS clause will be remove from the SQL. The translated SQL + * will use ElasticSearch's nested query logic. + *
  * For example,
  * 

* SELECT e.name @@ -31,65 +30,65 @@ * FROM employee as e, e.projects as p * WHERE p is not null *

+ *
*/ public class NestedExistsRewriter implements Rewriter { - private final SQLExistsExpr existsExpr; - private final RewriterContext ctx; - private final SQLExprTableSource from; - private final SQLExpr where; + private final SQLExistsExpr existsExpr; + private final RewriterContext ctx; + private final SQLExprTableSource from; + private final SQLExpr where; - public NestedExistsRewriter(SQLExistsExpr existsExpr, RewriterContext board) { - this.existsExpr = existsExpr; - this.ctx = board; - MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) existsExpr.getSubQuery().getQuery(); - if (queryBlock.getFrom() instanceof SQLExprTableSource) { - this.from = (SQLExprTableSource) queryBlock.getFrom(); - } else { - throw new IllegalStateException("unsupported expression in from " + queryBlock.getFrom().getClass()); - } - this.where = queryBlock.getWhere(); + public NestedExistsRewriter(SQLExistsExpr existsExpr, RewriterContext board) { + this.existsExpr = existsExpr; + this.ctx = board; + MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) existsExpr.getSubQuery().getQuery(); + if (queryBlock.getFrom() instanceof SQLExprTableSource) { + this.from = (SQLExprTableSource) queryBlock.getFrom(); + } else { + throw new IllegalStateException( + "unsupported expression in from " + queryBlock.getFrom().getClass()); } + this.where = queryBlock.getWhere(); + } - /** - * The from table must be nested field. - */ - @Override - public boolean canRewrite() { - return ctx.isNestedQuery(from); - } + /** The from table must be nested field. 
*/ + @Override + public boolean canRewrite() { + return ctx.isNestedQuery(from); + } - @Override - public void rewrite() { - ctx.addJoin(from, JoinType.COMMA); - ctx.addWhere(rewriteExistsWhere()); - } + @Override + public void rewrite() { + ctx.addJoin(from, JoinType.COMMA); + ctx.addWhere(rewriteExistsWhere()); + } - private SQLExpr rewriteExistsWhere() { - SQLBinaryOpExpr translatedWhere; - SQLBinaryOpExpr notMissingOp = buildNotMissingOp(); - if (null == where) { - translatedWhere = notMissingOp; - } else if (where instanceof SQLBinaryOpExpr) { - translatedWhere = and(notMissingOp, (SQLBinaryOpExpr) where); - } else { - throw new IllegalStateException("unsupported expression in where " + where.getClass()); - } + private SQLExpr rewriteExistsWhere() { + SQLBinaryOpExpr translatedWhere; + SQLBinaryOpExpr notMissingOp = buildNotMissingOp(); + if (null == where) { + translatedWhere = notMissingOp; + } else if (where instanceof SQLBinaryOpExpr) { + translatedWhere = and(notMissingOp, (SQLBinaryOpExpr) where); + } else { + throw new IllegalStateException("unsupported expression in where " + where.getClass()); + } - if (existsExpr.isNot()) { - SQLNotExpr sqlNotExpr = new SQLNotExpr(translatedWhere); - translatedWhere.setParent(sqlNotExpr); - return sqlNotExpr; - } else { - return translatedWhere; - } + if (existsExpr.isNot()) { + SQLNotExpr sqlNotExpr = new SQLNotExpr(translatedWhere); + translatedWhere.setParent(sqlNotExpr); + return sqlNotExpr; + } else { + return translatedWhere; } + } - private SQLBinaryOpExpr buildNotMissingOp() { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); - binaryOpExpr.setLeft(new SQLIdentifierExpr(from.getAlias())); - binaryOpExpr.setRight(new SQLIdentifierExpr("MISSING")); - binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); + private SQLBinaryOpExpr buildNotMissingOp() { + SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); + binaryOpExpr.setLeft(new SQLIdentifierExpr(from.getAlias())); + binaryOpExpr.setRight(new 
SQLIdentifierExpr("MISSING")); + binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); - return binaryOpExpr; - } + return binaryOpExpr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java index ec35151e4d..de6694d90d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.utils; import com.alibaba.druid.sql.ast.expr.SQLExistsExpr; @@ -12,43 +11,39 @@ import java.util.ArrayList; import java.util.List; -/** - * Visitor which try to find the SubQuery. - */ +/** Visitor which try to find the SubQuery. */ public class FindSubQuery extends MySqlASTVisitorAdapter { - private final List sqlInSubQueryExprs = new ArrayList<>(); - private final List sqlExistsExprs = new ArrayList<>(); - private boolean continueVisit = true; - - public FindSubQuery continueVisitWhenFound(boolean continueVisit) { - this.continueVisit = continueVisit; - return this; - } - - /** - * Return true if has SubQuery. 
- */ - public boolean hasSubQuery() { - return !sqlInSubQueryExprs.isEmpty() || !sqlExistsExprs.isEmpty(); - } - - @Override - public boolean visit(SQLInSubQueryExpr query) { - sqlInSubQueryExprs.add(query); - return continueVisit; - } - - @Override - public boolean visit(SQLExistsExpr query) { - sqlExistsExprs.add(query); - return continueVisit; - } - - public List getSqlInSubQueryExprs() { - return sqlInSubQueryExprs; - } - - public List getSqlExistsExprs() { - return sqlExistsExprs; - } + private final List sqlInSubQueryExprs = new ArrayList<>(); + private final List sqlExistsExprs = new ArrayList<>(); + private boolean continueVisit = true; + + public FindSubQuery continueVisitWhenFound(boolean continueVisit) { + this.continueVisit = continueVisit; + return this; + } + + /** Return true if has SubQuery. */ + public boolean hasSubQuery() { + return !sqlInSubQueryExprs.isEmpty() || !sqlExistsExprs.isEmpty(); + } + + @Override + public boolean visit(SQLInSubQueryExpr query) { + sqlInSubQueryExprs.add(query); + return continueVisit; + } + + @Override + public boolean visit(SQLExistsExpr query) { + sqlExistsExprs.add(query); + return continueVisit; + } + + public List getSqlInSubQueryExprs() { + return sqlInSubQueryExprs; + } + + public List getSqlExistsExprs() { + return sqlExistsExprs; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java index ecc86877ee..26f17feeb6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import com.fasterxml.jackson.core.JsonFactory; @@ -15,31 +14,29 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; -/** - * Utility Class for formatting 
Json string pretty. - */ +/** Utility Class for formatting Json string pretty. */ public class JsonPrettyFormatter { - /** - * @param jsonString Json string without/with pretty format - * @return A standard and pretty formatted json string - * @throws IOException - */ - public static String format(String jsonString) throws IOException { - //turn _explain response into pretty formatted Json - XContentBuilder contentBuilder = XContentFactory.jsonBuilder().prettyPrint(); - try ( - XContentParser contentParser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonString)) - ){ - contentBuilder.copyCurrentStructure(contentParser); - } - return contentBuilder.toString(); + /** + * @param jsonString Json string without/with pretty format + * @return A standard and pretty formatted json string + * @throws IOException + */ + public static String format(String jsonString) throws IOException { + // turn _explain response into pretty formatted Json + XContentBuilder contentBuilder = XContentFactory.jsonBuilder().prettyPrint(); + try (XContentParser contentParser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonString))) { + contentBuilder.copyCurrentStructure(contentParser); } + return contentBuilder.toString(); + } - private JsonPrettyFormatter() { - throw new AssertionError(getClass().getCanonicalName() + " is a utility class and must not be initialized"); - } + private JsonPrettyFormatter() { + throw new AssertionError( + getClass().getCanonicalName() + " is a utility class and must not be initialized"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java index 05d8b048e2..c7639e542e 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,141 +22,136 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test cases for environment - */ +/** Test cases for environment */ public class EnvironmentTest { - /** Use context class for push/pop */ - private final SemanticContext context = new SemanticContext(); - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolve() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - Assert.assertTrue(environment().resolve(manager).isPresent()); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePopped() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - Symbol manager = new 
Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(city, OBJECT); - environment().define(manager, OBJECT); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - Assert.assertTrue(environment().resolve(manager).isPresent()); - - context.pop(); - Assert.assertFalse(environment().resolve(city).isPresent()); - Assert.assertFalse(environment().resolve(manager).isPresent()); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveByPrefix() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - - Map typeByName = environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, "s")); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - hasEntry("s.birthday", DATE), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager", OBJECT) - ) - ); - } - - @Test - public void defineFieldSymbolShouldBeAbleToResolveAll() { - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects"), new OpenSearchIndex("s.projects", NESTED_FIELD)); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); - - Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); - assertThat( - 
typeByName, - allOf( - aMapWithSize(6), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.address", TEXT), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager.name", TEXT) - ) - ); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveAll() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - - Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - hasEntry("s.birthday", DATE), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager", OBJECT) - ) - ); - } - - private Environment environment() { - return context.peek(); - } - + /** Use context class for push/pop */ + private final SemanticContext context = new SemanticContext(); + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolve() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + 
Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + Assert.assertTrue(environment().resolve(manager).isPresent()); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePopped() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(city, OBJECT); + environment().define(manager, OBJECT); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + Assert.assertTrue(environment().resolve(manager).isPresent()); + + context.pop(); + Assert.assertFalse(environment().resolve(city).isPresent()); + Assert.assertFalse(environment().resolve(manager).isPresent()); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveByPrefix() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + + Map typeByName = + environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, "s")); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.birthday", DATE), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager", OBJECT))); + } + + @Test + public void defineFieldSymbolShouldBeAbleToResolveAll() { + environment() + .define( + new Symbol(Namespace.FIELD_NAME, 
"s.projects"), + new OpenSearchIndex("s.projects", NESTED_FIELD)); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); + + Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(6), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.release", DATE), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.address", TEXT), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager.name", TEXT))); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveAll() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + + Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.birthday", DATE), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager", OBJECT))); + } + + private Environment environment() { + return context.peek(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java index db76c01947..0bd8b526bb 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static java.util.Collections.singletonList; @@ -19,34 +18,31 @@ import org.junit.Test; -/** - * Generic type test - */ +/** Generic type test */ public class GenericTypeTest { - @Test - public void passNumberArgToLogShouldReturnNumber() { - assertEquals(DOUBLE, LOG.construct(singletonList(NUMBER))); - } - - @Test - public void passIntegerArgToLogShouldReturnDouble() { - assertEquals(DOUBLE, LOG.construct(singletonList(INTEGER))); - } - - @Test - public void passLongArgToLogShouldReturnDouble() { - assertEquals(DOUBLE, LOG.construct(singletonList(LONG))); - } - - @Test - public void passTextArgToLogShouldReturnTypeError() { - assertEquals(TYPE_ERROR, LOG.construct(singletonList(TEXT))); - } - - @Test - public void passKeywordArgToLogShouldReturnTypeError() { - assertEquals(TYPE_ERROR, LOG.construct(singletonList(KEYWORD))); - } - + @Test + public void passNumberArgToLogShouldReturnNumber() { + assertEquals(DOUBLE, LOG.construct(singletonList(NUMBER))); + } + + @Test + public void passIntegerArgToLogShouldReturnDouble() { + assertEquals(DOUBLE, LOG.construct(singletonList(INTEGER))); + } + + @Test + public void passLongArgToLogShouldReturnDouble() { + assertEquals(DOUBLE, LOG.construct(singletonList(LONG))); + } + + @Test + public void passTextArgToLogShouldReturnTypeError() { + assertEquals(TYPE_ERROR, LOG.construct(singletonList(TEXT))); + } + + @Test + public void passKeywordArgToLogShouldReturnTypeError() { + assertEquals(TYPE_ERROR, LOG.construct(singletonList(KEYWORD))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java index cca69d8af9..ed2611786a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; @@ -22,109 +21,83 @@ import org.opensearch.sql.legacy.util.MatcherUtils; /** - * Unit test for {@code FieldMapping} with trivial methods ignored such as isSpecified, isMetaField etc. + * Unit test for {@code FieldMapping} with trivial methods ignored such as isSpecified, isMetaField + * etc. */ public class FieldMappingTest { - @Test - public void testFieldMatchesWildcardPatternSpecifiedInQuery() { - assertThat( - new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("employee.*")), - isWildcardSpecified(true) - ); - } - - @Test - public void testFieldMismatchesWildcardPatternSpecifiedInQuery() { - assertThat( - new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("manager.*")), - isWildcardSpecified(false) - ); - } - - @Test - public void testFieldIsProperty() { - assertThat( - new FieldMapping("employee.first"), - isPropertyField(true) - ); - } - - @Test - public void testNestedMultiFieldIsProperty() { - assertThat( - new FieldMapping("employee.first.keyword"), - isPropertyField(true) - ); - } - - @Test - public void testFieldIsNotProperty() { - assertThat( - new FieldMapping("employee"), - isPropertyField(false) - ); - } - - @Test - public void testMultiFieldIsNotProperty() { - assertThat( - new FieldMapping("employee.keyword"), - isPropertyField(false) - ); - } - - @Test - public void testUnknownFieldTreatedAsObject() { - assertThat( - new FieldMapping("employee"), - hasType("object") - ); - } - - @Test - public void testDeepNestedField() { - assertThat( - new 
FieldMapping( + @Test + public void testFieldMatchesWildcardPatternSpecifiedInQuery() { + assertThat( + new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("employee.*")), + isWildcardSpecified(true)); + } + + @Test + public void testFieldMismatchesWildcardPatternSpecifiedInQuery() { + assertThat( + new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("manager.*")), + isWildcardSpecified(false)); + } + + @Test + public void testFieldIsProperty() { + assertThat(new FieldMapping("employee.first"), isPropertyField(true)); + } + + @Test + public void testNestedMultiFieldIsProperty() { + assertThat(new FieldMapping("employee.first.keyword"), isPropertyField(true)); + } + + @Test + public void testFieldIsNotProperty() { + assertThat(new FieldMapping("employee"), isPropertyField(false)); + } + + @Test + public void testMultiFieldIsNotProperty() { + assertThat(new FieldMapping("employee.keyword"), isPropertyField(false)); + } + + @Test + public void testUnknownFieldTreatedAsObject() { + assertThat(new FieldMapping("employee"), hasType("object")); + } + + @Test + public void testDeepNestedField() { + assertThat( + new FieldMapping( + "employee.location.city", + ImmutableMap.of( "employee.location.city", - ImmutableMap.of( + new FieldMappingMetadata( "employee.location.city", - new FieldMappingMetadata("employee.location.city", new BytesArray( - "{\n" + - " \"city\" : {\n" + - " \"type\" : \"text\"\n" + - " }\n" + - "}") - ) - ), - emptyMap() - ), - hasType("text") - ); - } - - private Matcher isWildcardSpecified(boolean isMatched) { - return MatcherUtils.featureValueOf("is field match wildcard specified in query", - is(isMatched), - FieldMapping::isWildcardSpecified); - } - - private Matcher isPropertyField(boolean isProperty) { - return MatcherUtils.featureValueOf("isPropertyField", - is(isProperty), - FieldMapping::isPropertyField); - } - - private Matcher hasType(String expected) { - return MatcherUtils.featureValueOf("type", - 
is(expected), - FieldMapping::type); - } - - private Map fieldsSpecifiedInQuery(String...fieldNames) { - return Arrays.stream(fieldNames). - collect(Collectors.toMap(name -> name, - name -> new Field(name, ""))); - } - + new BytesArray( + "{\n" + " \"city\" : {\n" + " \"type\" : \"text\"\n" + " }\n" + "}"))), + emptyMap()), + hasType("text")); + } + + private Matcher isWildcardSpecified(boolean isMatched) { + return MatcherUtils.featureValueOf( + "is field match wildcard specified in query", + is(isMatched), + FieldMapping::isWildcardSpecified); + } + + private Matcher isPropertyField(boolean isProperty) { + return MatcherUtils.featureValueOf( + "isPropertyField", is(isProperty), FieldMapping::isPropertyField); + } + + private Matcher hasType(String expected) { + return MatcherUtils.featureValueOf("type", is(expected), FieldMapping::type); + } + + private Map fieldsSpecifiedInQuery(String... fieldNames) { + return Arrays.stream(fieldNames) + .collect(Collectors.toMap(name -> name, name -> new Field(name, ""))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java index 412c351c56..f6de8a98e6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,51 +22,47 @@ import org.junit.Test; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test for FieldMappings class - */ +/** Test for FieldMappings class */ public class FieldMappingsTest { - private static final String TEST_MAPPING_FILE = "mappings/field_mappings.json"; - - @Before - public void setUp() throws IOException { - URL url = 
Resources.getResource(TEST_MAPPING_FILE); - String mappings = Resources.toString(url, Charsets.UTF_8); - mockLocalClusterState(mappings); - } + private static final String TEST_MAPPING_FILE = "mappings/field_mappings.json"; - @After - public void cleanUp() { - LocalClusterState.state(null); - } + @Before + public void setUp() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + mockLocalClusterState(mappings); + } - @Test - public void flatFieldMappingsShouldIncludeFieldsOnAllLevels() { - IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{"field_mappings"}); - FieldMappings fieldMappings = indexMappings.firstMapping(); + @After + public void cleanUp() { + LocalClusterState.state(null); + } - Map typeByFieldName = new HashMap<>(); - fieldMappings.flat(typeByFieldName::put); - assertThat( - typeByFieldName, - allOf( - aMapWithSize(13), - hasEntry("address", "text"), - hasEntry("age", "integer"), - hasEntry("employer", "text"), - hasEntry("employer.raw", "text"), - hasEntry("employer.keyword", "keyword"), - hasEntry("projects", "nested"), - hasEntry("projects.active", "boolean"), - hasEntry("projects.members", "nested"), - hasEntry("projects.members.name", "text"), - hasEntry("manager", "object"), - hasEntry("manager.name", "text"), - hasEntry("manager.name.keyword", "keyword"), - hasEntry("manager.address", "keyword") - ) - ); - } + @Test + public void flatFieldMappingsShouldIncludeFieldsOnAllLevels() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {"field_mappings"}); + FieldMappings fieldMappings = indexMappings.firstMapping(); + Map typeByFieldName = new HashMap<>(); + fieldMappings.flat(typeByFieldName::put); + assertThat( + typeByFieldName, + allOf( + aMapWithSize(13), + hasEntry("address", "text"), + hasEntry("age", "integer"), + hasEntry("employer", "text"), + hasEntry("employer.raw", "text"), + 
hasEntry("employer.keyword", "keyword"), + hasEntry("projects", "nested"), + hasEntry("projects.active", "boolean"), + hasEntry("projects.members", "nested"), + hasEntry("projects.members.name", "text"), + hasEntry("manager", "object"), + hasEntry("manager.name", "text"), + hasEntry("manager.name.keyword", "keyword"), + hasEntry("manager.address", "keyword"))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java index b9c4935f50..5a6bc4541e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java @@ -3,46 +3,43 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; import org.junit.Assert; import org.junit.Test; -/** - * Test cases for util class {@link Identifier}. - */ +/** Test cases for util class {@link Identifier}. 
*/ public class IdentifierTest { - @Test - public void identifierWithWordBeforeFirstDotShouldBeConsideredHavePrefix() { - Assert.assertTrue(identifier("accounts.age").hasPrefix()); - } - - @Test - public void identifierWithoutDotShouldNotBeConsideredHavePrefix() { - Assert.assertFalse(identifier("age").hasPrefix()); - } - - @Test - public void identifierStartingWithDotShouldNotBeConsideredHavePrefix() { - Assert.assertFalse(identifier(".age").hasPrefix()); - } - - @Test - public void prefixOfIdentifierShouldBeWordBeforeFirstDot() { - Assert.assertEquals("accounts", identifier("accounts.age").prefix()); - } - - @Test - public void removePrefixShouldRemoveFirstWordAndDot() { - Identifier identifier = identifier("accounts.age"); - identifier.removePrefix(); - Assert.assertEquals("age", identifier.name()); - } - - private Identifier identifier(String name) { - return new Identifier(new SQLIdentifierExpr(name)); - } + @Test + public void identifierWithWordBeforeFirstDotShouldBeConsideredHavePrefix() { + Assert.assertTrue(identifier("accounts.age").hasPrefix()); + } + + @Test + public void identifierWithoutDotShouldNotBeConsideredHavePrefix() { + Assert.assertFalse(identifier("age").hasPrefix()); + } + + @Test + public void identifierStartingWithDotShouldNotBeConsideredHavePrefix() { + Assert.assertFalse(identifier(".age").hasPrefix()); + } + + @Test + public void prefixOfIdentifierShouldBeWordBeforeFirstDot() { + Assert.assertEquals("accounts", identifier("accounts.age").prefix()); + } + + @Test + public void removePrefixShouldRemoveFirstWordAndDot() { + Identifier identifier = identifier("accounts.age"); + identifier.removePrefix(); + Assert.assertEquals("age", identifier.name()); + } + + private Identifier identifier(String name) { + return new Identifier(new SQLIdentifierExpr(name)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java index c4c9504486..09cd9e9efc 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import org.junit.Assert; @@ -16,35 +15,39 @@ public class ErrorMessageFactoryTest { - private Throwable nonOpenSearchThrowable = new Throwable(); - private Throwable openSearchThrowable = new OpenSearchException(nonOpenSearchThrowable); - - @Test - public void openSearchExceptionShouldCreateEsErrorMessage() { - Exception exception = new OpenSearchException(nonOpenSearchThrowable); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionShouldCreateGenericErrorMessage() { - Exception exception = new Exception(nonOpenSearchThrowable); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertFalse(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionWithWrappedEsExceptionCauseShouldCreateEsErrorMessage() { - Exception exception = (Exception) openSearchThrowable; - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionWithMultiLayerWrappedEsExceptionCauseShouldCreateEsErrorMessage() { - Exception exception = new Exception(new Throwable(new Throwable(openSearchThrowable))); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - + 
private Throwable nonOpenSearchThrowable = new Throwable(); + private Throwable openSearchThrowable = new OpenSearchException(nonOpenSearchThrowable); + + @Test + public void openSearchExceptionShouldCreateEsErrorMessage() { + Exception exception = new OpenSearchException(nonOpenSearchThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void nonOpenSearchExceptionShouldCreateGenericErrorMessage() { + Exception exception = new Exception(nonOpenSearchThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertFalse(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void nonOpenSearchExceptionWithWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = (Exception) openSearchThrowable; + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void + nonOpenSearchExceptionWithMultiLayerWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = new Exception(new Throwable(new Throwable(openSearchThrowable))); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java index 5a13125013..deb7b5f600 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static 
org.junit.Assert.assertEquals; @@ -16,15 +15,15 @@ public class FormatTest { - @Test - public void ofJdbcShouldReturnJDBCFormat() { - Optional format = Format.of(Format.JDBC.getFormatName()); - assertTrue(format.isPresent()); - assertEquals(Format.JDBC, format.get()); - } + @Test + public void ofJdbcShouldReturnJDBCFormat() { + Optional format = Format.of(Format.JDBC.getFormatName()); + assertTrue(format.isPresent()); + assertEquals(Format.JDBC, format.get()); + } - @Test - public void ofUnknownFormatShouldReturnEmpty() { - assertFalse(Format.of("xml").isPresent()); - } + @Test + public void ofUnknownFormatShouldReturnEmpty() { + assertFalse(Format.of("xml").isPresent()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java index fee440c3e9..8863af0463 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static java.util.stream.Collectors.toMap; @@ -35,360 +34,277 @@ import org.opensearch.sql.legacy.query.maker.AggMaker; import org.opensearch.sql.legacy.util.SqlParserUtils; - public class HavingTest { - private static final String SELECT_CNT = "SELECT COUNT(*) as c "; - private static final String SELECT_CNT_AVG = "SELECT COUNT(*) as c, AVG(age) as a "; - private static final String SELECT_CNT_AVG_SUM = "SELECT COUNT(*) as c, AVG(age) as a, SUM(income) as i "; - private static final String FROM_BANK = "FROM bank "; - private static final String GROUP_BY_AGE = "GROUP BY age "; - private static final String SELECT_CNT_FROM_BANK_GROUP_BY_AGE = SELECT_CNT + FROM_BANK + GROUP_BY_AGE; - private static final String SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE = SELECT_CNT_AVG + FROM_BANK + GROUP_BY_AGE; - private static final String 
SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE = SELECT_CNT_AVG_SUM + FROM_BANK + GROUP_BY_AGE; - private static final String NESTED_SELECT_COUNT = "SELECT COUNT(nested(income, 'income')) as c "; - private static final String NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE = NESTED_SELECT_COUNT + FROM_BANK + GROUP_BY_AGE; - - @Test - public void singleCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a"), - hasScript("params.a > 30") - ) - )); - } - - @Ignore - @Test - public void singleConditionWithTwoAggExpr() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > c"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a"), - hasScript("params.a > params.c") - ) - )); - } - - @Test - public void singleConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Ignore - @Test - public void singleConditionWithHavingTwoAggExpr() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > COUNT(*)"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "avg_0: avg_0", "count_0: count_0"), - hasScript("params.avg_0 > count_0") - ) - )); - } - - @Test - public void nestedSingleCondition() { - assertThat( - query(NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING c > 30"), - contains( - bucketSelector( - hasBucketPath("c: income@NESTED.c"), - hasScript("params.c > 30") - ) - )); - } - - @Test - public void singleConditionWithOneFieldInSelect() { - assertThat( - query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c") - ) - )); - } - - @Test - public void singleConditionWithOneFieldInSelectWithHavingAgg() { - assertThat( - query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), 
- contains( - bucketSelector( - hasBucketPath("c: c", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Test - public void singleConditionWithThreeFieldsInSelect() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "i: i") - ) - )); - } - - @Test - public void singleConditionWithThreeFieldsInSelectWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "i: i", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Test - public void notEqualCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a <> 30"), - contains( - bucketSelector( - hasScript("params.a != 30") - ) - )); - } - - @Test - public void notEqualConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) <> 30"), - contains( - bucketSelector( - hasScript("params.avg_0 != 30") - ) - )); - } - - @Test - public void notCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (a > 30)"), - contains( - bucketSelector( - hasScript("params.a <= 30") - ) - )); - } - - @Test - public void notConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (AVG(a) > 30)"), - contains( - bucketSelector( - hasScript("params.avg_0 <= 30") - ) - )); - } - - @Test - public void andConditions() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 AND c <= 10"), - contains( - bucketSelector( - hasScript("params.a > 30 && params.c <= 10") - ) - )); - } - - @Test - public void andConditionsWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 AND SUM(c) <= 10"), - contains( - bucketSelector( - hasScript("params.avg_0 > 30 && params.sum_1 <= 10") - ) - )); - } - - @Test - 
public void orConditions() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 OR c <= 10"), - contains( - bucketSelector( - hasScript("params.a > 30 || params.c <= 10") - ) - )); - } - - @Test - public void orConditionsWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 OR SUM(c) <= 10"), - contains( - bucketSelector( - hasScript("params.avg_0 > 30 || params.sum_1 <= 10") - ) - )); - } - - @Test - public void betweenCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.a >= 30 && params.a <= 50") - ) - )); - } - - @Test - public void betweenConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.avg_0 >= 30 && params.avg_0 <= 50") - ) - )); - } - - @Test - public void notBetweenCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.a < 30 || params.a > 50") - ) - )); - } - - @Test - public void notBetweenConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.avg_0 < 30 || params.avg_0 > 50") - ) - )); - } - - @Test - public void inCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.a == 30 || params.a == 40 || params.a == 50") - ) - )); - } - - @Test - public void inConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.avg_0 == 30 || params.avg_0 == 40 || params.avg_0 == 50") - ) - )); - } - - @Test - public void notInCondition() 
{ - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.a != 30 && params.a != 40 && params.a != 50") - ) - )); - } - - @Test - public void notInConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.avg_0 != 30 && params.avg_0 != 40 && params.avg_0 != 50") - ) - )); - } - - @Test - public void nestedConditions() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING i <= 10000 OR NOT (a < 10 OR a > 30) AND c <= 10"), - contains( - bucketSelector( - hasScript("params.i <= 10000 || ((params.a >= 10 && params.a <= 30) && params.c <= 10)") - ) - )); - } - - @Test(expected = ParserException.class) - public void aggregationFunctionOnTheRight() { - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING 10 < a"); - } - - private Collection query(String sql) { - return translate(SqlParserUtils.parse(sql)); - } - - private Collection translate(SQLQueryExpr expr) { - try { - Select select = new SqlParser().parseSelect(expr); - select.getFields().forEach(field -> { + private static final String SELECT_CNT = "SELECT COUNT(*) as c "; + private static final String SELECT_CNT_AVG = "SELECT COUNT(*) as c, AVG(age) as a "; + private static final String SELECT_CNT_AVG_SUM = + "SELECT COUNT(*) as c, AVG(age) as a, SUM(income) as i "; + private static final String FROM_BANK = "FROM bank "; + private static final String GROUP_BY_AGE = "GROUP BY age "; + private static final String SELECT_CNT_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT + FROM_BANK + GROUP_BY_AGE; + private static final String SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT_AVG + FROM_BANK + GROUP_BY_AGE; + private static final String SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT_AVG_SUM + FROM_BANK + GROUP_BY_AGE; + private static final String NESTED_SELECT_COUNT = "SELECT 
COUNT(nested(income, 'income')) as c "; + private static final String NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE = + NESTED_SELECT_COUNT + FROM_BANK + GROUP_BY_AGE; + + @Test + public void singleCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c", "a: a"), hasScript("params.a > 30")))); + } + + @Ignore + @Test + public void singleConditionWithTwoAggExpr() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > c"), + contains(bucketSelector(hasBucketPath("c: c", "a: a"), hasScript("params.a > params.c")))); + } + + @Test + public void singleConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > 30"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "avg_0: avg_0"), hasScript("params.avg_0 > 30")))); + } + + @Ignore + @Test + public void singleConditionWithHavingTwoAggExpr() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > COUNT(*)"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "avg_0: avg_0", "count_0: count_0"), + hasScript("params.avg_0 > count_0")))); + } + + @Test + public void nestedSingleCondition() { + assertThat( + query(NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING c > 30"), + contains(bucketSelector(hasBucketPath("c: income@NESTED.c"), hasScript("params.c > 30")))); + } + + @Test + public void singleConditionWithOneFieldInSelect() { + assertThat( + query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c")))); + } + + @Test + public void singleConditionWithOneFieldInSelectWithHavingAgg() { + assertThat( + query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), + contains( + bucketSelector(hasBucketPath("c: c", "avg_0: avg_0"), hasScript("params.avg_0 > 30")))); + } + + @Test + public void singleConditionWithThreeFieldsInSelect() { + assertThat( + 
query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c", "a: a", "i: i")))); + } + + @Test + public void singleConditionWithThreeFieldsInSelectWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "i: i", "avg_0: avg_0"), + hasScript("params.avg_0 > 30")))); + } + + @Test + public void notEqualCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a <> 30"), + contains(bucketSelector(hasScript("params.a != 30")))); + } + + @Test + public void notEqualConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) <> 30"), + contains(bucketSelector(hasScript("params.avg_0 != 30")))); + } + + @Test + public void notCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (a > 30)"), + contains(bucketSelector(hasScript("params.a <= 30")))); + } + + @Test + public void notConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (AVG(a) > 30)"), + contains(bucketSelector(hasScript("params.avg_0 <= 30")))); + } + + @Test + public void andConditions() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 AND c <= 10"), + contains(bucketSelector(hasScript("params.a > 30 && params.c <= 10")))); + } + + @Test + public void andConditionsWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 AND SUM(c) <= 10"), + contains(bucketSelector(hasScript("params.avg_0 > 30 && params.sum_1 <= 10")))); + } + + @Test + public void orConditions() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 OR c <= 10"), + contains(bucketSelector(hasScript("params.a > 30 || params.c <= 10")))); + } + + @Test + public void orConditionsWithHavingAgg() { + assertThat( + 
query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 OR SUM(c) <= 10"), + contains(bucketSelector(hasScript("params.avg_0 > 30 || params.sum_1 <= 10")))); + } + + @Test + public void betweenCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.a >= 30 && params.a <= 50")))); + } + + @Test + public void betweenConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.avg_0 >= 30 && params.avg_0 <= 50")))); + } + + @Test + public void notBetweenCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.a < 30 || params.a > 50")))); + } + + @Test + public void notBetweenConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.avg_0 < 30 || params.avg_0 > 50")))); + } + + @Test + public void inCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a IN (30, 40, 50)"), + contains(bucketSelector(hasScript("params.a == 30 || params.a == 40 || params.a == 50")))); + } + + @Test + public void inConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) IN (30, 40, 50)"), + contains( + bucketSelector( + hasScript("params.avg_0 == 30 || params.avg_0 == 40 || params.avg_0 == 50")))); + } + + @Test + public void notInCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT IN (30, 40, 50)"), + contains(bucketSelector(hasScript("params.a != 30 && params.a != 40 && params.a != 50")))); + } + + @Test + public void notInConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT IN (30, 40, 50)"), + 
contains( + bucketSelector( + hasScript("params.avg_0 != 30 && params.avg_0 != 40 && params.avg_0 != 50")))); + } + + @Test + public void nestedConditions() { + assertThat( + query( + SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + + "HAVING i <= 10000 OR NOT (a < 10 OR a > 30) AND c <= 10"), + contains( + bucketSelector( + hasScript( + "params.i <= 10000 || ((params.a >= 10 && params.a <= 30) && params.c <=" + + " 10)")))); + } + + @Test(expected = ParserException.class) + public void aggregationFunctionOnTheRight() { + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING 10 < a"); + } + + private Collection query(String sql) { + return translate(SqlParserUtils.parse(sql)); + } + + private Collection translate(SQLQueryExpr expr) { + try { + Select select = new SqlParser().parseSelect(expr); + select + .getFields() + .forEach( + field -> { try { - new AggMaker() - .withWhere(select.getWhere()) - .makeFieldAgg((MethodField) field, AggregationBuilders.terms("")); + new AggMaker() + .withWhere(select.getWhere()) + .makeFieldAgg((MethodField) field, AggregationBuilders.terms("")); } catch (SqlParseException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); - AggregationBuilder agg = AggregationBuilders.terms(""); - select.getHaving().explain(agg, select.getFields()); - return agg.getPipelineAggregations(); - } catch (SqlParseException e) { - throw new ParserException("Illegal sql expr: " + expr.toString()); - } - } - - @SafeVarargs - private final Matcher bucketSelector(Matcher... matchers) { - return both(Matchers. // instanceOf() has type inference problem - instanceOf(BucketSelectorPipelineAggregationBuilder.class) - ). - and(allOf(matchers)); - } - - private Matcher hasBucketPath(String... expectedBucketPath) { - Map expectedMap = Arrays.stream(expectedBucketPath). - map(e -> e.split(":")). 
- collect(toMap(e -> e[0].trim(), e -> e[1].trim())); - return hasFieldWithValue("bucketsPathsMap", "has bucket path", is(expectedMap)); - } - - private Matcher hasScript(String expectedCode) { - return hasFieldWithValue("script", "has script", is(new Script(expectedCode))); - } + }); + AggregationBuilder agg = AggregationBuilders.terms(""); + select.getHaving().explain(agg, select.getFields()); + return agg.getPipelineAggregations(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); + } + } + + @SafeVarargs + private final Matcher bucketSelector( + Matcher... matchers) { + return both(Matchers + . // instanceOf() has type inference problem + instanceOf(BucketSelectorPipelineAggregationBuilder.class)) + .and(allOf(matchers)); + } + + private Matcher hasBucketPath(String... expectedBucketPath) { + Map expectedMap = + Arrays.stream(expectedBucketPath) + .map(e -> e.split(":")) + .collect(toMap(e -> e[0].trim(), e -> e[1].trim())); + return hasFieldWithValue("bucketsPathsMap", "has bucket path", is(expectedMap)); + } + + private Matcher hasScript(String expectedCode) { + return hasFieldWithValue("script", "has script", is(new Script(expectedCode))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java index b70779110a..fc08e7c516 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -49,360 +48,461 @@ @RunWith(MockitoJUnitRunner.Silent.class) public class JSONRequestTest { - @Mock - private ColumnTypeProvider columnTypeProvider; - - @Before - public void setup() { - 
when(columnTypeProvider.get(anyInt())).thenReturn(Schema.Type.DOUBLE); - } - - @Test - public void aggWithoutWhere() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnParent() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING c > 1"); - - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnParentOrNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name 
LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnParentAndNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnNestedAndNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"must\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); - assertThat(explainSQL, 
containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnNestedOrNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"should\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggInHavingWithoutWhere() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); - } - - @Test - public void aggInHavingWithWhereOnParent() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - 
equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + @Mock private ColumnTypeProvider columnTypeProvider; + + @Before + public void setup() { + when(columnTypeProvider.get(anyInt())).thenReturn(Schema.Type.DOUBLE); + } + + @Test + public void aggWithoutWhere() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "GROUP BY name " + + "HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnParent() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING c > 1"); + + assertThat( + explainSQL, + containsString( + "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING c > 1"); + + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + 
@Test + public void aggWithWhereOnParentOrNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnParentAndNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnNestedAndNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 2000 AND projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1"); + assertThat( + explainSQL, + containsString( + 
"\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"must\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnNestedOrNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 2000 OR projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"should\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggInHavingWithoutWhere() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnParent() { + JSONObject explainSQL = + explainSQLToJson( + 
"SELECT name " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void aggInHavingWithWhereOnParentOrNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE name LIKE '%smith%' OR nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + 
equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnParentAndNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE name LIKE '%smith%' AND nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void aggInHavingWithWhereOnNestedAndNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 2000 AND" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo( + 
"[{\"bool\":{\"adjust_pure_negative\":true,\"must\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); + } + + @Test + public void aggInHavingWithWhereOnNestedOrNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 2000 OR" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo( + "[{\"bool\":{\"adjust_pure_negative\":true,\"should\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); + } + + @Test + public void searchSanity() throws IOException { + String result = + explain( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20 " + + "GROUP BY gender " + + "ORDER BY _score\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/search_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + // This test was ignored because 
group by case function is not supported + @Ignore + @Test + public void aggregationQuery() throws IOException { + String result = + explain( + String.format( + "{\"query\":\"SELECT address, CASE WHEN gender='0' THEN 'aaa' ELSE 'bbb' END AS" + + " a2345, count(age) FROM %s GROUP BY" + + " terms('field'='address','execution_hint'='global_ordinals'), a2345\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + + "src/test/resources/expectedOutput/aggregation_query_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + @Test + public void deleteSanity() throws IOException { + try (MockedStatic localClusterStateMockedStatic = + Mockito.mockStatic(LocalClusterState.class)) { + LocalClusterState state = mock(LocalClusterState.class); + localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); + when(state.getSettingValue(any(Settings.Key.class))).thenReturn(true); + + String result = + explain( + String.format( + "{\"query\":\"" + + "DELETE " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/delete_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); } - - @Test - public void aggInHavingWithWhereOnNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - 
query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); - } - - @Test - public void aggInHavingWithWhereOnParentOrNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test(expected = SQLFeatureDisabledException.class) + public void deleteShouldThrowExceptionWhenDisabled() + throws SQLFeatureDisabledException, SQLFeatureNotSupportedException, SqlParseException { + try (MockedStatic localClusterStateMockedStatic = + Mockito.mockStatic(LocalClusterState.class)) { + LocalClusterState state = mock(LocalClusterState.class); + localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); + when(state.getSettingValue(any(Settings.Key.class))).thenReturn(false); + + JSONObject jsonRequest = + new JSONObject( + StringUtils.format( + "{\"query\":\"" + + "DELETE " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + translate(jsonRequest.getString("query"), jsonRequest); } - - @Test - public void aggInHavingWithWhereOnParentAndNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' 
AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void queryFilter() throws IOException { + /* + * Human-readable format of the request defined below: + * { + * "query": "SELECT * FROM accounts WHERE age > 25", + * "filter": { + * "range": { + * "balance": { + * "lte": 30000 + * } + * } + * } + * } + */ + String result = + explain( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE age > 25\"," + + "\"filter\":{\"range\":{\"balance\":{\"lte\":30000}}}}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + + "src/test/resources/expectedOutput/json_filter_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + private String removeSpaces(String s) { + return s.replaceAll("\\s+", ""); + } + + private String explainSQL(String sql) { + return explain(String.format("{\"query\":\"%s\"}", sql)); + } + + private JSONObject explainSQLToJson(String sql) { + return new JSONObject(explain(String.format("{\"query\":\"%s\"}", sql))); + } + + private String query(JSONObject jsonObject, String jsonPath) { + return jsonObject.query(jsonPath).toString(); + } + + private String explain(String request) { + try 
{ + JSONObject jsonRequest = new JSONObject(request); + String sql = jsonRequest.getString("query"); + + return translate(sql, jsonRequest); + } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { + throw new ParserException("Illegal sql expr in request: " + request); } - - @Test - public void aggInHavingWithWhereOnNestedAndNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"bool\":{\"adjust_pure_negative\":true,\"must\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); - } - - @Test - public void aggInHavingWithWhereOnNestedOrNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, - "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, 
"/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, - "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"bool\":{\"adjust_pure_negative\":true,\"should\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); - } - - @Test - public void searchSanity() throws IOException { - String result = explain(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20 " + - "GROUP BY gender " + - "ORDER BY _score\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/search_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - // This test was ignored because group by case function is not supported - @Ignore - @Test - public void aggregationQuery() throws IOException { - String result = explain(String.format("{\"query\":\"" + - "SELECT address, CASE WHEN gender='0' THEN 'aaa' ELSE 'bbb' END AS a2345, count(age) " + - "FROM %s " + - "GROUP BY terms('field'='address','execution_hint'='global_ordinals'), a2345\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/aggregation_query_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - @Test - public void deleteSanity() throws IOException { - try (MockedStatic localClusterStateMockedStatic = - Mockito.mockStatic(LocalClusterState.class)) { - 
LocalClusterState state = mock(LocalClusterState.class); - localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); - when(state.getSettingValue(any(Settings.Key.class))).thenReturn(true); - - String result = explain(String.format("{\"query\":\"" + - "DELETE " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/delete_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - } - - @Test(expected = SQLFeatureDisabledException.class) - public void deleteShouldThrowExceptionWhenDisabled() - throws SQLFeatureDisabledException, SQLFeatureNotSupportedException, - SqlParseException { - try (MockedStatic localClusterStateMockedStatic = - Mockito.mockStatic(LocalClusterState.class)) { - LocalClusterState state = mock(LocalClusterState.class); - localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); - when(state.getSettingValue(any(Settings.Key.class))).thenReturn(false); - - JSONObject jsonRequest = new JSONObject(StringUtils.format("{\"query\":\"" + - "DELETE " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - translate(jsonRequest.getString("query"), jsonRequest); - } - } - - @Test - public void queryFilter() throws IOException { - /* - * Human readable format of the request defined below: - * { - * "query": "SELECT * FROM accounts WHERE age > 25", - * "filter": { - * "range": { - * "balance": { - * "lte": 30000 - * } - * } - * } - * } - */ - String result = explain(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > 25\"," + - "\"filter\":{\"range\":{\"balance\":{\"lte\":30000}}}}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + 
"src/test/resources/expectedOutput/json_filter_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - private String removeSpaces(String s) { - return s.replaceAll("\\s+", ""); - } - - private String explainSQL(String sql) { - return explain(String.format("{\"query\":\"%s\"}", sql)); - } - - private JSONObject explainSQLToJson(String sql) { - return new JSONObject(explain(String.format("{\"query\":\"%s\"}", sql))); - } - - private String query(JSONObject jsonObject, String jsonPath) { - return jsonObject.query(jsonPath).toString(); - } - - private String explain(String request) { - try { - JSONObject jsonRequest = new JSONObject(request); - String sql = jsonRequest.getString("query"); - - return translate(sql, jsonRequest); - } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { - throw new ParserException("Illegal sql expr in request: " + request); - } - } - - private String translate(String sql, JSONObject jsonRequest) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - Client mockClient = mock(Client.class); - CheckScriptContents.stubMockClient(mockClient); - QueryAction queryAction = - OpenSearchActionFactory - .create(mockClient, new QueryActionRequest(sql, columnTypeProvider, Format.JDBC)); - - SqlRequest sqlRequest = new SqlRequest(sql, jsonRequest); - queryAction.setSqlRequest(sqlRequest); - - SqlElasticRequestBuilder requestBuilder = queryAction.explain(); - return requestBuilder.explain(); - } - - private String getResourcePath() { - String projectRoot = System.getProperty("project.root"); - if ( projectRoot!= null && projectRoot.trim().length() > 0) { - return projectRoot.trim() + "/"; - } else { - return ""; - } + } + + private String translate(String sql, JSONObject jsonRequest) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + Client 
mockClient = mock(Client.class); + CheckScriptContents.stubMockClient(mockClient); + QueryAction queryAction = + OpenSearchActionFactory.create( + mockClient, new QueryActionRequest(sql, columnTypeProvider, Format.JDBC)); + + SqlRequest sqlRequest = new SqlRequest(sql, jsonRequest); + queryAction.setSqlRequest(sqlRequest); + + SqlElasticRequestBuilder requestBuilder = queryAction.explain(); + return requestBuilder.explain(); + } + + private String getResourcePath() { + String projectRoot = System.getProperty("project.root"); + if (projectRoot != null && projectRoot.trim().length() > 0) { + return projectRoot.trim() + "/"; + } else { + return ""; } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java index cb8568925d..6c38af05af 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertEquals; @@ -35,154 +34,161 @@ import org.opensearch.sql.legacy.util.TestsConstants; import org.opensearch.sql.opensearch.setting.OpenSearchSettings; -/** - * Local cluster state testing without covering OpenSearch logic, ex. resolve index pattern. - */ +/** Local cluster state testing without covering OpenSearch logic, ex. resolve index pattern. 
*/ public class LocalClusterStateTest { - private static final String INDEX_NAME = TestsConstants.TEST_INDEX_BANK; - private static final String TYPE_NAME = "account"; - - private static final String MAPPING = "{\n" + - " \"opensearch-sql_test_index_bank\": {\n" + - " \"mappings\": {\n" + - " \"account\": {\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"raw\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"manager\": {\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - // ==== All required by IndexMetaData.fromXContent() ==== - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 5,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\",\n" + - " \"aliases_version\": \"1\"\n" + - //======================================================= - " }\n" + - "}"; - - @Mock private ClusterSettings clusterSettings; - - @Before - public void init() { - MockitoAnnotations.openMocks(this); - LocalClusterState.state(null); - mockLocalClusterState(MAPPING); - } - - @Test - public void getMappingForExistingField() { - 
IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - Assert.assertNotNull(indexMappings); - - FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); - Assert.assertNotNull(fieldMappings); - - Assert.assertEquals("text", fieldMappings.mapping("address").get("type")); - Assert.assertEquals("integer", fieldMappings.mapping("age").get("type")); - Assert.assertEquals("keyword", fieldMappings.mapping("city").get("type")); - Assert.assertEquals("text", fieldMappings.mapping("employer").get("type")); - - Assert.assertEquals("text", fieldMappings.mapping("manager.name").get("type")); - Assert.assertEquals("keyword", fieldMappings.mapping("manager.address").get("type")); + private static final String INDEX_NAME = TestsConstants.TEST_INDEX_BANK; + private static final String TYPE_NAME = "account"; + + private static final String MAPPING = + "{\n" + + " \"opensearch-sql_test_index_bank\": {\n" + + " \"mappings\": {\n" + + " \"account\": {\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"raw\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"manager\": {\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + + // ==== All 
required by IndexMetaData.fromXContent() ==== + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 5,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + + // ======================================================= + " }\n" + + "}"; + + @Mock private ClusterSettings clusterSettings; + + @Before + public void init() { + MockitoAnnotations.openMocks(this); + LocalClusterState.state(null); + mockLocalClusterState(MAPPING); + } + + @Test + public void getMappingForExistingField() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); + Assert.assertNotNull(indexMappings); + + FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); + Assert.assertNotNull(fieldMappings); + + Assert.assertEquals("text", fieldMappings.mapping("address").get("type")); + Assert.assertEquals("integer", fieldMappings.mapping("age").get("type")); + Assert.assertEquals("keyword", fieldMappings.mapping("city").get("type")); + Assert.assertEquals("text", fieldMappings.mapping("employer").get("type")); + + Assert.assertEquals("text", fieldMappings.mapping("manager.name").get("type")); + Assert.assertEquals("keyword", fieldMappings.mapping("manager.address").get("type")); + } + + @Test + public void getMappingForInvalidField() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); + FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); + + Assert.assertNull(fieldMappings.mapping("work-email")); + Assert.assertNull(fieldMappings.mapping("manager.home-address")); + Assert.assertNull(fieldMappings.mapping("manager.name.first")); + Assert.assertNull(fieldMappings.mapping("manager.name.first.uppercase")); + } + + @Test + public void getMappingFromCache() throws IOException 
{ + // Mock here again for verification below and mock addListener() + ClusterService mockService = mockClusterService(MAPPING); + ClusterStateListener[] listener = new ClusterStateListener[1]; // Trick to access inside lambda + doAnswer( + invocation -> { + listener[0] = (ClusterStateListener) invocation.getArguments()[0]; + return null; + }) + .when(mockService) + .addListener(any()); + LocalClusterState.state().setClusterService(mockService); + + // 1.Actual findMappings be invoked only once + for (int i = 0; i < 10; i++) { + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); } - - @Test - public void getMappingForInvalidField() { - IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); - - Assert.assertNull(fieldMappings.mapping("work-email")); - Assert.assertNull(fieldMappings.mapping("manager.home-address")); - Assert.assertNull(fieldMappings.mapping("manager.name.first")); - Assert.assertNull(fieldMappings.mapping("manager.name.first.uppercase")); - } - - @Test - public void getMappingFromCache() throws IOException { - // Mock here again for verification below and mock addListener() - ClusterService mockService = mockClusterService(MAPPING); - ClusterStateListener[] listener = new ClusterStateListener[1]; // Trick to access inside lambda - doAnswer(invocation -> { - listener[0] = (ClusterStateListener) invocation.getArguments()[0]; - return null; - }).when(mockService).addListener(any()); - LocalClusterState.state().setClusterService(mockService); - - // 1.Actual findMappings be invoked only once - for (int i = 0; i < 10; i++) { - LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - } - verify(mockService.state().metadata(), times(1)).findMappings(eq(new String[]{INDEX_NAME}), any()); - - // 2.Fire cluster state change event - Assert.assertNotNull(listener[0]); - ClusterChangedEvent mockEvent = 
mock(ClusterChangedEvent.class); - when(mockEvent.metadataChanged()).thenReturn(true); - listener[0].clusterChanged(mockEvent); - - // 3.Cache should be invalidated and call findMapping another time only - for (int i = 0; i < 5; i++) { - LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - } - verify(mockService.state().metadata(), times(2)).findMappings(eq(new String[]{INDEX_NAME}), any()); + verify(mockService.state().metadata(), times(1)) + .findMappings(eq(new String[] {INDEX_NAME}), any()); + + // 2.Fire cluster state change event + Assert.assertNotNull(listener[0]); + ClusterChangedEvent mockEvent = mock(ClusterChangedEvent.class); + when(mockEvent.metadataChanged()).thenReturn(true); + listener[0].clusterChanged(mockEvent); + + // 3.Cache should be invalidated and call findMapping another time only + for (int i = 0; i < 5; i++) { + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); } - - @Test - public void getDefaultValueForQuerySlowLog() { - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); - assertEquals(Integer.valueOf(2), settings.getSettingValue(Settings.Key.SQL_SLOWLOG)); - } - + verify(mockService.state().metadata(), times(2)) + .findMappings(eq(new String[] {INDEX_NAME}), any()); + } + + @Test + public void getDefaultValueForQuerySlowLog() { + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); + assertEquals(Integer.valueOf(2), settings.getSettingValue(Settings.Key.SQL_SLOWLOG)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java index b52dd3efc6..e62060c574 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -17,437 +16,291 @@ public class MathFunctionsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - /** Tests for case insensitivity when calling SQL functions */ - @Test - public void lowerCaseInSelect() { - String query = "SELECT abs(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.abs(doc['age'].value)")); - } - - @Test - public void upperCaseInSelect() { - String query = "SELECT ABS(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.abs(doc['age'].value)")); - } - - @Test - public void lowerCaseInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE sqrt(age) > 5"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.sqrt(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "sqrt_\\d+ > 5")); - } - - @Test - public void upperCaseInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE SQRT(age) > 5"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.sqrt(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "sqrt_\\d+ > 5")); - } - - /** Tests for constant functions */ - @Test - public void eulersNumberInSelect() { - String query = "SELECT E() " + - "FROM bank"; - 
ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.E")); - } - - @Test - public void eulersNumberInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE E() > 2"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.E")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "E_\\d+ > 2")); - } - - @Test - public void piInSelect() { - String query = "SELECT PI() " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.PI")); - } - - @Test - public void piInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE PI() < 4"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.PI")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "PI_\\d+ < 4")); - } - - /** Tests for general math functions */ - @Test - public void expm1WithPropertyArgument() { - String query = "SELECT * " + - "FROM bank " + - "WHERE expm1(age) > 10"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.expm1(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "expm1_\\d+ > 10")); - } - - @Test - public void expm1WithValueArgument() { - String query = "SELECT * " + - "FROM bank " + - "WHERE expm1(5) > 10"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.expm1(5)")); - assertTrue( - 
CheckScriptContents.scriptHasPattern( - scriptFilter, - "expm1_\\d+ > 10")); - } - - - /** Tests for trigonometric functions */ - @Test - public void degreesWithPropertyArgument() { - String query = "SELECT degrees(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toDegrees(doc['age'].value)")); - } - - @Test - public void degreesWithValueArgument() { - String query = "SELECT degrees(10) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toDegrees(10)")); - } - - @Test - public void radiansWithPropertyArgument() { - String query = "SELECT radians(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(doc['age'].value)")); - } - - @Test - public void radiansWithValueArgument() { - String query = "SELECT radians(180) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(180)")); - } - - @Test - public void sinWithPropertyArgument() { - String query = "SELECT sin(radians(age)) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.sin\\(radians_\\d+\\)")); - } - - @Test - public void sinWithValueArgument() { - String query = "SELECT sin(radians(180)) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - 
scriptField, - "Math.toRadians(180)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.sin\\(radians_\\d+\\)")); - } - - @Test - public void atanWithPropertyArgument() { - String query = "SELECT atan(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan(doc['age'].value)")); - } - - @Test - public void atanWithValueArgument() { - String query = "SELECT atan(1) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan(1)")); - } - - @Test - public void atanWithFunctionArgument() { - String query = "SELECT atan(PI() / 2) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.PI")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "PI_\\d+ / 2")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.atan\\(divide_\\d+\\)")); - } - - @Test - public void coshWithPropertyArgument() { - String query = "SELECT cosh(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.cosh(doc['age'].value)")); - } - - @Test - public void coshWithValueArgument() { - String query = "SELECT cosh(0) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.cosh(0)")); - } - - @Test - public void powerWithPropertyArgument() { - String query = "SELECT POWER(age, 2) FROM bank WHERE POWER(balance, 3) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - 
assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.pow(doc['age'].value, 2)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.pow(doc['balance'].value, 3)")); - } - - @Test - public void atan2WithPropertyArgument() { - String query = "SELECT ATAN2(age, 2) FROM bank WHERE ATAN2(balance, 3) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan2(doc['age'].value, 2)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.atan2(doc['balance'].value, 3)")); - } - - @Test - public void cotWithPropertyArgument() { - String query = "SELECT COT(age) FROM bank WHERE COT(balance) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "1 / Math.tan(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "1 / Math.tan(doc['balance'].value)")); - } - - @Test - public void signWithFunctionPropertyArgument() { - String query = "SELECT SIGN(age) FROM bank WHERE SIGNUM(balance) = 1"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue(CheckScriptContents.scriptContainsString( - scriptField, - "Math.signum(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.signum(doc['balance'].value)")); - } - - @Test - public void logWithOneParam() { - String query = "SELECT LOG(age) FROM 
bank WHERE LOG(age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)")); - } - - @Test - public void logWithTwoParams() { - String query = "SELECT LOG(3, age) FROM bank WHERE LOG(3, age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)/Math.log(3)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)/Math.log(3)")); - } - - @Test - public void log10Test() { - String query = "SELECT LOG10(age) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log10(doc['age'].value)" - ) - ); - } - - @Test - public void lnTest() { - String query = "SELECT LN(age) FROM age WHERE LN(age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)")); - } - - @Test - public void randWithoutParamTest() { - String query = "SELECT RAND() FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "new 
Random().nextDouble()" - ) - ); - } - - @Test - public void randWithOneParamTest() { - String query = "SELECT RAND(age) FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "new Random(doc['age'].value).nextDouble()" - ) - ); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + /** Tests for case insensitivity when calling SQL functions */ + @Test + public void lowerCaseInSelect() { + String query = "SELECT abs(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.abs(doc['age'].value)")); + } + + @Test + public void upperCaseInSelect() { + String query = "SELECT ABS(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.abs(doc['age'].value)")); + } + + @Test + public void lowerCaseInWhere() { + String query = "SELECT * " + "FROM bank WHERE sqrt(age) > 5"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.sqrt(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "sqrt_\\d+ > 5")); + } + + @Test + public void upperCaseInWhere() { + String query = "SELECT * " + "FROM bank WHERE SQRT(age) > 5"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.sqrt(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "sqrt_\\d+ > 5")); + } + + /** Tests for constant functions */ + @Test + public void eulersNumberInSelect() { + String query = "SELECT E() FROM bank"; + ScriptField 
scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.E")); + } + + @Test + public void eulersNumberInWhere() { + String query = "SELECT * " + "FROM bank WHERE E() > 2"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.E")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "E_\\d+ > 2")); + } + + @Test + public void piInSelect() { + String query = "SELECT PI() FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.PI")); + } + + @Test + public void piInWhere() { + String query = "SELECT * FROM bank WHERE PI() < 4"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.PI")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "PI_\\d+ < 4")); + } + + /** Tests for general math functions */ + @Test + public void expm1WithPropertyArgument() { + String query = "SELECT * FROM bank WHERE expm1(age) > 10"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.expm1(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "expm1_\\d+ > 10")); + } + + @Test + public void expm1WithValueArgument() { + String query = "SELECT * FROM bank WHERE expm1(5) > 10"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.expm1(5)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "expm1_\\d+ > 10")); + } + + /** Tests for trigonometric functions */ + @Test + public void 
degreesWithPropertyArgument() { + String query = "SELECT degrees(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toDegrees(doc['age'].value)")); + } + + @Test + public void degreesWithValueArgument() { + String query = "SELECT degrees(10) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toDegrees(10)")); + } + + @Test + public void radiansWithPropertyArgument() { + String query = "SELECT radians(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(doc['age'].value)")); + } + + @Test + public void radiansWithValueArgument() { + String query = "SELECT radians(180) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(180)")); + } + + @Test + public void sinWithPropertyArgument() { + String query = "SELECT sin(radians(age)) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.sin\\(radians_\\d+\\)")); + } + + @Test + public void sinWithValueArgument() { + String query = "SELECT sin(radians(180)) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(180)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.sin\\(radians_\\d+\\)")); + } + + @Test + public void atanWithPropertyArgument() { + String query = "SELECT atan(age) FROM bank"; + 
ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.atan(doc['age'].value)")); + } + + @Test + public void atanWithValueArgument() { + String query = "SELECT atan(1) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.atan(1)")); + } + + @Test + public void atanWithFunctionArgument() { + String query = "SELECT atan(PI() / 2) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.PI")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "PI_\\d+ / 2")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.atan\\(divide_\\d+\\)")); + } + + @Test + public void coshWithPropertyArgument() { + String query = "SELECT cosh(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.cosh(doc['age'].value)")); + } + + @Test + public void coshWithValueArgument() { + String query = "SELECT cosh(0) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.cosh(0)")); + } + + @Test + public void powerWithPropertyArgument() { + String query = "SELECT POWER(age, 2) FROM bank WHERE POWER(balance, 3) > 0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.pow(doc['age'].value, 2)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.pow(doc['balance'].value, 3)")); + } + + @Test + public void 
atan2WithPropertyArgument() { + String query = "SELECT ATAN2(age, 2) FROM bank WHERE ATAN2(balance, 3) > 0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.atan2(doc['age'].value, 2)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.atan2(doc['balance'].value, 3)")); + } + + @Test + public void cotWithPropertyArgument() { + String query = "SELECT COT(age) FROM bank WHERE COT(balance) > 0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "1 / Math.tan(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "1 / Math.tan(doc['balance'].value)")); + } + + @Test + public void signWithFunctionPropertyArgument() { + String query = "SELECT SIGN(age) FROM bank WHERE SIGNUM(balance) = 1"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.signum(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.signum(doc['balance'].value)")); + } + + @Test + public void logWithOneParam() { + String query = "SELECT LOG(age) FROM bank WHERE LOG(age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.log(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + 
CheckScriptContents.scriptContainsString(scriptFilter, "Math.log(doc['age'].value)")); + } + + @Test + public void logWithTwoParams() { + String query = "SELECT LOG(3, age) FROM bank WHERE LOG(3, age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "Math.log(doc['age'].value)/Math.log(3)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.log(doc['age'].value)/Math.log(3)")); + } + + @Test + public void log10Test() { + String query = "SELECT LOG10(age) FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.log10(doc['age'].value)")); + } + + @Test + public void lnTest() { + String query = "SELECT LN(age) FROM age WHERE LN(age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.log(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.log(doc['age'].value)")); + } + + @Test + public void randWithoutParamTest() { + String query = "SELECT RAND() FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "new Random().nextDouble()")); + } + + @Test + public void randWithOneParamTest() { + String query = "SELECT RAND(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "new Random(doc['age'].value).nextDouble()")); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java index 2160affda0..34c9b941d5 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.join; import java.io.IOException; @@ -23,30 +22,28 @@ @RunWith(MockitoJUnitRunner.class) public class ElasticUtilsTest { - @Mock - MetaSearchResult metaSearchResult; - - /** - * test handling {@link TotalHits} correctly. - */ - @Test - public void hitsAsStringResult() throws IOException { - final SearchHits searchHits = new SearchHits(new SearchHit[]{}, new TotalHits(1, Relation.EQUAL_TO), 0); - final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); - - Assert.assertEquals(1, new JSONObject(result).query("/hits/total/value")); - Assert.assertEquals(Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); - } - - /** - * test handling {@link TotalHits} with null value correctly. - */ - @Test - public void test_hitsAsStringResult_withNullTotalHits() throws IOException { - final SearchHits searchHits = new SearchHits(new SearchHit[]{}, null, 0); - final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); - - Assert.assertEquals(0, new JSONObject(result).query("/hits/total/value")); - Assert.assertEquals(Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); - } + @Mock MetaSearchResult metaSearchResult; + + /** test handling {@link TotalHits} correctly. 
*/ + @Test + public void hitsAsStringResult() throws IOException { + final SearchHits searchHits = + new SearchHits(new SearchHit[] {}, new TotalHits(1, Relation.EQUAL_TO), 0); + final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); + + Assert.assertEquals(1, new JSONObject(result).query("/hits/total/value")); + Assert.assertEquals( + Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); + } + + /** test handling {@link TotalHits} with null value correctly. */ + @Test + public void test_hitsAsStringResult_withNullTotalHits() throws IOException { + final SearchHits searchHits = new SearchHits(new SearchHit[] {}, null, 0); + final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); + + Assert.assertEquals(0, new JSONObject(result).query("/hits/total/value")); + Assert.assertEquals( + Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java index a6b736eca1..08bac51d77 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getNumberValue; @@ -15,35 +14,35 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.domain.BindingTuple; - public class ExpressionTest { - protected BindingTuple bindingTuple() { - String json = "{\n" + - " \"intValue\": 1,\n" + - " \"intValue2\": 2,\n" + - " \"doubleValue\": 2.0,\n" + - " \"negDoubleValue\": -2.0,\n" + - " 
\"stringValue\": \"string\",\n" + - " \"booleanValue\": true,\n" + - " \"tupleValue\": {\n" + - " \"intValue\": 1,\n" + - " \"doubleValue\": 2.0,\n" + - " \"stringValue\": \"string\"\n" + - " },\n" + - " \"collectValue\": [\n" + - " 1,\n" + - " 2,\n" + - " 3\n" + - " ]\n" + - "}"; - return BindingTuple.from(new JSONObject(json)); - } - - protected Expression of(ScalarOperation op, Expression... expressions) { - return ExpressionFactory.of(op, Arrays.asList(expressions)); - } - - protected Number apply(ScalarOperation op, Expression... expressions) { - return getNumberValue(of(op, expressions).valueOf(bindingTuple())); - } + protected BindingTuple bindingTuple() { + String json = + "{\n" + + " \"intValue\": 1,\n" + + " \"intValue2\": 2,\n" + + " \"doubleValue\": 2.0,\n" + + " \"negDoubleValue\": -2.0,\n" + + " \"stringValue\": \"string\",\n" + + " \"booleanValue\": true,\n" + + " \"tupleValue\": {\n" + + " \"intValue\": 1,\n" + + " \"doubleValue\": 2.0,\n" + + " \"stringValue\": \"string\"\n" + + " },\n" + + " \"collectValue\": [\n" + + " 1,\n" + + " 2,\n" + + " 3\n" + + " ]\n" + + "}"; + return BindingTuple.from(new JSONObject(json)); + } + + protected Expression of(ScalarOperation op, Expression... expressions) { + return ExpressionFactory.of(op, Arrays.asList(expressions)); + } + + protected Number apply(ScalarOperation op, Expression... 
expressions) { + return getNumberValue(of(op, expressions).valueOf(bindingTuple())); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java index 2555df4f13..d84543956d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.model; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,42 +18,41 @@ @RunWith(MockitoJUnitRunner.class) public class ExprValueUtilsTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void getIntegerValueWithIntegerExprValueShouldPass() { - assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.integerValue(1)), equalTo(1)); - } - - @Test - public void getDoubleValueWithIntegerExprValueShouldPass() { - assertThat(ExprValueUtils.getDoubleValue(ExprValueFactory.integerValue(1)), equalTo(1d)); - } - - @Test - public void getIntegerWithDoubleExprValueShouldPass() { - assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.doubleValue(1d)), equalTo(1)); - } - - @Test - public void getLongValueFromLongExprValueShouldPass() { - assertThat(ExprValueUtils.getLongValue(ExprValueFactory.from(1L)), equalTo(1L)); - } - - @Test - public void getIntegerValueFromStringExprValueShouldThrowException() { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("invalid to get NUMBER_VALUE from expr type of STRING_VALUE"); - - ExprValueUtils.getIntegerValue(ExprValueFactory.stringValue("string")); - } - - @Test - public void getStringValueFromIntegerExprValueShouldThrowException() { - exceptionRule.expect(IllegalStateException.class); - 
exceptionRule.expectMessage("invalid to get STRING_VALUE from expr type of INTEGER_VALUE"); - - ExprValueUtils.getStringValue(ExprValueFactory.integerValue(1)); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void getIntegerValueWithIntegerExprValueShouldPass() { + assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.integerValue(1)), equalTo(1)); + } + + @Test + public void getDoubleValueWithIntegerExprValueShouldPass() { + assertThat(ExprValueUtils.getDoubleValue(ExprValueFactory.integerValue(1)), equalTo(1d)); + } + + @Test + public void getIntegerWithDoubleExprValueShouldPass() { + assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.doubleValue(1d)), equalTo(1)); + } + + @Test + public void getLongValueFromLongExprValueShouldPass() { + assertThat(ExprValueUtils.getLongValue(ExprValueFactory.from(1L)), equalTo(1L)); + } + + @Test + public void getIntegerValueFromStringExprValueShouldThrowException() { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("invalid to get NUMBER_VALUE from expr type of STRING_VALUE"); + + ExprValueUtils.getIntegerValue(ExprValueFactory.stringValue("string")); + } + + @Test + public void getStringValueFromIntegerExprValueShouldThrowException() { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("invalid to get STRING_VALUE from expr type of INTEGER_VALUE"); + + ExprValueUtils.getStringValue(ExprValueFactory.integerValue(1)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java index a818a115fd..1ec499ce9b 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -14,19 +13,17 @@ public class GaugeMetricTest { - private static long x = 0; - - @Test - public void getValue() { - GaugeMetric gaugeMetric = new GaugeMetric<>("test", this::getSeq); - - assertThat(gaugeMetric.getValue(), equalTo(1L)); - assertThat(gaugeMetric.getValue(), equalTo(2L)); + private static long x = 0; - } + @Test + public void getValue() { + GaugeMetric gaugeMetric = new GaugeMetric<>("test", this::getSeq); - private long getSeq() { - return ++x; - } + assertThat(gaugeMetric.getValue(), equalTo(1L)); + assertThat(gaugeMetric.getValue(), equalTo(2L)); + } + private long getSeq() { + return ++x; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java index ff6d8e0c49..885ce6a7cd 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -20,55 +19,53 @@ public class MetricsTest { - @Test - public void registerMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test", new BasicCounter())); - - assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); - } - - @Test - public void unRegisterMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(2)); - - Metrics.getInstance().unregisterMetric("test2"); - assertThat(Metrics.getInstance().getAllMetrics().size(), 
equalTo(1)); - } - - @Test - public void getMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metric metric = Metrics.getInstance().getMetric("test1"); - - assertThat(metric, notNullValue()); - } - - - @Test - public void getAllMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - List list = Metrics.getInstance().getAllMetrics(); - - assertThat(list.size(), equalTo(2)); - } - - @Test - public void collectToJSON() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - String res = Metrics.getInstance().collectToJSON(); - JSONObject jsonObject = new JSONObject(res); - - assertThat(jsonObject.getLong("test1"), equalTo(0L)); - assertThat(jsonObject.getInt("test2"), equalTo(0)); - } - + @Test + public void registerMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test", new BasicCounter())); + + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); + } + + @Test + public void unRegisterMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(2)); + + Metrics.getInstance().unregisterMetric("test2"); + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); + } + + @Test + public void getMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metric metric = 
Metrics.getInstance().getMetric("test1"); + + assertThat(metric, notNullValue()); + } + + @Test + public void getAllMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + List list = Metrics.getInstance().getAllMetrics(); + + assertThat(list.size(), equalTo(2)); + } + + @Test + public void collectToJSON() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + String res = Metrics.getInstance().collectToJSON(); + JSONObject jsonObject = new JSONObject(res); + + assertThat(jsonObject.getLong("test1"), equalTo(0L)); + assertThat(jsonObject.getInt("test2"), equalTo(0)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java index 5115757c9c..c33e768f43 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -19,36 +18,40 @@ public class FieldMakerTest { - private static final String ALIAS = "a"; - - private static final String TABLE_ALIAS = "t"; - - private FieldMaker fieldMaker; - - @Before - public void init() { - fieldMaker = new FieldMaker(); - } - - @Test - public void makeFieldAssign() throws SqlParseException { - final SQLIntegerExpr sqlExpr = new SQLIntegerExpr(10); - final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); - - assertEquals("script", field.getName()); - assertEquals(ALIAS, 
field.getParams().get(0).value); - assertTrue(((String)field.getParams().get(1).value).matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); - assertEquals(ALIAS, field.getAlias()); - } - - @Test - public void makeFieldAssignDouble() throws SqlParseException { - final SQLNumberExpr sqlExpr = new SQLNumberExpr(10.0); - final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); - - assertEquals("script", field.getName()); - assertEquals(ALIAS, field.getParams().get(0).value); - assertTrue(((String)field.getParams().get(1).value).matches("def assign_[0-9]+ = 10.0;return assign_[0-9]+;")); - assertEquals(ALIAS, field.getAlias()); - } + private static final String ALIAS = "a"; + + private static final String TABLE_ALIAS = "t"; + + private FieldMaker fieldMaker; + + @Before + public void init() { + fieldMaker = new FieldMaker(); + } + + @Test + public void makeFieldAssign() throws SqlParseException { + final SQLIntegerExpr sqlExpr = new SQLIntegerExpr(10); + final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); + + assertEquals("script", field.getName()); + assertEquals(ALIAS, field.getParams().get(0).value); + assertTrue( + ((String) field.getParams().get(1).value) + .matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); + assertEquals(ALIAS, field.getAlias()); + } + + @Test + public void makeFieldAssignDouble() throws SqlParseException { + final SQLNumberExpr sqlExpr = new SQLNumberExpr(10.0); + final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); + + assertEquals("script", field.getName()); + assertEquals(ALIAS, field.getParams().get(0).value); + assertTrue( + ((String) field.getParams().get(1).value) + .matches("def assign_[0-9]+ = 10.0;return assign_[0-9]+;")); + assertEquals(ALIAS, field.getAlias()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java index 9b88336a85..ed57335980 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertEquals; @@ -14,101 +13,103 @@ public class ExistsSubQueryRewriterTest extends SubQueryRewriterTestBase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void nonCorrelatedExists() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p)"))) - ); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void nonCorrelatedExistsWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING AND p.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security')"))) - ); - } + @Test + public void nonCorrelatedExists() { + assertEquals( + sqlString( + expr("SELECT e.name FROM employee e, e.projects p WHERE p IS NOT MISSING")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p)" + )))); + } - @Test - public void nonCorrelatedExistsParentWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING AND e.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee 
as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p) AND e.name LIKE 'security'"))) - ); - } + @Test + public void nonCorrelatedExistsWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND p.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p" + + " WHERE p.name LIKE 'security')")))); + } - @Test - public void nonCorrelatedNotExists() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING)")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p)"))) - ); - } + @Test + public void nonCorrelatedExistsParentWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND e.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p)" + + " AND e.name LIKE 'security'")))); + } - @Test - public void nonCorrelatedNotExistsWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING AND p.name LIKE 'security')")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security')"))) - ); - } + @Test + public void nonCorrelatedNotExists() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE NOT (p IS NOT MISSING)")), + sqlString( + rewrite( + expr( + "SELECT e.name " + + "FROM employee as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p)")))); + } - @Test - public void nonCorrelatedNotExistsParentWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE 
NOT (p IS NOT MISSING) AND e.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p) AND e.name LIKE 'security'"))) - ); - } + @Test + public void nonCorrelatedNotExistsWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE NOT (p IS NOT MISSING AND p.name LIKE 'security')")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE NOT EXISTS (SELECT * FROM e.projects as" + + " p WHERE p.name LIKE 'security')")))); + } - @Test - public void nonCorrelatedExistsAnd() { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery"); - rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p) AND EXISTS (SELECT * FROM e.comments as c)")); - } + @Test + public void nonCorrelatedNotExistsParentWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE NOT (p IS NOT MISSING) AND e.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE NOT EXISTS (SELECT * FROM e.projects as" + + " p) AND e.name LIKE 'security'")))); + } + @Test + public void nonCorrelatedExistsAnd() { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery"); + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p) AND" + + " EXISTS (SELECT * FROM e.comments as c)")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java index e6bd42a273..bb33baae7d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertEquals; @@ -14,73 +13,62 @@ public class InSubqueryRewriterTest extends SubQueryRewriterTestBase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void nonCorrleatedIn() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL")), - sqlString(rewrite(expr( - "SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB)"))) - ); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void nonCorrleatedInWithWhere() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL AND TbB_1.b > 0")), - sqlString(rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b FROM TbB WHERE b > 0)"))) - ); - } + @Test + public void nonCorrleatedIn() throws Exception { + assertEquals( + sqlString( + expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL")), + sqlString(rewrite(expr("SELECT * FROM TbA " + "WHERE a in (SELECT b FROM TbB)")))); + } - @Test - public void nonCorrleatedInWithOuterWhere() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL AND TbA_0.a > 10")), - sqlString(rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b FROM TbB) AND a > 10"))) - ); - } + @Test + public void nonCorrleatedInWithWhere() throws Exception { + assertEquals( + sqlString( + 
expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL AND TbB_1.b > 0")), + sqlString( + rewrite( + expr("SELECT * " + "FROM TbA " + "WHERE a in (SELECT b FROM TbB WHERE b > 0)")))); + } + @Test + public void nonCorrleatedInWithOuterWhere() throws Exception { + assertEquals( + sqlString( + expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL AND TbA_0.a > 10")), + sqlString( + rewrite( + expr("SELECT * " + "FROM TbA " + "WHERE a in (SELECT b FROM TbB) AND a > 10")))); + } - @Test - public void notInUnsupported() throws Exception { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery"); - rewrite(expr( - "SELECT * FROM TbA " + - "WHERE a not in (SELECT b FROM TbB)")); - } + @Test + public void notInUnsupported() throws Exception { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery"); + rewrite(expr("SELECT * FROM TbA WHERE a not in (SELECT b FROM TbB)")); + } - @Test - public void testMultipleSelectException() throws Exception { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery with multiple select [TbB_1.b1, TbB_1.b2]"); - rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b1, b2 FROM TbB) AND a > 10")); - } + @Test + public void testMultipleSelectException() throws Exception { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery with multiple select [TbB_1.b1, TbB_1.b2]"); + rewrite(expr("SELECT * " + "FROM TbA WHERE a in (SELECT b1, b2 FROM TbB) AND a > 10")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java index 34a915ac2b..8aae3996a0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery.utils; import static org.junit.Assert.assertEquals; @@ -16,36 +15,34 @@ public class FindSubQueryTest { - @Test - public void hasInSubQuery() { - FindSubQuery findSubQuery = new FindSubQuery(); - - parse("SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB)").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); - } - - @Test - public void hasExistSubQuery() { - FindSubQuery findSubQuery = new FindSubQuery(); - - parse("SELECT * FROM TbA " + - "WHERE EXISTS (SELECT * FROM TbB)").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlExistsExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlExistsExprs().size()); - } - - @Test - public void stopVisitWhenFound() { - FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); - - parse("SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB WHERE b2 in (SELECT c FROM Tbc))").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); - } + @Test + public void hasInSubQuery() { + FindSubQuery findSubQuery = new FindSubQuery(); + + parse("SELECT * FROM TbA " + "WHERE a in (SELECT b FROM TbB)").accept(findSubQuery); + assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); + 
assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); + } + + @Test + public void hasExistSubQuery() { + FindSubQuery findSubQuery = new FindSubQuery(); + + parse("SELECT * FROM TbA WHERE EXISTS (SELECT * FROM TbB)").accept(findSubQuery); + assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlExistsExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlExistsExprs().size()); + } + + @Test + public void stopVisitWhenFound() { + FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); + + parse("SELECT * FROM TbA WHERE a in (SELECT b FROM TbB WHERE b2 in (SELECT c FROM Tbc))") + .accept(findSubQuery); + assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java index 3a7f074a0f..74f6411f73 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import java.lang.reflect.Field; @@ -13,42 +12,43 @@ /** * A matcher for private field value extraction along with matcher to assert its value. * - * @param Type of target (actual) object - * @param Type of field member (feature) extracted from target object by reflection + * @param Type of target (actual) object + * @param Type of field member (feature) extracted from target object by reflection */ public class HasFieldWithValue extends FeatureMatcher { - private final String fieldName; - - /** - * Construct a matcher. Reordered the argument list. 
- * - * @param name Identifying text for mismatch message - * @param desc Descriptive text to use in describeTo - * @param matcher The matcher to apply to the feature - */ - private HasFieldWithValue(String name, String desc, Matcher matcher) { - super(matcher, desc, name); - this.fieldName = name; - } - - public static HasFieldWithValue hasFieldWithValue(String name, String desc, Matcher matcher) { - return new HasFieldWithValue<>(name, desc, matcher); - } - - @Override - protected U featureValueOf(T targetObj) { - return getFieldValue(targetObj, fieldName); - } - - @SuppressWarnings("unchecked") - private U getFieldValue(Object obj, String fieldName) { - try { - Field field = obj.getClass().getDeclaredField(fieldName); - field.setAccessible(true); - return (U) field.get(obj); - } catch (NoSuchFieldException | IllegalAccessException e) { - throw new IllegalArgumentException(e); - } + private final String fieldName; + + /** + * Construct a matcher. Reordered the argument list. + * + * @param name Identifying text for mismatch message + * @param desc Descriptive text to use in describeTo + * @param matcher The matcher to apply to the feature + */ + private HasFieldWithValue(String name, String desc, Matcher matcher) { + super(matcher, desc, name); + this.fieldName = name; + } + + public static HasFieldWithValue hasFieldWithValue( + String name, String desc, Matcher matcher) { + return new HasFieldWithValue<>(name, desc, matcher); + } + + @Override + protected U featureValueOf(T targetObj) { + return getFieldValue(targetObj, fieldName); + } + + @SuppressWarnings("unchecked") + private U getFieldValue(Object obj, String fieldName) { + try { + Field field = obj.getClass().getDeclaredField(fieldName); + field.setAccessible(true); + return (U) field.get(obj); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new IllegalArgumentException(e); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java index 95eed26670..0e5f699092 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static org.hamcrest.MatcherAssert.assertThat; @@ -36,260 +35,266 @@ public class MatcherUtils { - /** - * Assert field value in object by a custom matcher and getter to access the field. - * - * @param name description - * @param subMatcher sub-matcher for field - * @param getter getter function to access the field - * @param type of outer object - * @param type of inner field - * @return matcher - */ - public static FeatureMatcher featureValueOf(String name, - Matcher subMatcher, - Function getter) { - return new FeatureMatcher(subMatcher, name, name) { - @Override - protected U featureValueOf(T actual) { - return getter.apply(actual); - } - }; + /** + * Assert field value in object by a custom matcher and getter to access the field. + * + * @param name description + * @param subMatcher sub-matcher for field + * @param getter getter function to access the field + * @param type of outer object + * @param type of inner field + * @return matcher + */ + public static FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { + return new FeatureMatcher(subMatcher, name, name) { + @Override + protected U featureValueOf(T actual) { + return getter.apply(actual); + } + }; + } + + @SafeVarargs + public static Matcher hits(Matcher... hitMatchers) { + if (hitMatchers.length == 0) { + return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - - @SafeVarargs - public static Matcher hits(Matcher... 
hitMatchers) { - if (hitMatchers.length == 0) { - return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); - } - return featureValueOf("SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); + return featureValueOf( + "SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); + } + + @SafeVarargs + public static Matcher hitsInOrder(Matcher... hitMatchers) { + if (hitMatchers.length == 0) { + return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - - @SafeVarargs - public static Matcher hitsInOrder(Matcher... hitMatchers) { - if (hitMatchers.length == 0) { - return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); - } - return featureValueOf("SearchHits", arrayContaining(hitMatchers), SearchHits::getHits); - } - - @SuppressWarnings("unchecked") - public static Matcher hit(Matcher>... entryMatchers) { - return featureValueOf("SearchHit", allOf(entryMatchers), SearchHit::getSourceAsMap); - } - - @SuppressWarnings("unchecked") - public static Matcher> kv(String key, Object value) { - // Use raw type to avoid generic type problem from Matcher> to Matcher - return (Matcher) hasEntry(key, value); - } - - public static Matcher hitAny(String query, Matcher... matcher) { - return featureValueOf("SearchHits", hasItems(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query(query)); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; + return featureValueOf("SearchHits", arrayContaining(hitMatchers), SearchHits::getHits); + } + + @SuppressWarnings("unchecked") + public static Matcher hit(Matcher>... 
entryMatchers) { + return featureValueOf("SearchHit", allOf(entryMatchers), SearchHit::getSourceAsMap); + } + + @SuppressWarnings("unchecked") + public static Matcher> kv(String key, Object value) { + // Use raw type to avoid generic type problem from Matcher> to Matcher + return (Matcher) hasEntry(key, value); + } + + public static Matcher hitAny(String query, Matcher... matcher) { + return featureValueOf( + "SearchHits", + hasItems(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query(query)); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; }); - } - - public static Matcher hitAny(Matcher... matcher) { - return hitAny("/hits/hits", matcher); - } - - public static Matcher hitAll(Matcher... matcher) { - return featureValueOf("SearchHits", containsInAnyOrder(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query("/hits/hits")); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; + } + + public static Matcher hitAny(Matcher... matcher) { + return hitAny("/hits/hits", matcher); + } + + public static Matcher hitAll(Matcher... 
matcher) { + return featureValueOf( + "SearchHits", + containsInAnyOrder(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query("/hits/hits")); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; }); + } + + public static Matcher kvString(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (String) actual.query(key)); + } + + public static Matcher kvDouble(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (Double) actual.query(key)); + } + + public static Matcher kvInt(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (Integer) actual.query(key)); + } + + @SafeVarargs + public static void verifySchema(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("schema"), matchers); + } + + @SafeVarargs + public static void verifyDataRows(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("datarows"), matchers); + } + + @SafeVarargs + public static void verifyColumn(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("schema"), matchers); + } + + @SafeVarargs + public static void verifyOrder(JSONObject response, Matcher... matchers) { + verifyOrder(response.getJSONArray("datarows"), matchers); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + public static void verifyDataRowsInOrder(JSONObject response, Matcher... matchers) { + verifyInOrder(response.getJSONArray("datarows"), matchers); + } + + @SuppressWarnings("unchecked") + public static void verify(JSONArray array, Matcher... 
matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, containsInAnyOrder(matchers)); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + public static void verifyInOrder(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, contains(matchers)); + } + + @SuppressWarnings("unchecked") + public static void verifySome(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + + assertThat(matchers.length, greaterThan(0)); + for (Matcher matcher : matchers) { + assertThat(objects, hasItems(matcher)); } - - public static Matcher kvString(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (String) actual.query(key)); - } - - public static Matcher kvDouble(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (Double) actual.query(key)); - } - - public static Matcher kvInt(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (Integer) actual.query(key)); - } - - @SafeVarargs - public static void verifySchema(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("schema"), matchers); - } - - @SafeVarargs - public static void verifyDataRows(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("datarows"), matchers); - } - - @SafeVarargs - public static void verifyColumn(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("schema"), matchers); - } - - @SafeVarargs - public static void verifyOrder(JSONObject response, Matcher... 
matchers) { - verifyOrder(response.getJSONArray("datarows"), matchers); - } - - @SafeVarargs - @SuppressWarnings("unchecked") - public static void verifyDataRowsInOrder(JSONObject response, Matcher... matchers) { - verifyInOrder(response.getJSONArray("datarows"), matchers); + } + + @SafeVarargs + public static void verifyOrder(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, containsInRelativeOrder(matchers)); + } + + public static TypeSafeMatcher schema( + String expectedName, String expectedAlias, String expectedType) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + String.format( + "(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); + } + + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + String actualName = (String) jsonObject.query("/name"); + String actualAlias = (String) jsonObject.query("/alias"); + String actualType = (String) jsonObject.query("/type"); + return expectedName.equals(actualName) + && (Strings.isNullOrEmpty(actualAlias) && Strings.isNullOrEmpty(expectedAlias) + || expectedAlias.equals(actualAlias)) + && expectedType.equals(actualType); + } + }; + } + + public static TypeSafeMatcher rows(Object... 
expectedObjects) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText(String.join(",", Arrays.asList(expectedObjects).toString())); + } + + @Override + protected boolean matchesSafely(JSONArray array) { + List actualObjects = new ArrayList<>(); + array.iterator().forEachRemaining(actualObjects::add); + return Arrays.asList(expectedObjects).equals(actualObjects); + } + }; + } + + public static TypeSafeMatcher columnPattern(String regex) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + return ((String) jsonObject.query("/name")).matches(regex); + } + + @Override + public void describeTo(Description description) { + description.appendText(String.format("(column_pattern=%s)", regex)); + } + }; + } + + public static TypeSafeMatcher columnName(String name) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + return jsonObject.query("/name").equals(name); + } + + @Override + public void describeTo(Description description) { + description.appendText(String.format("(name=%s)", name)); + } + }; + } + + /** Tests if a string is equal to another string, ignore the case and whitespace. */ + public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { + private final String string; + + public IsEqualIgnoreCaseAndWhiteSpace(String string) { + if (string == null) { + throw new IllegalArgumentException("Non-null value required"); + } + this.string = string; } - @SuppressWarnings("unchecked") - public static void verify(JSONArray array, Matcher... 
matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, containsInAnyOrder(matchers)); + @Override + public boolean matchesSafely(String item) { + return ignoreCase(ignoreSpaces(string)).equals(ignoreCase(ignoreSpaces(item))); } - @SafeVarargs - @SuppressWarnings("unchecked") - public static void verifyInOrder(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, contains(matchers)); + @Override + public void describeMismatchSafely(String item, Description mismatchDescription) { + mismatchDescription.appendText("was ").appendValue(item); } - @SuppressWarnings("unchecked") - public static void verifySome(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - - assertThat(matchers.length, greaterThan(0)); - for (Matcher matcher : matchers) { - assertThat(objects, hasItems(matcher)); - } + @Override + public void describeTo(Description description) { + description + .appendText("a string equal to ") + .appendValue(string) + .appendText(" ignore case and white space"); } - @SafeVarargs - public static void verifyOrder(JSONArray array, Matcher... 
matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, containsInRelativeOrder(matchers)); + public String ignoreSpaces(String toBeStripped) { + return toBeStripped.replaceAll("\\s+", "").trim(); } - public static TypeSafeMatcher schema(String expectedName, String expectedAlias, String expectedType) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText( - String.format("(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); - } - - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - String actualName = (String) jsonObject.query("/name"); - String actualAlias = (String) jsonObject.query("/alias"); - String actualType = (String) jsonObject.query("/type"); - return expectedName.equals(actualName) && - (Strings.isNullOrEmpty(actualAlias) && Strings.isNullOrEmpty(expectedAlias) || - expectedAlias.equals(actualAlias)) && - expectedType.equals(actualType); - } - }; + public String ignoreCase(String toBeLower) { + return toBeLower.toLowerCase(); } - public static TypeSafeMatcher rows(Object... 
expectedObjects) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.join(",", Arrays.asList(expectedObjects).toString())); - } - - @Override - protected boolean matchesSafely(JSONArray array) { - List actualObjects = new ArrayList<>(); - array.iterator().forEachRemaining(actualObjects::add); - return Arrays.asList(expectedObjects).equals(actualObjects); - } - }; - } - - public static TypeSafeMatcher columnPattern(String regex) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - return ((String)jsonObject.query("/name")).matches(regex); - } - - @Override - public void describeTo(Description description) { - description.appendText(String.format("(column_pattern=%s)", regex)); - } - }; - } - - public static TypeSafeMatcher columnName(String name) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - return jsonObject.query("/name").equals(name); - } - - @Override - public void describeTo(Description description) { - description.appendText(String.format("(name=%s)", name)); - } - }; - } - - - /** - * Tests if a string is equal to another string, ignore the case and whitespace. 
- */ - public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { - private final String string; - - public IsEqualIgnoreCaseAndWhiteSpace(String string) { - if (string == null) { - throw new IllegalArgumentException("Non-null value required"); - } - this.string = string; - } - - @Override - public boolean matchesSafely(String item) { - return ignoreCase(ignoreSpaces(string)).equals(ignoreCase(ignoreSpaces(item))); - } - - @Override - public void describeMismatchSafely(String item, Description mismatchDescription) { - mismatchDescription.appendText("was ").appendValue(item); - } - - @Override - public void describeTo(Description description) { - description.appendText("a string equal to ") - .appendValue(string) - .appendText(" ignore case and white space"); - } - - public String ignoreSpaces(String toBeStripped) { - return toBeStripped.replaceAll("\\s+", "").trim(); - } - - public String ignoreCase(String toBeLower) { - return toBeLower.toLowerCase(); - } - - public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedString) { - return new IsEqualIgnoreCaseAndWhiteSpace(expectedString); - } + public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedString) { + return new IsEqualIgnoreCaseAndWhiteSpace(expectedString); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java index c7b484239c..58234d73b7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static org.mockito.Matchers.any; @@ -24,172 +23,179 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test Utility which provide the 
cluster have 2 indices. - */ +/** Test Utility which provide the cluster have 2 indices. */ public class MultipleIndexClusterUtils { - public final static String INDEX_ACCOUNT_1 = "account1"; - public final static String INDEX_ACCOUNT_2 = "account2"; - public final static String INDEX_ACCOUNT_ALL = "account*"; + public static final String INDEX_ACCOUNT_1 = "account1"; + public static final String INDEX_ACCOUNT_2 = "account2"; + public static final String INDEX_ACCOUNT_ALL = "account*"; - public static String INDEX_ACCOUNT_1_MAPPING = "{\n" + - " \"field_mappings\": {\n" + - " \"mappings\": {\n" + - " \"account1\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"int\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 1,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\",\n" + - " \"aliases_version\": \"1\"\n" + - " }\n" + - "}"; + public static String INDEX_ACCOUNT_1_MAPPING = + "{\n" + + " \"field_mappings\": {\n" + + " \"mappings\": {\n" + + " \"account1\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " 
\"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"int\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 1,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + " }\n" + + "}"; - /** - * The difference with account1. - * 1. missing address. - * 2. age has different type. - * 3. projects.started_year has different type. 
- */ - public static String INDEX_ACCOUNT_2_MAPPING = "{\n" + - " \"field_mappings\": {\n" + - " \"mappings\": {\n" + - " \"account2\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 1,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\",\n" + - " \"aliases_version\": \"1\"\n" + - " }\n" + - "}"; + /** + * The difference with account1. 1. missing address. 2. age has different type. 3. + * projects.started_year has different type. 
+ */ + public static String INDEX_ACCOUNT_2_MAPPING = + "{\n" + + " \"field_mappings\": {\n" + + " \"mappings\": {\n" + + " \"account2\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 1,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\",\n" + + " \"aliases_version\": \"1\"\n" + + " }\n" + + "}"; - public static void mockMultipleIndexEnv() { - mockLocalClusterState( - Map.of(INDEX_ACCOUNT_1, buildIndexMapping(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING), - INDEX_ACCOUNT_2, buildIndexMapping(INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING), - INDEX_ACCOUNT_ALL, buildIndexMapping(Map.of(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING, - INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING)))); - } + public static void mockMultipleIndexEnv() { + mockLocalClusterState( + Map.of( + INDEX_ACCOUNT_1, + buildIndexMapping(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING), + INDEX_ACCOUNT_2, + buildIndexMapping(INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING), + INDEX_ACCOUNT_ALL, + buildIndexMapping( + Map.of( + INDEX_ACCOUNT_1, + INDEX_ACCOUNT_1_MAPPING, + INDEX_ACCOUNT_2, + INDEX_ACCOUNT_2_MAPPING)))); + } - public static void mockLocalClusterState(Map> indexMapping) { - LocalClusterState.state().setClusterService(mockClusterService(indexMapping)); - 
LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); - LocalClusterState.state().setPluginSettings(mockPluginSettings()); - } + public static void mockLocalClusterState(Map> indexMapping) { + LocalClusterState.state().setClusterService(mockClusterService(indexMapping)); + LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); + LocalClusterState.state().setPluginSettings(mockPluginSettings()); + } + public static ClusterService mockClusterService( + Map> indexMapping) { + ClusterService mockService = mock(ClusterService.class); + ClusterState mockState = mock(ClusterState.class); + Metadata mockMetaData = mock(Metadata.class); - public static ClusterService mockClusterService(Map> - indexMapping) { - ClusterService mockService = mock(ClusterService.class); - ClusterState mockState = mock(ClusterState.class); - Metadata mockMetaData = mock(Metadata.class); - - when(mockService.state()).thenReturn(mockState); - when(mockState.metadata()).thenReturn(mockMetaData); - try { - for (var entry : indexMapping.entrySet()) { - when(mockMetaData.findMappings(eq(new String[]{entry.getKey()}), any())) - .thenReturn(entry.getValue()); - } - } catch (IOException e) { - throw new IllegalStateException(e); - } - return mockService; + when(mockService.state()).thenReturn(mockState); + when(mockState.metadata()).thenReturn(mockMetaData); + try { + for (var entry : indexMapping.entrySet()) { + when(mockMetaData.findMappings(eq(new String[] {entry.getKey()}), any())) + .thenReturn(entry.getValue()); + } + } catch (IOException e) { + throw new IllegalStateException(e); } + return mockService; + } - private static Map buildIndexMapping(Map indexMapping) { - return indexMapping.entrySet().stream().collect(Collectors.toUnmodifiableMap( - Map.Entry::getKey, e -> { - try { - return IndexMetadata.fromXContent(createParser(e.getValue())).mapping(); - } catch (IOException ex) { - throw new IllegalStateException(ex); - } - })); - - } + private static Map 
buildIndexMapping(Map indexMapping) { + return indexMapping.entrySet().stream() + .collect( + Collectors.toUnmodifiableMap( + Map.Entry::getKey, + e -> { + try { + return IndexMetadata.fromXContent(createParser(e.getValue())).mapping(); + } catch (IOException ex) { + throw new IllegalStateException(ex); + } + })); + } - private static Map buildIndexMapping(String index, String mapping) { - try { - return Map.of(index, IndexMetadata.fromXContent(createParser(mapping)).mapping()); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static Map buildIndexMapping(String index, String mapping) { + try { + return Map.of(index, IndexMetadata.fromXContent(createParser(mapping)).mapping()); + } catch (IOException e) { + throw new IllegalStateException(e); } + } } From 6055c7e13ac8f763537f10bcfec0fe4c80b39146 Mon Sep 17 00:00:00 2001 From: Yury-Fridlyand Date: Mon, 21 Aug 2023 15:28:03 -0700 Subject: [PATCH 38/42] Fix `ASCII` function and groom UT for text functions. (#301) (#1895) * Fix `ASCII` function and groom UT for text functions. * Code cleanup. --------- Signed-off-by: Yury-Fridlyand --- .../sql/expression/text/TextFunction.java | 3 +- .../sql/expression/text/TextFunctionTest.java | 304 ++++++------------ 2 files changed, 95 insertions(+), 212 deletions(-) diff --git a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java index 1cf7f64867..d670843551 100644 --- a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java @@ -416,7 +416,8 @@ private static ExprValue exprLeft(ExprValue expr, ExprValue length) { } private static ExprValue exprAscii(ExprValue expr) { - return new ExprIntegerValue((int) expr.stringValue().charAt(0)); + return new ExprIntegerValue( + expr.stringValue().length() == 0 ? 
0 : (int) expr.stringValue().charAt(0)); } private static ExprValue exprLocate(ExprValue subStr, ExprValue str) { diff --git a/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java b/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java index 84ae0b844f..b58f3031b7 100644 --- a/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/text/TextFunctionTest.java @@ -6,6 +6,8 @@ package org.opensearch.sql.expression.text; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.opensearch.sql.data.model.ExprValueUtils.missingValue; import static org.opensearch.sql.data.model.ExprValueUtils.nullValue; @@ -17,13 +19,12 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; +import java.util.stream.Stream; import lombok.AllArgsConstructor; import lombok.Getter; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.model.ExprValue; @@ -31,48 +32,52 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.ExpressionTestBase; import org.opensearch.sql.expression.FunctionExpression; -import org.opensearch.sql.expression.env.Environment; -@ExtendWith(MockitoExtension.class) public class TextFunctionTest extends ExpressionTestBase { - @Mock Environment env; - - @Mock Expression nullRef; - - @Mock Expression missingRef; - - private static List 
SUBSTRING_STRINGS = - ImmutableList.of( - new SubstringInfo("", 1, 1, ""), - new SubstringInfo("Quadratically", 5, null, "ratically"), - new SubstringInfo("foobarbar", 4, null, "barbar"), - new SubstringInfo("Quadratically", 5, 6, "ratica"), - new SubstringInfo("Quadratically", 5, 600, "ratically"), - new SubstringInfo("Quadratically", 500, 1, ""), - new SubstringInfo("Quadratically", 500, null, ""), - new SubstringInfo("Sakila", -3, null, "ila"), - new SubstringInfo("Sakila", -5, 3, "aki"), - new SubstringInfo("Sakila", -4, 2, "ki"), - new SubstringInfo("Quadratically", 0, null, ""), - new SubstringInfo("Sakila", 0, 2, ""), - new SubstringInfo("Sakila", 2, 0, ""), - new SubstringInfo("Sakila", 0, 0, "")); - private static List UPPER_LOWER_STRINGS = - ImmutableList.of( - "test", " test", "test ", " test ", "TesT", "TEST", " TEST", "TEST ", " TEST ", " ", ""); - private static List STRING_PATTERN_PAIRS = - ImmutableList.of( - new StringPatternPair("Michael!", "Michael!"), - new StringPatternPair("hello", "world"), - new StringPatternPair("world", "hello")); - private static List TRIM_STRINGS = - ImmutableList.of(" test", " test", "test ", "test", " test ", "", " "); - private static List> CONCAT_STRING_LISTS = - ImmutableList.of(ImmutableList.of("hello", "world"), ImmutableList.of("123", "5325")); - private static List> CONCAT_STRING_LISTS_WITH_MANY_STRINGS = - ImmutableList.of( - ImmutableList.of("he", "llo", "wo", "rld", "!"), - ImmutableList.of("0", "123", "53", "25", "7")); + + private static Stream getStringsForSubstr() { + return Stream.of( + new SubstringInfo("", 1, 1, ""), + new SubstringInfo("Quadratically", 5, null, "ratically"), + new SubstringInfo("foobarbar", 4, null, "barbar"), + new SubstringInfo("Quadratically", 5, 6, "ratica"), + new SubstringInfo("Quadratically", 5, 600, "ratically"), + new SubstringInfo("Quadratically", 500, 1, ""), + new SubstringInfo("Quadratically", 500, null, ""), + new SubstringInfo("Sakila", -3, null, "ila"), + new 
SubstringInfo("Sakila", -5, 3, "aki"), + new SubstringInfo("Sakila", -4, 2, "ki"), + new SubstringInfo("Quadratically", 0, null, ""), + new SubstringInfo("Sakila", 0, 2, ""), + new SubstringInfo("Sakila", 2, 0, ""), + new SubstringInfo("Sakila", 0, 0, "")); + } + + private static Stream getStringsForUpperAndLower() { + return Stream.of( + "test", " test", "test ", " test ", "TesT", "TEST", " TEST", "TEST ", " TEST ", " ", ""); + } + + private static Stream getStringsForComparison() { + return Stream.of( + new StringPatternPair("Michael!", "Michael!"), + new StringPatternPair("hello", "world"), + new StringPatternPair("world", "hello")); + } + + private static Stream getStringsForTrim() { + return Stream.of(" test", " test", "test ", "test", " test ", "", " "); + } + + private static Stream> getStringsForConcat() { + return Stream.of(ImmutableList.of("hello", "world"), ImmutableList.of("123", "5325")); + } + + private static Stream> getMultipleStringsForConcat() { + return Stream.of( + ImmutableList.of("he", "llo", "wo", "rld", "!"), + ImmutableList.of("0", "123", "53", "25", "7")); + } interface SubstrSubstring { FunctionExpression getFunction(SubstringInfo strInfo); @@ -130,30 +135,11 @@ static class SubstringInfo { String res; } - @BeforeEach - public void setup() { - when(nullRef.valueOf(env)).thenReturn(nullValue()); - when(missingRef.valueOf(env)).thenReturn(missingValue()); - } - - @Test - public void substrSubstring() { - SUBSTRING_STRINGS.forEach(s -> substrSubstringTest(s, new Substr())); - SUBSTRING_STRINGS.forEach(s -> substrSubstringTest(s, new Substring())); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.substr(missingRef, DSL.literal(1)))); - assertEquals(nullValue(), eval(DSL.substr(nullRef, DSL.literal(1)))); - assertEquals(missingValue(), eval(DSL.substring(missingRef, DSL.literal(1)))); - assertEquals(nullValue(), eval(DSL.substring(nullRef, DSL.literal(1)))); - - 
when(nullRef.type()).thenReturn(INTEGER); - when(missingRef.type()).thenReturn(INTEGER); - assertEquals(missingValue(), eval(DSL.substr(DSL.literal("hello"), missingRef))); - assertEquals(nullValue(), eval(DSL.substr(DSL.literal("hello"), nullRef))); - assertEquals(missingValue(), eval(DSL.substring(DSL.literal("hello"), missingRef))); - assertEquals(nullValue(), eval(DSL.substring(DSL.literal("hello"), nullRef))); + @ParameterizedTest + @MethodSource("getStringsForSubstr") + void substrSubstring(SubstringInfo s) { + substrSubstringTest(s, new Substr()); + substrSubstringTest(s, new Substring()); } void substrSubstringTest(SubstringInfo strInfo, SubstrSubstring substrSubstring) { @@ -162,79 +148,41 @@ void substrSubstringTest(SubstringInfo strInfo, SubstrSubstring substrSubstring) assertEquals(strInfo.getRes(), eval(expr).stringValue()); } - @Test - public void ltrim() { - TRIM_STRINGS.forEach(this::ltrimString); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.ltrim(missingRef))); - assertEquals(nullValue(), eval(DSL.ltrim(nullRef))); - } - - @Test - public void rtrim() { - TRIM_STRINGS.forEach(this::rtrimString); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.ltrim(missingRef))); - assertEquals(nullValue(), eval(DSL.ltrim(nullRef))); - } - - @Test - public void trim() { - TRIM_STRINGS.forEach(this::trimString); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.ltrim(missingRef))); - assertEquals(nullValue(), eval(DSL.ltrim(nullRef))); - } - - void ltrimString(String str) { + @ParameterizedTest + @MethodSource("getStringsForTrim") + void ltrim(String str) { FunctionExpression expression = DSL.ltrim(DSL.literal(str)); assertEquals(STRING, expression.type()); assertEquals(str.stripLeading(), eval(expression).stringValue()); 
} - void rtrimString(String str) { + @ParameterizedTest + @MethodSource("getStringsForTrim") + void rtrim(String str) { FunctionExpression expression = DSL.rtrim(DSL.literal(str)); assertEquals(STRING, expression.type()); assertEquals(str.stripTrailing(), eval(expression).stringValue()); } - void trimString(String str) { + @ParameterizedTest + @MethodSource("getStringsForTrim") + void trim(String str) { FunctionExpression expression = DSL.trim(DSL.literal(str)); assertEquals(STRING, expression.type()); assertEquals(str.trim(), eval(expression).stringValue()); } - @Test - public void lower() { - UPPER_LOWER_STRINGS.forEach(this::testLowerString); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.lower(missingRef))); - assertEquals(nullValue(), eval(DSL.lower(nullRef))); - } - - @Test - public void upper() { - UPPER_LOWER_STRINGS.forEach(this::testUpperString); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.upper(missingRef))); - assertEquals(nullValue(), eval(DSL.upper(nullRef))); - } - - @Test - void concat() { - CONCAT_STRING_LISTS.forEach(this::testConcatString); - CONCAT_STRING_LISTS_WITH_MANY_STRINGS.forEach(this::testConcatMultipleString); + @ParameterizedTest + @MethodSource("getStringsForConcat") + void concat(List strings) { + testConcatString(strings); + // Since `concat` isn't wrapped with `nullMissingHandling` (which has its own tests), + // we have to test there case with NULL and MISSING values + Expression nullRef = mock(Expression.class); + Expression missingRef = mock(Expression.class); + when(nullRef.valueOf(any())).thenReturn(nullValue()); + when(missingRef.valueOf(any())).thenReturn(missingValue()); when(nullRef.type()).thenReturn(STRING); when(missingRef.type()).thenReturn(STRING); assertEquals(missingValue(), eval(DSL.concat(missingRef, DSL.literal("1")))); @@ -244,43 +192,10 @@ 
void concat() { assertEquals(nullValue(), eval(DSL.concat(DSL.literal("1"), nullRef))); } - @Test - void concat_ws() { - CONCAT_STRING_LISTS.forEach(s -> testConcatString(s, ",")); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals( - missingValue(), eval(DSL.concat_ws(missingRef, DSL.literal("1"), DSL.literal("1")))); - assertEquals(nullValue(), eval(DSL.concat_ws(nullRef, DSL.literal("1"), DSL.literal("1")))); - assertEquals( - missingValue(), eval(DSL.concat_ws(DSL.literal("1"), missingRef, DSL.literal("1")))); - assertEquals(nullValue(), eval(DSL.concat_ws(DSL.literal("1"), nullRef, DSL.literal("1")))); - assertEquals( - missingValue(), eval(DSL.concat_ws(DSL.literal("1"), DSL.literal("1"), missingRef))); - assertEquals(nullValue(), eval(DSL.concat_ws(DSL.literal("1"), DSL.literal("1"), nullRef))); - } - - @Test - void length() { - UPPER_LOWER_STRINGS.forEach(this::testLengthString); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.length(missingRef))); - assertEquals(nullValue(), eval(DSL.length(nullRef))); - } - - @Test - void strcmp() { - STRING_PATTERN_PAIRS.forEach(this::testStcmpString); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.strcmp(missingRef, missingRef))); - assertEquals(nullValue(), eval(DSL.strcmp(nullRef, nullRef))); - assertEquals(missingValue(), eval(DSL.strcmp(nullRef, missingRef))); - assertEquals(missingValue(), eval(DSL.strcmp(missingRef, nullRef))); + @ParameterizedTest + @MethodSource("getStringsForConcat") + void concat_ws(List strings) { + testConcatString(strings, ","); } @Test @@ -302,14 +217,6 @@ void right() { expression = DSL.right(DSL.literal(""), DSL.literal(10)); assertEquals(STRING, expression.type()); assertEquals("", eval(expression).value()); - - when(nullRef.type()).thenReturn(STRING); - 
when(missingRef.type()).thenReturn(INTEGER); - assertEquals(missingValue(), eval(DSL.right(nullRef, missingRef))); - assertEquals(nullValue(), eval(DSL.right(nullRef, DSL.literal(new ExprIntegerValue(1))))); - - when(nullRef.type()).thenReturn(INTEGER); - assertEquals(nullValue(), eval(DSL.right(DSL.literal(new ExprStringValue("value")), nullRef))); } @Test @@ -331,14 +238,6 @@ void left() { expression = DSL.left(DSL.literal(""), DSL.literal(10)); assertEquals(STRING, expression.type()); assertEquals("", eval(expression).value()); - - when(nullRef.type()).thenReturn(STRING); - when(missingRef.type()).thenReturn(INTEGER); - assertEquals(missingValue(), eval(DSL.left(nullRef, missingRef))); - assertEquals(nullValue(), eval(DSL.left(nullRef, DSL.literal(new ExprIntegerValue(1))))); - - when(nullRef.type()).thenReturn(INTEGER); - assertEquals(nullValue(), eval(DSL.left(DSL.literal(new ExprStringValue("value")), nullRef))); } @Test @@ -346,11 +245,7 @@ void ascii() { FunctionExpression expression = DSL.ascii(DSL.literal(new ExprStringValue("hello"))); assertEquals(INTEGER, expression.type()); assertEquals(104, eval(expression).integerValue()); - - when(nullRef.type()).thenReturn(STRING); - assertEquals(nullValue(), eval(DSL.ascii(nullRef))); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.ascii(missingRef))); + assertEquals(0, DSL.ascii(DSL.literal("")).valueOf().integerValue()); } @Test @@ -362,14 +257,6 @@ void locate() { expression = DSL.locate(DSL.literal("world"), DSL.literal("helloworldworld"), DSL.literal(7)); assertEquals(INTEGER, expression.type()); assertEquals(11, eval(expression).integerValue()); - - when(nullRef.type()).thenReturn(STRING); - assertEquals(nullValue(), eval(DSL.locate(nullRef, DSL.literal("hello")))); - assertEquals(nullValue(), eval(DSL.locate(nullRef, DSL.literal("hello"), DSL.literal(1)))); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.locate(missingRef, 
DSL.literal("hello")))); - assertEquals( - missingValue(), eval(DSL.locate(missingRef, DSL.literal("hello"), DSL.literal(1)))); } @Test @@ -382,11 +269,6 @@ void position() { expression = DSL.position(DSL.literal("abc"), DSL.literal("hello world")); assertEquals(INTEGER, expression.type()); assertEquals(0, eval(expression).integerValue()); - - when(nullRef.type()).thenReturn(STRING); - assertEquals(nullValue(), eval(DSL.position(nullRef, DSL.literal("hello")))); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.position(missingRef, DSL.literal("hello")))); } @Test @@ -395,11 +277,6 @@ void replace() { DSL.replace(DSL.literal("helloworld"), DSL.literal("world"), DSL.literal("opensearch")); assertEquals(STRING, expression.type()); assertEquals("helloopensearch", eval(expression).stringValue()); - - when(nullRef.type()).thenReturn(STRING); - assertEquals(nullValue(), eval(DSL.replace(nullRef, DSL.literal("a"), DSL.literal("b")))); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.replace(missingRef, DSL.literal("a"), DSL.literal("b")))); } @Test @@ -407,11 +284,6 @@ void reverse() { FunctionExpression expression = DSL.reverse(DSL.literal("abcde")); assertEquals(STRING, expression.type()); assertEquals("edcba", eval(expression).stringValue()); - - when(nullRef.type()).thenReturn(STRING); - assertEquals(nullValue(), eval(DSL.reverse(nullRef))); - when(missingRef.type()).thenReturn(STRING); - assertEquals(missingValue(), eval(DSL.reverse(missingRef))); } void testConcatString(List strings) { @@ -435,6 +307,8 @@ void testConcatString(List strings, String delim) { assertEquals(expected, eval(expression).stringValue()); } + @ParameterizedTest + @MethodSource("getMultipleStringsForConcat") void testConcatMultipleString(List strings) { String expected = null; if (strings.stream().noneMatch(Objects::isNull)) { @@ -452,13 +326,17 @@ void testConcatMultipleString(List strings) { assertEquals(expected, 
eval(expression).stringValue()); } - void testLengthString(String str) { + @ParameterizedTest + @MethodSource("getStringsForUpperAndLower") + void length(String str) { FunctionExpression expression = DSL.length(DSL.literal(new ExprStringValue(str))); assertEquals(INTEGER, expression.type()); assertEquals(str.getBytes().length, eval(expression).integerValue()); } - void testStcmpString(StringPatternPair stringPatternPair) { + @ParameterizedTest + @MethodSource("getStringsForComparison") + void strcmp(StringPatternPair stringPatternPair) { FunctionExpression expression = DSL.strcmp( DSL.literal(new ExprStringValue(stringPatternPair.getStr())), @@ -467,19 +345,23 @@ void testStcmpString(StringPatternPair stringPatternPair) { assertEquals(stringPatternPair.strCmpTest(), eval(expression).integerValue()); } - void testLowerString(String str) { + @ParameterizedTest + @MethodSource("getStringsForUpperAndLower") + void lower(String str) { FunctionExpression expression = DSL.lower(DSL.literal(new ExprStringValue(str))); assertEquals(STRING, expression.type()); assertEquals(stringValue(str.toLowerCase()), eval(expression)); } - void testUpperString(String str) { + @ParameterizedTest + @MethodSource("getStringsForUpperAndLower") + void upper(String str) { FunctionExpression expression = DSL.upper(DSL.literal(new ExprStringValue(str))); assertEquals(STRING, expression.type()); assertEquals(stringValue(str.toUpperCase()), eval(expression)); } private ExprValue eval(Expression expression) { - return expression.valueOf(env); + return expression.valueOf(); } } From 8f650e8c68eb4331a8d88f91aa78394dda18711e Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Mon, 21 Aug 2023 15:28:37 -0700 Subject: [PATCH 39/42] Developer guide update with Spotless details (#2000) * Added spotless java format rules to dev guide Signed-off-by: Mitchell Gale * Fixed formatting of URLS in DEVELOPER_GUIDE.rst Signed-off-by: Mitchell Gale * addressed PR comments Signed-off-by: Mitchell Gale * addressed PR 
comments Signed-off-by: Mitchell Gale * addressed PR comments Signed-off-by: Mitchell Gale * Update DEVELOPER_GUIDE.rst Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Update DEVELOPER_GUIDE.rst Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Update DEVELOPER_GUIDE.rst Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Update DEVELOPER_GUIDE.rst Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * Update DEVELOPER_GUIDE.rst Co-authored-by: Yury-Fridlyand Signed-off-by: Mitchell Gale * fixed link format Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale Signed-off-by: Mitchell Gale Co-authored-by: Yury-Fridlyand --- DEVELOPER_GUIDE.rst | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/DEVELOPER_GUIDE.rst b/DEVELOPER_GUIDE.rst index 257e3bb8f9..8c23d98d91 100644 --- a/DEVELOPER_GUIDE.rst +++ b/DEVELOPER_GUIDE.rst @@ -185,14 +185,31 @@ Note that other related project code has already merged into this single reposit Code Convention --------------- -We’re integrated Checkstyle plugin into Gradle build: https://github.com/opensearch-project/sql/blob/main/config/checkstyle/google_checks.xml. So any violation will fail the build. You need to identify the offending code from Gradle error message and fix them and rerun the Gradle build. Here are the highlight of some Checkstyle rules: +Java files in the OpenSearch codebase are formatted with the Eclipse JDT formatter, using the `Spotless Gradle `_ plugin. This plugin is configured in the project `./gradle.properties`. -* 2 spaces indentation. -* No line starts with tab character in source file. -* Line width <= 100 characters. -* Wildcard imports: You can enforce single import by configuring your IDE. Instructions for Intellij IDEA: https://www.jetbrains.com/help/idea/creating-and-optimizing-imports.html#disable-wildcard-imports. -* Operator needs to wrap at next line. 
+The formatting check can be run explicitly with:: +./gradlew spotlessJavaCheck + +The code can be formatted with:: + +./gradlew spotlessApply + +These tasks can also be run for specific modules, e.g.:: + +./gradlew server:spotlessJavaCheck + +For more information on the spotless for the OpenSearch project please see `https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#java-language-formatting-guidelines `_. + +Java files are formatted using `Spotless `_ conforming to `Google Java Format `_. + * - New line at end of file + * - No unused import statements + * - Fix import order to be alphabetical with static imports first (one block for static and one for non-static imports) + * - Max line length is 100 characters (does not apply to import statements) + * - Line spacing is 2 spaces + * - Javadocs should be properly formatted in accordance to `Javadoc guidelines `_ + * - Javadoc format can be maintained by wrapping javadoc with `
` HTML tags
+   * - Strings can be formatted on multiple lines with a `+` with the correct indentation for the string.
 
 Building and Running Tests
 ==========================

From b3fd4c4154080ab271c33ef4d5d12b67aa2d713b Mon Sep 17 00:00:00 2001
From: Vamsi Manohar 
Date: Tue, 22 Aug 2023 01:20:50 -0700
Subject: [PATCH 40/42] Fixed exception when datasource is updated with
 existing configuration (#2006)

Signed-off-by: Vamsi Manohar 
---
 .../OpenSearchDataSourceMetadataStorage.java   |  3 ++-
 ...penSearchDataSourceMetadataStorageTest.java | 18 ++++++++++++++++++
 2 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java
index 73eb297fea..4eb16924c4 100644
--- a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java
+++ b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java
@@ -150,7 +150,8 @@ public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) {
       throw new RuntimeException(e);
     }
 
-    if (updateResponse.getResult().equals(DocWriteResponse.Result.UPDATED)) {
+    if (updateResponse.getResult().equals(DocWriteResponse.Result.UPDATED)
+        || updateResponse.getResult().equals(DocWriteResponse.Result.NOOP)) {
       LOG.debug("DatasourceMetadata : {}  successfully updated", dataSourceMetadata.getName());
     } else {
       throw new RuntimeException(
diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java
index cc663d56e6..7d41737b2d 100644
--- a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java
+++ b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java
@@ -451,6 +451,24 @@ public void testUpdateDataSourceMetadata() {
     Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext();
   }
 
+  @Test
+  public void testUpdateDataSourceMetadataWithNOOP() {
+    Mockito.when(encryptor.encrypt("secret_key")).thenReturn("secret_key");
+    Mockito.when(encryptor.encrypt("access_key")).thenReturn("access_key");
+    Mockito.when(client.update(ArgumentMatchers.any())).thenReturn(updateResponseActionFuture);
+    Mockito.when(updateResponseActionFuture.actionGet()).thenReturn(updateResponse);
+    Mockito.when(updateResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP);
+    DataSourceMetadata dataSourceMetadata = getDataSourceMetadata();
+
+    this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata(dataSourceMetadata);
+
+    Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key");
+    Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key");
+    Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any());
+    Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any());
+    Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext();
+  }
+
   @Test
   public void testUpdateDataSourceMetadataWithNotFoundResult() {
     Mockito.when(encryptor.encrypt("secret_key")).thenReturn("secret_key");

From 09d222bd83d6ee1f2ab0933b09499cea92d71b5a Mon Sep 17 00:00:00 2001
From: Mitchell Gale 
Date: Tue, 22 Aug 2023 11:15:21 -0700
Subject: [PATCH 41/42] [Spotless] Applying Google Code Format for entire
 project #22 (#2005)

* Spotless apply on entire project

Signed-off-by: Mitchell Gale 

* Spotless apply

Signed-off-by: Mitchell Gale 

* Added spotless in sql-test-workflow.yml

Signed-off-by: Mitchell Gale 

* fixed error log message in sql-test-workflow.yml

Signed-off-by: Mitchell Gale 

* Added spotless to plugins: common, core, doctest, integ-test, legacy, opensearch, plugin, ppl, protocol, sql

Signed-off-by: Mitchell Gale 

* Update common/build.gradle

Signed-off-by: Mitchell Gale 

* deleted duplicated code in build gradle integ test

Signed-off-by: Mitchell Gale 

---------

Signed-off-by: Mitchell Gale 
Signed-off-by: Mitchell Gale 
---
 .../workflows/sql-test-and-build-workflow.yml |    1 -
 .github/workflows/sql-test-workflow.yml       |   28 +-
 DEVELOPER_GUIDE.rst                           |   10 +-
 build.gradle                                  |   31 +-
 common/build.gradle                           |   25 +-
 config/checkstyle/checkstyle.xml              |  120 -
 config/checkstyle/google_checks.xml           |  316 --
 config/checkstyle/suppressions.xml            |   12 -
 core/build.gradle                             |   26 +-
 datasources/build.gradle                      |    3 -
 doctest/build.gradle                          |   19 +
 integ-test/build.gradle                       |   25 +-
 legacy/build.gradle                           |   20 +-
 .../sql/legacy/antlr/SimilarSymbols.java      |   82 +-
 .../sql/legacy/antlr/SqlAnalysisConfig.java   |   81 +-
 .../legacy/antlr/SqlAnalysisException.java    |   11 +-
 .../semantic/SemanticAnalysisException.java   |   12 +-
 .../antlr/semantic/scope/SemanticContext.java |   66 +-
 .../legacy/antlr/semantic/scope/Symbol.java   |   38 +-
 .../antlr/semantic/scope/SymbolTable.java     |  133 +-
 .../antlr/semantic/scope/TypeSupplier.java    |   58 +-
 .../sql/legacy/antlr/semantic/types/Type.java |  107 +-
 .../antlr/semantic/types/TypeExpression.java  |  173 +-
 .../semantic/types/operator/SetOperator.java  |   71 +-
 .../semantic/visitor/SemanticAnalyzer.java    |  237 +-
 .../antlr/semantic/visitor/TypeChecker.java   |  353 ++-
 .../syntax/SyntaxAnalysisErrorListener.java   |   78 +-
 .../antlr/syntax/SyntaxAnalysisException.java |   11 +-
 .../visitor/AntlrSqlParseTreeVisitor.java     |   36 +-
 .../visitor/UnsupportedSemanticVerifier.java  |  110 +-
 .../opensearch/sql/legacy/cursor/Cursor.java  |   10 +-
 .../sql/legacy/domain/Condition.java          |   16 +-
 .../opensearch/sql/legacy/domain/Having.java  |   64 +-
 .../sql/legacy/domain/TableOnJoinSelect.java  |   51 +-
 .../opensearch/sql/legacy/domain/Where.java   |   89 +-
 .../legacy/esdomain/LocalClusterState.java    |    9 +-
 .../esdomain/mapping/FieldMappings.java       |    2 +
 .../esdomain/mapping/IndexMappings.java       |    2 +
 .../SQLFeatureDisabledException.java          |   10 +-
 .../SqlFeatureNotImplementedException.java    |   19 +-
 .../legacy/exception/SqlParseException.java   |   11 +-
 .../sql/legacy/executor/csv/CSVResult.java    |   23 +-
 .../executor/cursor/CursorResultExecutor.java |   29 +-
 .../executor/format/SelectResultSet.java      |  165 +-
 .../legacy/executor/format/ShowResultSet.java |  105 +-
 .../legacy/executor/multi/UnionExecutor.java  |  152 +-
 .../core/builder/UnaryExpressionBuilder.java  |   46 +-
 .../core/operator/UnaryScalarOperator.java    |   54 +-
 .../sql/legacy/parser/NestedType.java         |   46 +-
 .../sql/legacy/parser/SQLOdbcExpr.java        |   64 +-
 .../parser/SQLParensIdentifierExpr.java       |   23 +-
 .../sql/legacy/parser/SqlParser.java          |  966 +++---
 .../sql/legacy/parser/SubQueryExpression.java |   63 +-
 .../sql/legacy/parser/SubQueryParser.java     |  142 +-
 .../sql/legacy/parser/WhereParser.java        | 1268 ++++----
 .../sql/legacy/plugin/RestSQLQueryAction.java |    7 +-
 .../sql/legacy/query/ShowQueryAction.java     |   30 +-
 ...SqlElasticDeleteByQueryRequestBuilder.java |   63 +-
 .../query/SqlElasticRequestBuilder.java       |   13 +-
 .../query/SqlOpenSearchRequestBuilder.java    |   65 +-
 .../query/join/TableInJoinRequestBuilder.java |   78 +-
 .../converter/SQLAggregationParser.java       |  477 +--
 .../SQLExprToExpressionConverter.java         |  145 +-
 .../converter/SQLToOperatorConverter.java     |   75 +-
 .../query/planner/core/QueryParams.java       |   24 +-
 .../query/planner/logical/node/Join.java      |   36 +-
 .../query/planner/logical/node/Sort.java      |   59 +-
 .../query/planner/logical/node/TableScan.java |   71 +-
 .../query/planner/logical/node/Top.java       |   78 +-
 .../physical/node/join/JoinAlgorithm.java     |   29 +-
 .../physical/node/scroll/SearchHitRow.java    |    2 +
 .../legacy/query/planner/resource/Stats.java  |   84 +-
 .../resource/monitor/TotalMemoryMonitor.java  |   56 +-
 .../sql/legacy/request/SqlRequest.java        |  176 +-
 .../sql/legacy/request/SqlRequestFactory.java |  223 +-
 .../sql/legacy/request/SqlRequestParam.java   |   85 +-
 .../sql/legacy/rewriter/alias/Table.java      |   63 +-
 .../alias/TableAliasPrefixRemoveRule.java     |  134 +-
 .../identifier/UnquoteIdentifierRule.java     |   86 +-
 .../legacy/rewriter/join/JoinRewriteRule.java |    2 +
 .../matchtoterm/TermFieldRewriter.java        |  395 ++-
 .../rewriter/matchtoterm/TermFieldScope.java  |   92 +-
 .../matchtoterm/VerificationException.java    |   15 +-
 .../sql/legacy/rewriter/nestedfield/From.java |   46 +-
 .../rewriter/nestedfield/Identifier.java      |    2 +
 .../nestedfield/NestedFieldProjection.java    |   45 +-
 .../nestedfield/NestedFieldRewriter.java      |    2 +
 .../rewriter/nestedfield/SQLClause.java       |  119 +-
 .../legacy/rewriter/nestedfield/Where.java    |  197 +-
 .../rewriter/ordinal/OrdinalRewriterRule.java |   39 +-
 .../rewriter/parent/SQLExprParentSetter.java  |   47 +-
 .../parent/SQLExprParentSetterRule.java       |   21 +-
 .../rewriter/subquery/NestedQueryContext.java |   28 +-
 .../subquery/SubQueryRewriteRule.java         |   73 +-
 .../rewriter/subquery/SubQueryRewriter.java   |  113 +-
 .../subquery/rewriter/InRewriter.java         |   70 +-
 .../rewriter/NestedExistsRewriter.java        |    1 +
 .../rewriter/SubqueryAliasRewriter.java       |  199 +-
 .../legacy/spatial/SpatialParamsFactory.java  |  172 +-
 .../legacy/spatial/WktToGeoJsonConverter.java |  285 +-
 .../sql/legacy/utils/SQLFunctions.java        | 2137 +++++++------
 .../sql/legacy/utils/StringUtils.java         |  177 +-
 .../org/opensearch/sql/legacy/utils/Util.java |  407 +--
 .../legacy/antlr/SymbolSimilarityTest.java    |   70 +-
 .../sql/legacy/antlr/SyntaxAnalysisTest.java  |  222 +-
 ...SemanticAnalyzerAggregateFunctionTest.java |  279 +-
 .../semantic/SemanticAnalyzerBasicTest.java   | 1080 +++----
 .../semantic/SemanticAnalyzerConfigTest.java  |   90 +-
 .../SemanticAnalyzerConstantTest.java         |   18 +-
 .../SemanticAnalyzerESScalarFunctionTest.java |   84 +-
 .../SemanticAnalyzerFieldTypeTest.java        |  131 +-
 .../SemanticAnalyzerFromClauseTest.java       |  338 +-
 .../SemanticAnalyzerIdentifierTest.java       |  305 +-
 .../SemanticAnalyzerMultiQueryTest.java       |  158 +-
 .../SemanticAnalyzerOperatorTest.java         |   99 +-
 .../SemanticAnalyzerScalarFunctionTest.java   |  453 ++-
 .../SemanticAnalyzerSubqueryTest.java         |  177 +-
 .../semantic/SemanticAnalyzerTestBase.java    |   69 +-
 .../antlr/semantic/SemanticAnalyzerTests.java |   30 +-
 .../semantic/scope/SemanticContextTest.java   |   57 +-
 .../antlr/semantic/scope/SymbolTableTest.java |  100 +-
 .../semantic/scope/TypeSupplierTest.java      |   42 +-
 .../semantic/types/TypeExpressionTest.java    |   78 +-
 .../legacy/executor/format/ResultSetTest.java |   29 +-
 .../alias/TableAliasPrefixRemoveRuleTest.java |  209 +-
 .../sql/legacy/rewriter/alias/TableTest.java  |   38 +-
 .../unittest/NestedFieldRewriterTest.java     |   13 +-
 .../unittest/SqlRequestFactoryTest.java       |  347 +-
 .../legacy/unittest/SqlRequestParamTest.java  |   77 +-
 .../legacy/unittest/StringOperatorsTest.java  |  343 +-
 .../unittest/WhereWithBoolConditionTest.java  |   93 +-
 .../expression/core/UnaryExpressionTest.java  |  199 +-
 .../legacy/unittest/parser/SqlParserTest.java | 2779 +++++++++--------
 .../unittest/parser/SubQueryParserTest.java   |  255 +-
 .../converter/SQLAggregationParserTest.java   |  681 ++--
 .../SQLExprToExpressionConverterTest.java     |  236 +-
 .../converter/SQLToOperatorConverterTest.java |   79 +-
 .../identifier/UnquoteIdentifierRuleTest.java |  128 +-
 .../parent/SQLExprParentSetterRuleTest.java   |   14 +-
 .../parent/SQLExprParentSetterTest.java       |   54 +-
 .../subquery/ExistsSubQueryRewriterTest.java  |    7 +-
 .../subquery/SubQueryRewriteRuleTest.java     |  100 +-
 .../subquery/SubQueryRewriterTestBase.java    |   27 +-
 .../rewriter/SubqueryAliasRewriterTest.java   |  251 +-
 .../rewriter/term/TermFieldRewriterTest.java  |  175 +-
 .../spatial/WktToGeoJsonConverterTest.java    |  363 ++-
 .../unittest/utils/SQLFunctionsTest.java      |  126 +-
 .../unittest/utils/StringUtilsTest.java       |   81 +-
 .../sql/legacy/unittest/utils/UtilTest.java   |   93 +-
 .../sql/legacy/util/SqlExplainUtils.java      |   25 +-
 .../sql/legacy/util/SqlParserUtils.java       |   34 +-
 .../opensearch/sql/legacy/util/TestUtils.java | 1530 ++++-----
 .../sql/legacy/util/TestsConstants.java       |   78 +-
 opensearch/build.gradle                       |   20 +-
 plugin/build.gradle                           |   22 +-
 ppl/build.gradle                              |   24 +-
 prometheus/build.gradle                       |    3 -
 protocol/build.gradle                         |   24 +-
 spark/build.gradle                            |    3 -
 sql/build.gradle                              |   22 +-
 160 files changed, 12488 insertions(+), 12539 deletions(-)
 delete mode 100644 config/checkstyle/checkstyle.xml
 delete mode 100644 config/checkstyle/google_checks.xml
 delete mode 100644 config/checkstyle/suppressions.xml

diff --git a/.github/workflows/sql-test-and-build-workflow.yml b/.github/workflows/sql-test-and-build-workflow.yml
index 87256e6175..fecfe7adc2 100644
--- a/.github/workflows/sql-test-and-build-workflow.yml
+++ b/.github/workflows/sql-test-and-build-workflow.yml
@@ -11,7 +11,6 @@ on:
       - '!sql-jdbc/**'
       - '**gradle*'
       - '**lombok*'
-      - '**checkstyle*'
       - 'integ-test/**'
       - '**/*.jar'
       - '**/*.pom'
diff --git a/.github/workflows/sql-test-workflow.yml b/.github/workflows/sql-test-workflow.yml
index cdc08c7480..9ca27dffaf 100644
--- a/.github/workflows/sql-test-workflow.yml
+++ b/.github/workflows/sql-test-workflow.yml
@@ -31,24 +31,16 @@ jobs:
     - name: Run tests
       id: tests
       run: |
-        # checkstyle
-        ./gradlew :opensearch:checkstyleMain              || echo "* Checkstyle failed for opensearch/src" > report.log
-        ./gradlew :opensearch:checkstyleTest              || echo "* Checkstyle failed for opensearch/test" >> report.log
-        ./gradlew :sql:checkstyleMain                     || echo "* Checkstyle failed for sql/src" >> report.log
-        ./gradlew :sql:checkstyleTest                     || echo "* Checkstyle failed for sql/test" >> report.log
-        ./gradlew :ppl:checkstyleMain                     || echo "* Checkstyle failed for ppl/src" >> report.log
-        ./gradlew :ppl:checkstyleTest                     || echo "* Checkstyle failed for ppl/test" >> report.log
-        ./gradlew :core:checkstyleMain                    || echo "* Checkstyle failed for core/src" >> report.log
-        ./gradlew :core:checkstyleTest                    || echo "* Checkstyle failed for core/test" >> report.log
-        ./gradlew :common:checkstyleMain                  || echo "* Checkstyle failed for common/src" >> report.log
-        ./gradlew :common:checkstyleTest                  || echo "* Checkstyle failed for common/test" >> report.log
-        ./gradlew :legacy:checkstyleMain                  || echo "* Checkstyle failed for legacy/src" >> report.log
-        ./gradlew :legacy:checkstyleTest                  || echo "* Checkstyle failed for legacy/test" >> report.log
-        ./gradlew :protocol:checkstyleMain                || echo "* Checkstyle failed for protocol/src" >> report.log
-        ./gradlew :protocol:checkstyleTest                || echo "* Checkstyle failed for protocol/test" >> report.log
-        ./gradlew :opensearch-sql-plugin:checkstyleMain   || echo "* Checkstyle failed for plugin/src" >> report.log
-        ./gradlew :opensearch-sql-plugin:checkstyleTest   || echo "* Checkstyle failed for plugin/test" >> report.log
-        # Add checkstyle for `integ-test` when fixed
+        # Spotless
+        ./gradlew :opensearch:spotlessCheck              || echo "* Spotless failed for opensearch" > report.log
+        ./gradlew :sql:spotlessCheck                     || echo "* Spotless failed for sql" >> report.log
+        ./gradlew :ppl:spotlessCheck                     || echo "* Spotless failed for ppl" >> report.log
+        ./gradlew :core:spotlessCheck                    || echo "* Spotless failed for core" >> report.log
+        ./gradlew :common:spotlessCheck                  || echo "* Spotless failed for common" >> report.log
+        ./gradlew :legacy:spotlessCheck                  || echo "* Spotless failed for legacy" >> report.log
+        ./gradlew :protocol:spotlessCheck                || echo "* Spotless failed for protocol" >> report.log
+        ./gradlew :opensearch-sql-plugin:spotlessCheck   || echo "* Spotless failed for plugin" >> report.log
+        ./gradlew :integ-test:spotlessCheck              || echo "* Spotless failed for integ-test" >> report.log
         # Unit tests
         ./gradlew :opensearch:test                        || echo "* Unit tests failed for opensearch" >> report.log
         ./gradlew :ppl:test                               || echo "* Unit tests failed for sql" >> report.log
diff --git a/DEVELOPER_GUIDE.rst b/DEVELOPER_GUIDE.rst
index 8c23d98d91..c0d2f85668 100644
--- a/DEVELOPER_GUIDE.rst
+++ b/DEVELOPER_GUIDE.rst
@@ -127,7 +127,6 @@ The plugin codebase is in standard layout of Gradle project::
    ├── THIRD-PARTY
    ├── build.gradle
    ├── config
-   │   └── checkstyle
    ├── docs
    │   ├── attributions.md
    │   ├── category.json
@@ -170,7 +169,6 @@ Here are sub-folders (Gradle modules) for plugin source code:
 Here are other files and sub-folders that you are likely to touch:
 
 - ``build.gradle``: Gradle build script.
-- ``config``: only Checkstyle configuration files for now.
 - ``docs``: documentation for developers and reference manual for users.
 - ``doc-test``: code that run .rst docs in ``docs`` folder by Python doctest library.
 
@@ -189,7 +187,7 @@ Java files in the OpenSearch codebase are formatted with the Eclipse JDT formatt
 
 The formatting check can be run explicitly with::
 
-./gradlew spotlessJavaCheck
+./gradlew spotlessCheck
 
 The code can be formatted with::
 
@@ -197,7 +195,7 @@ The code can be formatted with::
 
 These tasks can also be run for specific modules, e.g.::
 
-./gradlew server:spotlessJavaCheck
+./gradlew server:spotlessCheck
 
 For more information on the spotless for the OpenSearch project please see `https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#java-language-formatting-guidelines `_.
 
@@ -230,9 +228,7 @@ Most of the time you just need to run ./gradlew build which will make sure you p
    * - ./gradlew generateGrammarSource
      - (Re-)Generate ANTLR parser from grammar file.
    * - ./gradlew compileJava
-     - Compile all Java source files. 
-   * - ./gradlew checkstyle
-     - Run all checks according to Checkstyle configuration.
+     - Compile all Java source files.
    * - ./gradlew test
      - Run all unit tests.
    * - ./gradlew :integ-test:integTest
diff --git a/build.gradle b/build.gradle
index 2ab7abc42a..ebe9291e23 100644
--- a/build.gradle
+++ b/build.gradle
@@ -62,7 +62,6 @@ buildscript {
 plugins {
     id 'nebula.ospackage' version "8.3.0"
     id 'java-library'
-    id 'checkstyle'
     id "io.freefair.lombok" version "6.4.0"
     id 'jacoco'
     id 'com.diffplug.spotless' version '6.19.0'
@@ -80,22 +79,10 @@ repositories {
     maven { url 'https://jitpack.io' }
 }
 
-// Spotless checks will be added as PRs are applied to resolve each style issue is approved.
 spotless {
     java {
         target fileTree('.') {
-            include 'datasources/**/*.java',
-                    'core/**/*.java',
-                    'protocol/**/*.java',
-                    'prometheus/**/*.java',
-                    'sql/**/*.java',
-                    'common/**/*.java',
-                    'spark/**/*.java',
-                    'plugin/**/*.java',
-                    'ppl/**/*.java',
-                    'integ-test/**/*java',
-                    'core/**/*.java',
-                    'opensearch/**/*.java'
+            include '**/*.java'
             exclude '**/build/**', '**/build-*/**'
         }
         importOrder()
@@ -194,22 +181,6 @@ jacocoTestCoverageVerification {
 }
 check.dependsOn jacocoTestCoverageVerification
 
-// TODO: fix code style in main and test source code
-allprojects {
-    apply plugin: 'checkstyle'
-    checkstyle {
-        configFile rootProject.file("config/checkstyle/google_checks.xml")
-        toolVersion "10.3.2"
-        configProperties = [
-                "org.checkstyle.google.suppressionfilter.config": rootProject.file("config/checkstyle/suppressions.xml")]
-        ignoreFailures = false
-    }
-}
-checkstyle {
-    configFile file("config/checkstyle/checkstyle.xml")
-}
-checkstyleMain.ignoreFailures = false
-checkstyleTest.ignoreFailures = true
 
 configurations.all {
     resolutionStrategy.force 'junit:junit:4.13.2'
diff --git a/common/build.gradle b/common/build.gradle
index 25cdcd6566..d27e213db1 100644
--- a/common/build.gradle
+++ b/common/build.gradle
@@ -25,17 +25,13 @@
 plugins {
     id 'java-library'
     id "io.freefair.lombok"
+    id 'com.diffplug.spotless' version '6.19.0'
 }
 
 repositories {
     mavenCentral()
 }
 
-// Being ignored as a temporary measure before being removed in favour of
-// spotless https://github.com/opensearch-project/sql/issues/1101
-checkstyleTest.ignoreFailures = true
-checkstyleMain.ignoreFailures = true
-
 dependencies {
     api "org.antlr:antlr4-runtime:4.7.1"
     api group: 'com.google.guava', name: 'guava', version: '32.0.1-jre'
@@ -68,3 +64,22 @@ configurations.all {
     resolutionStrategy.force "joda-time:joda-time:2.10.12"
     resolutionStrategy.force "org.slf4j:slf4j-api:1.7.36"
 }
+
+spotless {
+    java {
+        target fileTree('.') {
+            include '**/*.java'
+            exclude '**/build/**', '**/build-*/**'
+        }
+        importOrder()
+// Needs https://github.com/opensearch-project/sql/issues/1893 to be addressed first
+//        licenseHeader("/*\n" +
+//                " * Copyright OpenSearch Contributors\n" +
+//                " * SPDX-License-Identifier: Apache-2.0\n" +
+//                " */\n\n")
+        removeUnusedImports()
+        trimTrailingWhitespace()
+        endWithNewline()
+        googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format')
+    }
+}
diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml
deleted file mode 100644
index 3d0e8f074d..0000000000
--- a/config/checkstyle/checkstyle.xml
+++ /dev/null
@@ -1,120 +0,0 @@
-
-
-
-
-  
-
-  
-    
-  
-
-  
-  
-    
-    
-    
-    
-  
-
-  
-  
-    
-    
-    
-    
-  
-
-  
-  
-    
-  
-
-  
-    
-      
-      
-    
-
-    
-      
-    
-
-    
-
-    
-    
-
-    
-    
-    
-    
-    
-    
-    
-    
-    
-    
-
-    
-
-    
-
-    
-
-    
-      
-    
-
-    
-    
-      
-      
-    
-
-    
-      
-      
-    
-
-    
-    
-    
-    
-    
-
-    
-    
-    
-    
-  
-
diff --git a/config/checkstyle/google_checks.xml b/config/checkstyle/google_checks.xml
deleted file mode 100644
index 12c90f8495..0000000000
--- a/config/checkstyle/google_checks.xml
+++ /dev/null
@@ -1,316 +0,0 @@
-
-
-
-
-
-
-  
-
-  
-
-  
-  
-  
-  
-    
-  
-  
-  
-    
-    
-  
-  
-  
-  
-    
-  
-
-  
-    
-    
-  
-  
-  
-    
-    
-    
-      
-      
-      
-    
-    
-      
-      
-      
-    
-    
-    
-    
-      
-    
-    
-      
-      
-    
-    
-      
-    
-    
-      
-    
-    
-      
-      
-    
-    
-      
-      
-      
-    
-    
-      
-      
-      
-      
-      
-      
-      
-      
-    
-    
-    
-    
-    
-    
-    
-    
-    
-      
-      
-    
-    
-      
-      
-      
-    
-    
-      
-      
-      
-    
-    
-      
-      
-      
-      
-    
-    
-      
-      
-      
-      
-    
-    
-      
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-    
-      
-      
-      
-      
-    
-    
-      
-      
-      
-      
-      
-      
-    
-    
-      
-      
-      
-    
-    
-    
-    
-      
-      
-      
-      
-    
-    
-      
-    
-    
-      
-      
-    
-    
-      
-    
-    
-      
-      
-    
-    
-      
-      
-    
-    
-      
-      
-      
-    
-    
-    
-    
-    
-      
-    
-    
-    
-      
-      
-    
-    
-      
-      
-      
-      
-      
-    
-    
-      
-      
-      
-      
-    
-    
-      
-      
-    
-    
-      
-    
-    
-      
-    
-    
-      
-    
-    
-    
-      
-      
-    
-  
-
\ No newline at end of file
diff --git a/config/checkstyle/suppressions.xml b/config/checkstyle/suppressions.xml
deleted file mode 100644
index ff366c9457..0000000000
--- a/config/checkstyle/suppressions.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
-    
-    
-    
-
-
\ No newline at end of file
diff --git a/core/build.gradle b/core/build.gradle
index 0e563b274e..675e73ba32 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -28,18 +28,14 @@ plugins {
     id 'jacoco'
     id 'info.solidsoft.pitest' version '1.9.0'
     id 'java-test-fixtures'
+    id 'com.diffplug.spotless' version '6.19.0'
+
 }
 
 repositories {
     mavenCentral()
 }
 
-// Being ignored as a temporary measure before being removed in favour of
-// spotless https://github.com/opensearch-project/sql/issues/1101
-checkstyleTest.ignoreFailures = true
-checkstyleMain.ignoreFailures = true
-checkstyleTestFixtures.ignoreFailures = true
-
 pitest {
     targetClasses = ['org.opensearch.sql.*']
     pitestVersion = '1.9.0'
@@ -67,6 +63,24 @@ dependencies {
     testImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: '3.12.4'
 }
 
+spotless {
+    java {
+        target fileTree('.') {
+            include '**/*.java'
+            exclude '**/build/**', '**/build-*/**'
+        }
+        importOrder()
+//        licenseHeader("/*\n" +
+//                " * Copyright OpenSearch Contributors\n" +
+//                " * SPDX-License-Identifier: Apache-2.0\n" +
+//                " */\n\n")
+        removeUnusedImports()
+        trimTrailingWhitespace()
+        endWithNewline()
+        googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format')
+    }
+}
+
 test {
     useJUnitPlatform()
     testLogging {
diff --git a/datasources/build.gradle b/datasources/build.gradle
index 830fadbc35..ef52db2305 100644
--- a/datasources/build.gradle
+++ b/datasources/build.gradle
@@ -31,9 +31,6 @@ dependencies {
     testImplementation 'org.junit.jupiter:junit-jupiter:5.6.2'
 }
 
-checkstyleTest.ignoreFailures = true
-checkstyleMain.ignoreFailures = true
-
 test {
     useJUnitPlatform()
     testLogging {
diff --git a/doctest/build.gradle b/doctest/build.gradle
index c3a177f900..5cab1060e2 100644
--- a/doctest/build.gradle
+++ b/doctest/build.gradle
@@ -9,6 +9,7 @@ plugins {
     id 'base'
     id 'com.wiredforcode.spawn'
     id "de.undercouch.download" version "5.3.0"
+    id 'com.diffplug.spotless' version '6.19.0'
 }
 
 apply plugin: 'opensearch.testclusters'
@@ -140,3 +141,21 @@ tasks.register("runRestTestCluster", RunTask) {
     description = 'Runs OpenSearch SQL plugin'
     useCluster testClusters.docTestCluster;
 }
+
+spotless {
+    java {
+        target fileTree('.') {
+            include '**/*.java'
+            exclude '**/build/**', '**/build-*/**'
+        }
+        importOrder()
+//        licenseHeader("/*\n" +
+//                " * Copyright OpenSearch Contributors\n" +
+//                " * SPDX-License-Identifier: Apache-2.0\n" +
+//                " */\n\n")
+        removeUnusedImports()
+        trimTrailingWhitespace()
+        endWithNewline()
+        googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format')
+    }
+}
diff --git a/integ-test/build.gradle b/integ-test/build.gradle
index 7cb0983670..eedf2f4a03 100644
--- a/integ-test/build.gradle
+++ b/integ-test/build.gradle
@@ -33,6 +33,7 @@ import java.util.stream.Collectors
 
 plugins {
     id "de.undercouch.download" version "5.3.0"
+    id 'com.diffplug.spotless' version '6.19.0'
 }
 
 apply plugin: 'opensearch.build'
@@ -56,11 +57,6 @@ repositories {
     }
 }
 
-// Being ignored as a temporary measure before being removed in favour of
-// spotless https://github.com/opensearch-project/sql/issues/1101
-checkstyleTest.ignoreFailures = true
-checkstyleMain.ignoreFailures = true
-
 ext {
     projectSubstitutions = [:]
     licenseFile = rootProject.file('LICENSE.TXT')
@@ -194,7 +190,6 @@ dependencies {
 
 dependencyLicenses.enabled = false
 testingConventions.enabled = false
-checkstyleTest.ignoreFailures = true
 forbiddenApisTest.enabled = false
 thirdPartyAudit.enabled = false
 
@@ -634,6 +629,24 @@ task bwcTestSuite(type: StandaloneRestIntegTestTask) {
 def opensearch_tmp_dir = rootProject.file('build/private/es_tmp').absoluteFile
 opensearch_tmp_dir.mkdirs()
 
+spotless {
+    java {
+        target fileTree('.') {
+            include '**/*.java'
+            exclude '**/build/**', '**/build-*/**'
+        }
+        importOrder()
+//        licenseHeader("/*\n" +
+//                " * Copyright OpenSearch Contributors\n" +
+//                " * SPDX-License-Identifier: Apache-2.0\n" +
+//                " */\n\n")
+        removeUnusedImports()
+        trimTrailingWhitespace()
+        endWithNewline()
+        googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format')
+    }
+}
+
 task integTestRemote(type: RestIntegTestTask) {
     testLogging {
         events "passed", "skipped", "failed"
diff --git a/legacy/build.gradle b/legacy/build.gradle
index fce04ae9ba..fc985989e5 100644
--- a/legacy/build.gradle
+++ b/legacy/build.gradle
@@ -26,6 +26,7 @@ plugins {
     id 'java'
     id 'io.freefair.lombok'
     id 'antlr'
+    id 'com.diffplug.spotless' version '6.19.0'
 }
 
 generateGrammarSource {
@@ -53,8 +54,23 @@ compileJava {
     }
 }
 
-checkstyleTest.ignoreFailures = true
-checkstyleMain.ignoreFailures = true
+spotless {
+    java {
+        target fileTree('.') {
+            include '**/*.java'
+            exclude '**/build/**', '**/build-*/**'
+        }
+        importOrder()
+//        licenseHeader("/*\n" +
+//                " * Copyright OpenSearch Contributors\n" +
+//                " * SPDX-License-Identifier: Apache-2.0\n" +
+//                " */\n\n")
+        removeUnusedImports()
+        trimTrailingWhitespace()
+        endWithNewline()
+        googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format')
+    }
+}
 
 // TODO: Similarly, need to fix compiling errors in test source code
 compileTestJava.options.warnings = false
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java
index 0f87b9eb05..7410e56e49 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr;
 
 import java.util.Collection;
@@ -13,50 +12,51 @@
 import org.apache.lucene.search.spell.LevenshteinDistance;
 import org.apache.lucene.search.spell.StringDistance;
 
-/**
- * String similarity for finding most similar string.
- */
+/** String similarity for finding most similar string. */
 public class SimilarSymbols {
 
-    /** LevenshteinDistance instance is basically a math util which is supposed to be thread safe */
-    private static final StringDistance ALGORITHM = new LevenshteinDistance();
-
-    /** Symbol candidate list from which to pick one as most similar symbol to a target */
-    private final Collection candidates;
-
-    public SimilarSymbols(Collection candidates) {
-        this.candidates = Collections.unmodifiableCollection(candidates);
+  /** LevenshteinDistance instance is basically a math util which is supposed to be thread safe */
+  private static final StringDistance ALGORITHM = new LevenshteinDistance();
+
+  /** Symbol candidate list from which to pick one as most similar symbol to a target */
+  private final Collection candidates;
+
+  public SimilarSymbols(Collection candidates) {
+    this.candidates = Collections.unmodifiableCollection(candidates);
+  }
+
+  /**
+   * Find most similar string in candidates by calculating similarity distance among target and
+   * candidate strings.
+   *
+   * @param target string to match
+   * @return most similar string to the target
+   */
+  public String mostSimilarTo(String target) {
+    Optional closest =
+        candidates.stream()
+            .map(candidate -> new SymbolDistance(candidate, target))
+            .max(Comparator.comparing(SymbolDistance::similarity));
+    if (closest.isPresent()) {
+      return closest.get().candidate;
     }
-
-    /**
-     * Find most similar string in candidates by calculating similarity distance
-     * among target and candidate strings.
-     *
-     * @param target    string to match
-     * @return          most similar string to the target
-     */
-    public String mostSimilarTo(String target) {
-        Optional closest = candidates.stream().
-                                                      map(candidate -> new SymbolDistance(candidate, target)).
-                                                      max(Comparator.comparing(SymbolDistance::similarity));
-        if (closest.isPresent()) {
-            return closest.get().candidate;
-        }
-        return target;
+    return target;
+  }
+
+  /**
+   * Distance (similarity) between 2 symbols. This class is mainly for Java 8 stream comparator API
+   */
+  private static class SymbolDistance {
+    private final String candidate;
+    private final String target;
+
+    private SymbolDistance(String candidate, String target) {
+      this.candidate = candidate;
+      this.target = target;
     }
 
-    /** Distance (similarity) between 2 symbols. This class is mainly for Java 8 stream comparator API */
-    private static class SymbolDistance {
-        private final String candidate;
-        private final String target;
-
-        private SymbolDistance(String candidate, String target) {
-            this.candidate = candidate;
-            this.target = target;
-        }
-
-        public float similarity() {
-            return ALGORITHM.getDistance(candidate, target);
-        }
+    public float similarity() {
+      return ALGORITHM.getDistance(candidate, target);
     }
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java
index 56c69755a6..703c7d6586 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java
@@ -3,49 +3,48 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr;
 
-/**
- * Configuration for SQL analysis.
- */
+/** Configuration for SQL analysis. */
 public class SqlAnalysisConfig {
 
-    /** Is entire analyzer enabled to perform the analysis */
-    private final boolean isAnalyzerEnabled;
-
-    /** Is suggestion enabled for field name typo */
-    private final boolean isFieldSuggestionEnabled;
-
-    /** Skip entire analysis for index mapping larger than this threhold */
-    private final int analysisThreshold;
-
-    public SqlAnalysisConfig(boolean isAnalyzerEnabled,
-                             boolean isFieldSuggestionEnabled,
-                             int analysisThreshold) {
-        this.isAnalyzerEnabled = isAnalyzerEnabled;
-        this.isFieldSuggestionEnabled = isFieldSuggestionEnabled;
-        this.analysisThreshold = analysisThreshold;
-    }
-
-    public boolean isAnalyzerEnabled() {
-        return isAnalyzerEnabled;
-    }
-
-    public boolean isFieldSuggestionEnabled() {
-        return isFieldSuggestionEnabled;
-    }
-
-    public int getAnalysisThreshold() {
-        return analysisThreshold;
-    }
-
-    @Override
-    public String toString() {
-        return "SqlAnalysisConfig{"
-            + "isAnalyzerEnabled=" + isAnalyzerEnabled
-            + ", isFieldSuggestionEnabled=" + isFieldSuggestionEnabled
-            + ", analysisThreshold=" + analysisThreshold
-            + '}';
-    }
+  /** Is entire analyzer enabled to perform the analysis */
+  private final boolean isAnalyzerEnabled;
+
+  /** Is suggestion enabled for field name typo */
+  private final boolean isFieldSuggestionEnabled;
+
+  /** Skip entire analysis for index mapping larger than this threshold */
+  private final int analysisThreshold;
+
+  public SqlAnalysisConfig(
+      boolean isAnalyzerEnabled, boolean isFieldSuggestionEnabled, int analysisThreshold) {
+    this.isAnalyzerEnabled = isAnalyzerEnabled;
+    this.isFieldSuggestionEnabled = isFieldSuggestionEnabled;
+    this.analysisThreshold = analysisThreshold;
+  }
+
+  public boolean isAnalyzerEnabled() {
+    return isAnalyzerEnabled;
+  }
+
+  public boolean isFieldSuggestionEnabled() {
+    return isFieldSuggestionEnabled;
+  }
+
+  public int getAnalysisThreshold() {
+    return analysisThreshold;
+  }
+
+  @Override
+  public String toString() {
+    return "SqlAnalysisConfig{"
+        + "isAnalyzerEnabled="
+        + isAnalyzerEnabled
+        + ", isFieldSuggestionEnabled="
+        + isFieldSuggestionEnabled
+        + ", analysisThreshold="
+        + analysisThreshold
+        + '}';
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java
index 1856d568a2..b1d1204f21 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java
@@ -3,15 +3,12 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr;
 
-/**
- * SQL query analysis abstract exception.
- */
+/** SQL query analysis abstract exception. */
 public class SqlAnalysisException extends RuntimeException {
 
-    public SqlAnalysisException(String message) {
-        super(message);
-    }
+  public SqlAnalysisException(String message) {
+    super(message);
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java
index 742642fb42..45c2dbc1dc 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java
@@ -3,18 +3,14 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic;
 
 import org.opensearch.sql.legacy.antlr.SqlAnalysisException;
 
-/**
- * Exception for semantic analysis
- */
+/** Exception for semantic analysis */
 public class SemanticAnalysisException extends SqlAnalysisException {
 
-    public SemanticAnalysisException(String message) {
-        super(message);
-    }
-
+  public SemanticAnalysisException(String message) {
+    super(message);
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java
index 968aff0df2..73fa5d1655 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java
@@ -3,46 +3,44 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.scope;
 
 import java.util.Objects;
 
 /**
- * Semantic context responsible for environment chain (stack) management and everything required for analysis.
- * This context should be shared by different stages in future, particularly
- * from semantic analysis to logical planning to physical planning.
+ * Semantic context responsible for environment chain (stack) management and everything required for
+ * analysis. This context should be shared by different stages in future, particularly from semantic
+ * analysis to logical planning to physical planning.
  */
 public class SemanticContext {
 
-    /** Environment stack for symbol scope management */
-    private Environment environment = new Environment(null);
-
-    /**
-     * Push a new environment
-     */
-    public void push() {
-        environment = new Environment(environment);
-    }
-
-    /**
-     * Return current environment
-     * @return  current environment
-     */
-    public Environment peek() {
-        return environment;
-    }
-
-    /**
-     * Pop up current environment from environment chain
-     * @return  current environment (before pop)
-     */
-    public Environment pop() {
-        Objects.requireNonNull(environment, "Fail to pop context due to no environment present");
-
-        Environment curEnv = environment;
-        environment = curEnv.getParent();
-        return curEnv;
-    }
-
+  /** Environment stack for symbol scope management */
+  private Environment environment = new Environment(null);
+
+  /** Push a new environment */
+  public void push() {
+    environment = new Environment(environment);
+  }
+
+  /**
+   * Return current environment
+   *
+   * @return current environment
+   */
+  public Environment peek() {
+    return environment;
+  }
+
+  /**
+   * Pop up current environment from environment chain
+   *
+   * @return current environment (before pop)
+   */
+  public Environment pop() {
+    Objects.requireNonNull(environment, "Fail to pop context due to no environment present");
+
+    Environment curEnv = environment;
+    environment = curEnv.getParent();
+    return curEnv;
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java
index e9b6892e68..837baf1c00 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java
@@ -3,34 +3,30 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.scope;
 
-/**
- * Symbol in the scope
- */
+/** Symbol in the scope */
 public class Symbol {
 
-    private final Namespace namespace;
-
-    private final String name;
+  private final Namespace namespace;
 
-    public Symbol(Namespace namespace, String name) {
-        this.namespace = namespace;
-        this.name = name;
-    }
+  private final String name;
 
-    public Namespace getNamespace() {
-        return namespace;
-    }
+  public Symbol(Namespace namespace, String name) {
+    this.namespace = namespace;
+    this.name = name;
+  }
 
-    public String getName() {
-        return name;
-    }
+  public Namespace getNamespace() {
+    return namespace;
+  }
 
-    @Override
-    public String toString() {
-        return namespace + " [" + name + "]";
-    }
+  public String getName() {
+    return name;
+  }
 
+  @Override
+  public String toString() {
+    return namespace + " [" + name + "]";
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java
index a8f0174c25..ee9f4545a6 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.scope;
 
 import static java.util.Collections.emptyMap;
@@ -17,79 +16,79 @@
 import java.util.stream.Collectors;
 import org.opensearch.sql.legacy.antlr.semantic.types.Type;
 
-/**
- * Symbol table for symbol definition and resolution.
- */
+/** Symbol table for symbol definition and resolution. */
 public class SymbolTable {
 
-    /**
-     * Two-dimension hash table to manage symbols with type in different namespace
-     */
-    private Map> tableByNamespace = new EnumMap<>(Namespace.class);
+  /** Two-dimension hash table to manage symbols with type in different namespace */
+  private Map> tableByNamespace =
+      new EnumMap<>(Namespace.class);
 
-    /**
-     * Store symbol with the type. Create new map for namespace for the first time.
-     * @param symbol    symbol to define
-     * @param type      symbol type
-     */
-    public void store(Symbol symbol, Type type) {
-        tableByNamespace.computeIfAbsent(
-                symbol.getNamespace(),
-                ns -> new TreeMap<>()
-        ).computeIfAbsent(
-                symbol.getName(),
-                symbolName -> new TypeSupplier(symbolName, type)
-        ).add(type);
-    }
+  /**
+   * Store symbol with the type. Create new map for namespace for the first time.
+   *
+   * @param symbol symbol to define
+   * @param type symbol type
+   */
+  public void store(Symbol symbol, Type type) {
+    tableByNamespace
+        .computeIfAbsent(symbol.getNamespace(), ns -> new TreeMap<>())
+        .computeIfAbsent(symbol.getName(), symbolName -> new TypeSupplier(symbolName, type))
+        .add(type);
+  }
 
-    /**
-     * Look up symbol in the namespace map.
-     * @param symbol    symbol to look up
-     * @return          symbol type which is optional
-     */
-    public Optional lookup(Symbol symbol) {
-        Map table = tableByNamespace.get(symbol.getNamespace());
-        TypeSupplier typeSupplier = null;
-        if (table != null) {
-            typeSupplier = table.get(symbol.getName());
-        }
-        return Optional.ofNullable(typeSupplier).map(TypeSupplier::get);
+  /**
+   * Look up symbol in the namespace map.
+   *
+   * @param symbol symbol to look up
+   * @return symbol type which is optional
+   */
+  public Optional lookup(Symbol symbol) {
+    Map table = tableByNamespace.get(symbol.getNamespace());
+    TypeSupplier typeSupplier = null;
+    if (table != null) {
+      typeSupplier = table.get(symbol.getName());
     }
+    return Optional.ofNullable(typeSupplier).map(TypeSupplier::get);
+  }
 
-    /**
-     * Look up symbols by a prefix.
-     * @param prefix    a symbol prefix
-     * @return          symbols starting with the prefix
-     */
-    public Map lookupByPrefix(Symbol prefix) {
-        NavigableMap table = tableByNamespace.get(prefix.getNamespace());
-        if (table != null) {
-            return table.subMap(prefix.getName(), prefix.getName() + Character.MAX_VALUE)
-                    .entrySet().stream()
-                    .filter(entry -> null != entry.getValue().get())
-                    .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get()));
-        }
-        return emptyMap();
+  /**
+   * Look up symbols by a prefix.
+   *
+   * @param prefix a symbol prefix
+   * @return symbols starting with the prefix
+   */
+  public Map lookupByPrefix(Symbol prefix) {
+    NavigableMap table = tableByNamespace.get(prefix.getNamespace());
+    if (table != null) {
+      return table
+          .subMap(prefix.getName(), prefix.getName() + Character.MAX_VALUE)
+          .entrySet()
+          .stream()
+          .filter(entry -> null != entry.getValue().get())
+          .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get()));
     }
+    return emptyMap();
+  }
 
-    /**
-     * Look up all symbols in the namespace.
-     * @param namespace     a namespace
-     * @return              all symbols in the namespace map
-     */
-    public Map lookupAll(Namespace namespace) {
-        return tableByNamespace.getOrDefault(namespace, emptyNavigableMap())
-                .entrySet().stream()
-                .filter(entry -> null != entry.getValue().get())
-                .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get()));
-    }
+  /**
+   * Look up all symbols in the namespace.
+   *
+   * @param namespace a namespace
+   * @return all symbols in the namespace map
+   */
+  public Map lookupAll(Namespace namespace) {
+    return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()).entrySet().stream()
+        .filter(entry -> null != entry.getValue().get())
+        .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get()));
+  }
 
-    /**
-     * Check if namespace map in empty (none definition)
-     * @param namespace     a namespace
-     * @return              true for empty
-     */
-    public boolean isEmpty(Namespace namespace) {
-        return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()).isEmpty();
-    }
+  /**
+   * Check if the namespace map is empty (no definitions)
+   *
+   * @param namespace a namespace
+   * @return true for empty
+   */
+  public boolean isEmpty(Namespace namespace) {
+    return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()).isEmpty();
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java
index 355ae70249..7c2410cf76 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.scope;
 
 import java.util.HashSet;
@@ -13,39 +12,38 @@
 import org.opensearch.sql.legacy.antlr.semantic.types.Type;
 
 /**
- * The TypeSupplier is construct by the symbolName and symbolType.
- * The TypeSupplier implement the {@link Supplier} interface to provide the {@link Type}.
- * The TypeSupplier maintain types to track different {@link Type} definition for the same symbolName.
+ * The TypeSupplier is constructed from the symbolName and symbolType. The TypeSupplier implements
+ * the {@link Supplier} interface to provide the {@link Type}. The TypeSupplier maintains types to
+ * track different {@link Type} definitions for the same symbolName.
  */
 public class TypeSupplier implements Supplier {
-    private final String symbolName;
-    private final Type symbolType;
-    private final Set types;
+  private final String symbolName;
+  private final Type symbolType;
+  private final Set types;
 
-    public TypeSupplier(String symbolName, Type symbolType) {
-        this.symbolName = symbolName;
-        this.symbolType = symbolType;
-        this.types = new HashSet<>();
-        this.types.add(symbolType);
-    }
+  public TypeSupplier(String symbolName, Type symbolType) {
+    this.symbolName = symbolName;
+    this.symbolType = symbolType;
+    this.types = new HashSet<>();
+    this.types.add(symbolType);
+  }
 
-    public TypeSupplier add(Type type) {
-        types.add(type);
-        return this;
-    }
+  public TypeSupplier add(Type type) {
+    types.add(type);
+    return this;
+  }
 
-    /**
-     * Get the {@link Type}
-     * Throw {@link SemanticAnalysisException} if conflict found.
-     * Currently, if the two types not equal, they are treated as conflicting.
-     */
-    @Override
-    public Type get() {
-        if (types.size() > 1) {
-            throw new SemanticAnalysisException(
-                    String.format("Field [%s] have conflict type [%s]", symbolName, types));
-        } else {
-            return symbolType;
-        }
+  /**
+   * Get the {@link Type}. Throws {@link SemanticAnalysisException} if a conflict is found.
+   * Currently, if the two types are not equal, they are treated as conflicting.
+   */
+  @Override
+  public Type get() {
+    if (types.size() > 1) {
+      throw new SemanticAnalysisException(
+          String.format("Field [%s] have conflict type [%s]", symbolName, types));
+    } else {
+      return symbolType;
     }
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java
index 0491c4e568..539e3478d2 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.types;
 
 import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR;
@@ -15,66 +14,64 @@
 import org.opensearch.sql.legacy.antlr.visitor.Reducible;
 import org.opensearch.sql.legacy.utils.StringUtils;
 
-/**
- * Type interface which represents any type of symbol in the SQL.
- */
+/** Type interface which represents any type of symbol in the SQL. */
 public interface Type extends Reducible {
 
-    /**
-     * Hide generic type ugliness and error check here in one place.
-     */
-    @SuppressWarnings("unchecked")
-    @Override
-    default  T reduce(List others) {
-        List actualArgTypes = (List) others;
-        Type result = construct(actualArgTypes);
-        if (result != TYPE_ERROR) {
-            return (T) result;
-        }
-
-        // Generate error message by current type name, argument types and usage of current type
-        // For example, 'Function [LOG] cannot work with [TEXT, INTEGER]. Usage: LOG(NUMBER) -> NUMBER
-        String actualArgTypesStr;
-        if (actualArgTypes.isEmpty()) {
-            actualArgTypesStr = "";
-        } else {
-            actualArgTypesStr = actualArgTypes.stream().
-                                               map(Type::usage).
-                                               collect(Collectors.joining(", "));
-        }
+  /** Hide generic type ugliness and error check here in one place. */
+  @SuppressWarnings("unchecked")
+  @Override
+  default  T reduce(List others) {
+    List actualArgTypes = (List) others;
+    Type result = construct(actualArgTypes);
+    if (result != TYPE_ERROR) {
+      return (T) result;
+    }
 
-        throw new SemanticAnalysisException(
-            StringUtils.format("%s cannot work with [%s]. Usage: %s",
-                this, actualArgTypesStr, usage()));
+    // Generate error message by current type name, argument types and usage of current type
+    // For example, "Function [LOG] cannot work with [TEXT, INTEGER]. Usage: LOG(NUMBER) -> NUMBER"
+    String actualArgTypesStr;
+    if (actualArgTypes.isEmpty()) {
+      actualArgTypesStr = "";
+    } else {
+      actualArgTypesStr =
+          actualArgTypes.stream().map(Type::usage).collect(Collectors.joining(", "));
     }
 
-    /**
-     * Type descriptive name
-     * @return  name
-     */
-    String getName();
+    throw new SemanticAnalysisException(
+        StringUtils.format(
+            "%s cannot work with [%s]. Usage: %s", this, actualArgTypesStr, usage()));
+  }
 
-    /**
-     * Check if current type is compatible with other of same type.
-     * @param other     other type
-     * @return          true if compatible
-     */
-    default boolean isCompatible(Type other) {
-        return other == UNKNOWN || this == other;
-    }
+  /**
+   * Type descriptive name
+   *
+   * @return name
+   */
+  String getName();
+
+  /**
+   * Check if current type is compatible with other of same type.
+   *
+   * @param other other type
+   * @return true if compatible
+   */
+  default boolean isCompatible(Type other) {
+    return other == UNKNOWN || this == other;
+  }
 
-    /**
-     * Construct a new type by applying current constructor on other types.
-     * Constructor is a generic conception that could be function, operator, join etc.
-     *
-     * @param others other types
-     * @return  a new type as result
-     */
-    Type construct(List others);
+  /**
+   * Construct a new type by applying current constructor on other types. Constructor is a generic
+   * conception that could be function, operator, join etc.
+   *
+   * @param others other types
+   * @return a new type as result
+   */
+  Type construct(List others);
 
-    /**
-     * Return typical usage of current type
-     * @return  usage string
-     */
-    String usage();
+  /**
+   * Return the typical usage of the current type.
+   *
+   * @return usage string
+   */
+  String usage();
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java
index eacca7b00d..5a9d4d7410 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.types;
 
 import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR;
@@ -17,105 +16,107 @@
 import org.opensearch.sql.legacy.utils.StringUtils;
 
 /**
- * Type expression representing specification(s) of constructor such as function, operator etc.
- * Type expression has to be an interface with default methods because most subclass needs to be Enum.
+ * Type expression representing specification(s) of constructor such as function, operator etc. Type
+ * expression has to be an interface with default methods because most subclass needs to be Enum.
  */
 public interface TypeExpression extends Type {
 
-    @Override
-    default Type construct(List actualArgs) {
-        TypeExpressionSpec[] specifications = specifications();
-        if (specifications.length == 0) {
-            // Empty spec means type check for this type expression is not implemented yet.
-            // Return this to be compatible with everything.
-            return UNKNOWN;
-        }
+  @Override
+  default Type construct(List actualArgs) {
+    TypeExpressionSpec[] specifications = specifications();
+    if (specifications.length == 0) {
+      // Empty spec means type check for this type expression is not implemented yet.
+      // Return this to be compatible with everything.
+      return UNKNOWN;
+    }
 
-        // Create a temp specification for compatibility check.
-        TypeExpressionSpec actualSpec = new TypeExpressionSpec();
-        actualSpec.argTypes = actualArgs.toArray(new Type[0]);
-
-        // Perform compatibility check between actual spec (argument types) and expected.
-        // If found any compatible spec, it means actual spec is legal and thus apply to get result type.
-        // Ex. Actual=[INTEGER], Specs=[NUMBER->NUMBER], [STRING->NUMBER]. So first spec matches and return NUMBER.
-        for (TypeExpressionSpec spec : specifications) {
-            if (spec.isCompatible(actualSpec)) {
-                return spec.constructFunc.apply(actualArgs.toArray(new Type[0]));
-            }
-        }
-        return TYPE_ERROR;
+    // Create a temp specification for compatibility check.
+    TypeExpressionSpec actualSpec = new TypeExpressionSpec();
+    actualSpec.argTypes = actualArgs.toArray(new Type[0]);
+
+    // Perform compatibility check between actual spec (argument types) and expected.
+    // If any compatible spec is found, the actual spec is legal, so apply it to get the result
+    // type.
+    // Ex. Actual=[INTEGER], Specs=[NUMBER->NUMBER], [STRING->NUMBER]. So first spec matches and
+    // return NUMBER.
+    for (TypeExpressionSpec spec : specifications) {
+      if (spec.isCompatible(actualSpec)) {
+        return spec.constructFunc.apply(actualArgs.toArray(new Type[0]));
+      }
+    }
+    return TYPE_ERROR;
+  }
+
+  @Override
+  default String usage() {
+    return Arrays.stream(specifications())
+        .map(spec -> getName() + spec)
+        .collect(Collectors.joining(" or "));
+  }
+
+  /**
+   * Each type expression may be overloaded and include multiple specifications.
+   *
+   * @return all valid specifications or empty which means not implemented yet
+   */
+  TypeExpressionSpec[] specifications();
+
+  /**
+   * A specification is a combination of a construct function and arg types for a type expression
+   * (representing a constructor).
+   */
+  class TypeExpressionSpec {
+    Type[] argTypes;
+    Function constructFunc;
+
+    public TypeExpressionSpec map(Type... args) {
+      this.argTypes = args;
+      return this;
     }
 
-    @Override
-    default String usage() {
-        return Arrays.stream(specifications()).
-                      map(spec -> getName() + spec).
-                      collect(Collectors.joining(" or "));
+    public TypeExpressionSpec to(Function constructFunc) {
+      // Required for generic types to replace a placeholder (e.g. T) with its actual position in
+      // the argument list, so the construct function of a generic type can finally return the
+      // bound type.
+      this.constructFunc = Generic.specialize(constructFunc, argTypes);
+      return this;
     }
 
-    /**
-     * Each type expression may be overloaded and include multiple specifications.
-     * @return  all valid specifications or empty which means not implemented yet
-     */
-    TypeExpressionSpec[] specifications();
-
-    /**
-     * A specification is combination of a construct function and arg types
-     * for a type expression (represent a constructor)
-     */
-    class TypeExpressionSpec {
-        Type[] argTypes;
-        Function constructFunc;
-
-        public TypeExpressionSpec map(Type... args) {
-            this.argTypes = args;
-            return this;
-        }
+    /** Return a fixed base type regardless of the arg types. Mostly used for empty arg types. */
+    public TypeExpressionSpec to(Type returnType) {
+      this.constructFunc = x -> returnType;
+      return this;
+    }
 
-        public TypeExpressionSpec to(Function constructFunc) {
-            // Required for generic type to replace placeholder ex.T with actual position in argument list.
-            // So construct function of generic type can return binding type finally.
-            this.constructFunc = Generic.specialize(constructFunc, argTypes);
-            return this;
-        }
+    public boolean isCompatible(TypeExpressionSpec otherSpec) {
+      Type[] expectArgTypes = this.argTypes;
+      Type[] actualArgTypes = otherSpec.argTypes;
 
-        /** Return a base type no matter what's the arg types
-            Mostly this is used for empty arg types */
-        public TypeExpressionSpec to(Type returnType) {
-            this.constructFunc = x -> returnType;
-            return this;
-        }
+      // Check if arg numbers exactly match
+      if (expectArgTypes.length != actualArgTypes.length) {
+        return false;
+      }
 
-        public boolean isCompatible(TypeExpressionSpec otherSpec) {
-            Type[] expectArgTypes = this.argTypes;
-            Type[] actualArgTypes = otherSpec.argTypes;
-
-            // Check if arg numbers exactly match
-            if (expectArgTypes.length != actualArgTypes.length) {
-                return false;
-            }
-
-            // Check if all arg types are compatible
-            for (int i = 0; i < expectArgTypes.length; i++) {
-                if (!expectArgTypes[i].isCompatible(actualArgTypes[i])) {
-                    return false;
-                }
-            }
-            return true;
+      // Check if all arg types are compatible
+      for (int i = 0; i < expectArgTypes.length; i++) {
+        if (!expectArgTypes[i].isCompatible(actualArgTypes[i])) {
+          return false;
         }
+      }
+      return true;
+    }
 
-        @Override
-        public String toString() {
-            String argTypesStr = Arrays.stream(argTypes).
-                                        map(Type::usage).
-                                        collect(Collectors.joining(", "));
+    @Override
+    public String toString() {
+      String argTypesStr =
+          Arrays.stream(argTypes).map(Type::usage).collect(Collectors.joining(", "));
 
-            // Only show generic type name in return value for clarity
-            Type returnType = constructFunc.apply(argTypes);
-            String returnTypeStr = (returnType instanceof Generic) ? returnType.getName() : returnType.usage();
+      // Only show generic type name in return value for clarity
+      Type returnType = constructFunc.apply(argTypes);
+      String returnTypeStr =
+          (returnType instanceof Generic) ? returnType.getName() : returnType.usage();
 
-            return StringUtils.format("(%s) -> %s", argTypesStr, returnTypeStr);
-        }
+      return StringUtils.format("(%s) -> %s", argTypesStr, returnTypeStr);
     }
-
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java
index 988c9856e3..e8a80cd821 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.types.operator;
 
 import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR;
@@ -11,45 +10,43 @@
 import java.util.List;
 import org.opensearch.sql.legacy.antlr.semantic.types.Type;
 
-/**
- * Set operator between queries.
- */
+/** Set operator between queries. */
 public enum SetOperator implements Type {
-    UNION,
-    MINUS,
-    IN;
-
-    @Override
-    public String getName() {
-        return name();
+  UNION,
+  MINUS,
+  IN;
+
+  @Override
+  public String getName() {
+    return name();
+  }
+
+  @Override
+  public Type construct(List others) {
+    if (others.size() < 2) {
+      throw new IllegalStateException("");
     }
 
-    @Override
-    public Type construct(List others) {
-        if (others.size() < 2) {
-            throw new IllegalStateException("");
-        }
-
-        // Compare each type and return anyone for now if pass
-        for (int i = 0; i < others.size() - 1; i++) {
-            Type type1 = others.get(i);
-            Type type2 = others.get(i + 1);
-
-            // Do it again as in Product because single base type won't be wrapped in Product
-            if (!type1.isCompatible(type2) && !type2.isCompatible(type1)) {
-                return TYPE_ERROR;
-            }
-        }
-        return others.get(0);
-    }
-
-    @Override
-    public String usage() {
-        return "Please return field(s) of compatible type from each query.";
-    }
+    // Compare each type and return anyone for now if pass
+    for (int i = 0; i < others.size() - 1; i++) {
+      Type type1 = others.get(i);
+      Type type2 = others.get(i + 1);
 
-    @Override
-    public String toString() {
-        return "Operator [" + getName() + "]";
+      // Do it again as in Product because single base type won't be wrapped in Product
+      if (!type1.isCompatible(type2) && !type2.isCompatible(type1)) {
+        return TYPE_ERROR;
+      }
     }
+    return others.get(0);
+  }
+
+  @Override
+  public String usage() {
+    return "Please return field(s) of compatible type from each query.";
+  }
+
+  @Override
+  public String toString() {
+    return "Operator [" + getName() + "]";
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java
index 32bad91737..0655062be3 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.visitor;
 
 import static org.opensearch.sql.legacy.utils.StringUtils.unquoteFullColumn;
@@ -13,125 +12,123 @@
 import org.opensearch.sql.legacy.antlr.semantic.types.Type;
 import org.opensearch.sql.legacy.antlr.visitor.GenericSqlParseTreeVisitor;
 
-/**
- * Main visitor implementation to drive the entire semantic analysis.
- */
+/** Main visitor implementation to drive the entire semantic analysis. */
 public class SemanticAnalyzer implements GenericSqlParseTreeVisitor {
 
-    private final OpenSearchMappingLoader mappingLoader;
-
-    private final TypeChecker typeChecker;
-
-    public SemanticAnalyzer(OpenSearchMappingLoader mappingLoader, TypeChecker typeChecker) {
-        this.mappingLoader = mappingLoader;
-        this.typeChecker = typeChecker;
-    }
-
-    @Override
-    public void visitRoot() {
-        mappingLoader.visitRoot();
-        typeChecker.visitRoot();
-    }
-
-    @Override
-    public void visitQuery() {
-        mappingLoader.visitQuery();
-        typeChecker.visitQuery();
-    }
-
-    @Override
-    public void endVisitQuery() {
-        mappingLoader.endVisitQuery();
-        typeChecker.endVisitQuery();
-    }
-
-    @Override
-    public Type visitSelect(List itemTypes) {
-        mappingLoader.visitSelect(itemTypes);
-        return typeChecker.visitSelect(itemTypes);
-    }
-
-    @Override
-    public Type visitSelectAllColumn() {
-        mappingLoader.visitSelectAllColumn();
-        return typeChecker.visitSelectAllColumn();
-    }
-
-    @Override
-    public void visitAs(String alias, Type type) {
-        mappingLoader.visitAs(unquoteSingleField(alias), type);
-        typeChecker.visitAs(unquoteSingleField(alias), type);
-    }
-
-    @Override
-    public Type visitIndexName(String indexName) {
-        mappingLoader.visitIndexName(unquoteSingleField(indexName));
-        return typeChecker.visitIndexName(unquoteSingleField(indexName));
-    }
-
-    @Override
-    public Type visitFieldName(String fieldName) {
-        mappingLoader.visitFieldName(unquoteFullColumn(fieldName));
-        return typeChecker.visitFieldName(unquoteFullColumn(fieldName));
-    }
-
-    @Override
-    public Type visitFunctionName(String funcName) {
-        mappingLoader.visitFunctionName(funcName);
-        return typeChecker.visitFunctionName(funcName);
-    }
-
-    @Override
-    public Type visitOperator(String opName) {
-        mappingLoader.visitOperator(opName);
-        return typeChecker.visitOperator(opName);
-    }
-
-    @Override
-    public Type visitString(String text) {
-        mappingLoader.visitString(text);
-        return typeChecker.visitString(text);
-    }
-
-    @Override
-    public Type visitInteger(String text) {
-        mappingLoader.visitInteger(text);
-        return typeChecker.visitInteger(text);
-    }
-
-    @Override
-    public Type visitFloat(String text) {
-        mappingLoader.visitFloat(text);
-        return typeChecker.visitFloat(text);
-    }
-
-    @Override
-    public Type visitBoolean(String text) {
-        mappingLoader.visitBoolean(text);
-        return typeChecker.visitBoolean(text);
-    }
-
-    @Override
-    public Type visitDate(String text) {
-        mappingLoader.visitDate(text);
-        return typeChecker.visitDate(text);
-    }
-
-    @Override
-    public Type visitNull() {
-        mappingLoader.visitNull();
-        return typeChecker.visitNull();
-    }
-
-    @Override
-    public Type visitConvertedType(String text) {
-        mappingLoader.visitConvertedType(text);
-        return typeChecker.visitConvertedType(text);
-    }
-
-    @Override
-    public Type defaultValue() {
-        mappingLoader.defaultValue();
-        return typeChecker.defaultValue();
-    }
+  private final OpenSearchMappingLoader mappingLoader;
+
+  private final TypeChecker typeChecker;
+
+  public SemanticAnalyzer(OpenSearchMappingLoader mappingLoader, TypeChecker typeChecker) {
+    this.mappingLoader = mappingLoader;
+    this.typeChecker = typeChecker;
+  }
+
+  @Override
+  public void visitRoot() {
+    mappingLoader.visitRoot();
+    typeChecker.visitRoot();
+  }
+
+  @Override
+  public void visitQuery() {
+    mappingLoader.visitQuery();
+    typeChecker.visitQuery();
+  }
+
+  @Override
+  public void endVisitQuery() {
+    mappingLoader.endVisitQuery();
+    typeChecker.endVisitQuery();
+  }
+
+  @Override
+  public Type visitSelect(List itemTypes) {
+    mappingLoader.visitSelect(itemTypes);
+    return typeChecker.visitSelect(itemTypes);
+  }
+
+  @Override
+  public Type visitSelectAllColumn() {
+    mappingLoader.visitSelectAllColumn();
+    return typeChecker.visitSelectAllColumn();
+  }
+
+  @Override
+  public void visitAs(String alias, Type type) {
+    mappingLoader.visitAs(unquoteSingleField(alias), type);
+    typeChecker.visitAs(unquoteSingleField(alias), type);
+  }
+
+  @Override
+  public Type visitIndexName(String indexName) {
+    mappingLoader.visitIndexName(unquoteSingleField(indexName));
+    return typeChecker.visitIndexName(unquoteSingleField(indexName));
+  }
+
+  @Override
+  public Type visitFieldName(String fieldName) {
+    mappingLoader.visitFieldName(unquoteFullColumn(fieldName));
+    return typeChecker.visitFieldName(unquoteFullColumn(fieldName));
+  }
+
+  @Override
+  public Type visitFunctionName(String funcName) {
+    mappingLoader.visitFunctionName(funcName);
+    return typeChecker.visitFunctionName(funcName);
+  }
+
+  @Override
+  public Type visitOperator(String opName) {
+    mappingLoader.visitOperator(opName);
+    return typeChecker.visitOperator(opName);
+  }
+
+  @Override
+  public Type visitString(String text) {
+    mappingLoader.visitString(text);
+    return typeChecker.visitString(text);
+  }
+
+  @Override
+  public Type visitInteger(String text) {
+    mappingLoader.visitInteger(text);
+    return typeChecker.visitInteger(text);
+  }
+
+  @Override
+  public Type visitFloat(String text) {
+    mappingLoader.visitFloat(text);
+    return typeChecker.visitFloat(text);
+  }
+
+  @Override
+  public Type visitBoolean(String text) {
+    mappingLoader.visitBoolean(text);
+    return typeChecker.visitBoolean(text);
+  }
+
+  @Override
+  public Type visitDate(String text) {
+    mappingLoader.visitDate(text);
+    return typeChecker.visitDate(text);
+  }
+
+  @Override
+  public Type visitNull() {
+    mappingLoader.visitNull();
+    return typeChecker.visitNull();
+  }
+
+  @Override
+  public Type visitConvertedType(String text) {
+    mappingLoader.visitConvertedType(text);
+    return typeChecker.visitConvertedType(text);
+  }
+
+  @Override
+  public Type defaultValue() {
+    mappingLoader.defaultValue();
+    return typeChecker.defaultValue();
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java
index 59c0036575..19119c776c 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.semantic.visitor;
 
 import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.UNKNOWN;
@@ -31,198 +30,196 @@
 import org.opensearch.sql.legacy.antlr.visitor.GenericSqlParseTreeVisitor;
 import org.opensearch.sql.legacy.utils.StringUtils;
 
-/**
- * SQL semantic analyzer that determines if a syntactical correct query is meaningful.
- */
+/** SQL semantic analyzer that determines if a syntactical correct query is meaningful. */
 public class TypeChecker implements GenericSqlParseTreeVisitor {
 
-    private static final Type NULL_TYPE = new Type() {
+  private static final Type NULL_TYPE =
+      new Type() {
         @Override
         public String getName() {
-            return "NULL";
+          return "NULL";
         }
 
         @Override
         public boolean isCompatible(Type other) {
-            throw new IllegalStateException("Compatibility check on NULL type with " + other);
+          throw new IllegalStateException("Compatibility check on NULL type with " + other);
         }
 
         @Override
         public Type construct(List others) {
-            throw new IllegalStateException("Construct operation on NULL type with " + others);
+          throw new IllegalStateException("Construct operation on NULL type with " + others);
         }
 
         @Override
         public String usage() {
-            throw new IllegalStateException("Usage print operation on NULL type");
-        }
-    };
-
-    /** Semantic context for symbol scope management */
-    private final SemanticContext context;
-
-    /** Should suggestion provided. Disabled by default for security concern. */
-    private final boolean isSuggestEnabled;
-
-    public TypeChecker(SemanticContext context) {
-        this.context = context;
-        this.isSuggestEnabled = false;
-    }
-
-    public TypeChecker(SemanticContext context, boolean isSuggestEnabled) {
-        this.context = context;
-        this.isSuggestEnabled = isSuggestEnabled;
-    }
-
-    @Override
-    public void visitRoot() {
-        defineFunctionNames(ScalarFunction.values());
-        defineFunctionNames(OpenSearchScalarFunction.values());
-        defineFunctionNames(AggregateFunction.values());
-        defineOperatorNames(ComparisonOperator.values());
-        defineOperatorNames(SetOperator.values());
-        defineOperatorNames(JoinOperator.values());
-    }
-
-    @Override
-    public void visitQuery() {
-        context.push();
-    }
-
-    @Override
-    public void endVisitQuery() {
-        context.pop();
-    }
-
-    @Override
-    public Type visitSelect(List itemTypes) {
-        if (itemTypes.size() == 1) {
-            return itemTypes.get(0);
-        } else if (itemTypes.size() == 0) {
-            return visitSelectAllColumn();
-        }
-        // Return product for empty (SELECT *) and #items > 1
-        return new Product(itemTypes);
-    }
-
-    @Override
-    public Type visitSelectAllColumn() {
-        return resolveAllColumn();
-    }
-
-    @Override
-    public void visitAs(String alias, Type type) {
-        defineFieldName(alias, type);
-    }
-
-    @Override
-    public Type visitIndexName(String indexName) {
-        return resolve(new Symbol(Namespace.FIELD_NAME, indexName));
-    }
-
-    @Override
-    public Type visitFieldName(String fieldName) {
-        // Bypass hidden fields which is not present in mapping, ex. _id, _type.
-        if (fieldName.startsWith("_")) {
-            return UNKNOWN;
-        }
-        // Ignore case for function/operator though field name is case sensitive
-        return resolve(new Symbol(Namespace.FIELD_NAME, fieldName));
-    }
-
-    @Override
-    public Type visitFunctionName(String funcName) {
-        return resolve(new Symbol(Namespace.FUNCTION_NAME, StringUtils.toUpper(funcName)));
-    }
-
-    @Override
-    public Type visitOperator(String opName) {
-        return resolve(new Symbol(Namespace.OPERATOR_NAME, StringUtils.toUpper(opName)));
-    }
-
-    @Override
-    public Type visitString(String text) {
-        return OpenSearchDataType.STRING;
-    }
-
-    @Override
-    public Type visitInteger(String text) {
-        return OpenSearchDataType.INTEGER;
-    }
-
-    @Override
-    public Type visitFloat(String text) {
-        return OpenSearchDataType.FLOAT;
-    }
-
-    @Override
-    public Type visitBoolean(String text) {
-        // "IS [NOT] MISSING" can be used on any data type
-        return "MISSING".equalsIgnoreCase(text) ? UNKNOWN : OpenSearchDataType.BOOLEAN;
-    }
-
-    @Override
-    public Type visitDate(String text) {
-        return OpenSearchDataType.DATE;
-    }
-
-    @Override
-    public Type visitNull() {
-        return UNKNOWN;
-    }
-
-    @Override
-    public Type visitConvertedType(String text) {
-        return OpenSearchDataType.typeOf(text);
-    }
-
-    @Override
-    public Type defaultValue() {
-        return NULL_TYPE;
-    }
-
-    private void defineFieldName(String fieldName, Type type) {
-        Symbol symbol = new Symbol(Namespace.FIELD_NAME, fieldName);
-        if (!environment().resolve(symbol).isPresent()) {
-            environment().define(symbol, type);
+          throw new IllegalStateException("Usage print operation on NULL type");
         }
-    }
-
-    private void defineFunctionNames(TypeExpression[] expressions) {
-        for (TypeExpression expr : expressions) {
-            environment().define(new Symbol(Namespace.FUNCTION_NAME, expr.getName()), expr);
-        }
-    }
-
-    private void defineOperatorNames(Type[] expressions) {
-        for (Type expr : expressions) {
-            environment().define(new Symbol(Namespace.OPERATOR_NAME, expr.getName()), expr);
-        }
-    }
-
-    private Type resolve(Symbol symbol) {
-        Optional type = environment().resolve(symbol);
-        if (type.isPresent()) {
-            return type.get();
-        }
-
-        String errorMsg = StringUtils.format("%s cannot be found or used here.", symbol);
-
-        if (isSuggestEnabled || symbol.getNamespace() != Namespace.FIELD_NAME) {
-            Set allSymbolsInScope = environment().resolveAll(symbol.getNamespace()).keySet();
-            String suggestedWord = new SimilarSymbols(allSymbolsInScope).mostSimilarTo(symbol.getName());
-            errorMsg += StringUtils.format(" Did you mean [%s]?", suggestedWord);
-        }
-        throw new SemanticAnalysisException(errorMsg);
-    }
-
-    private Type resolveAllColumn() {
-        environment().resolveAll(Namespace.FIELD_NAME);
-        return new Product(ImmutableList.of());
-    }
-
-    private Environment environment() {
-        return context.peek();
-    }
-
+      };
+
+  /** Semantic context for symbol scope management */
+  private final SemanticContext context;
+
+  /** Whether suggestions should be provided. Disabled by default for security concerns. */
+  private final boolean isSuggestEnabled;
+
+  public TypeChecker(SemanticContext context) {
+    this.context = context;
+    this.isSuggestEnabled = false;
+  }
+
+  public TypeChecker(SemanticContext context, boolean isSuggestEnabled) {
+    this.context = context;
+    this.isSuggestEnabled = isSuggestEnabled;
+  }
+
+  @Override
+  public void visitRoot() {
+    defineFunctionNames(ScalarFunction.values());
+    defineFunctionNames(OpenSearchScalarFunction.values());
+    defineFunctionNames(AggregateFunction.values());
+    defineOperatorNames(ComparisonOperator.values());
+    defineOperatorNames(SetOperator.values());
+    defineOperatorNames(JoinOperator.values());
+  }
+
+  @Override
+  public void visitQuery() {
+    context.push();
+  }
+
+  @Override
+  public void endVisitQuery() {
+    context.pop();
+  }
+
+  @Override
+  public Type visitSelect(List itemTypes) {
+    if (itemTypes.size() == 1) {
+      return itemTypes.get(0);
+    } else if (itemTypes.size() == 0) {
+      return visitSelectAllColumn();
+    }
+    // Return product for empty (SELECT *) and #items > 1
+    return new Product(itemTypes);
+  }
+
+  @Override
+  public Type visitSelectAllColumn() {
+    return resolveAllColumn();
+  }
+
+  @Override
+  public void visitAs(String alias, Type type) {
+    defineFieldName(alias, type);
+  }
+
+  @Override
+  public Type visitIndexName(String indexName) {
+    return resolve(new Symbol(Namespace.FIELD_NAME, indexName));
+  }
+
+  @Override
+  public Type visitFieldName(String fieldName) {
+    // Bypass hidden fields which is not present in mapping, ex. _id, _type.
+    if (fieldName.startsWith("_")) {
+      return UNKNOWN;
+    }
+    // Ignore case for function/operator though field name is case sensitive
+    return resolve(new Symbol(Namespace.FIELD_NAME, fieldName));
+  }
+
+  @Override
+  public Type visitFunctionName(String funcName) {
+    return resolve(new Symbol(Namespace.FUNCTION_NAME, StringUtils.toUpper(funcName)));
+  }
+
+  @Override
+  public Type visitOperator(String opName) {
+    return resolve(new Symbol(Namespace.OPERATOR_NAME, StringUtils.toUpper(opName)));
+  }
+
+  @Override
+  public Type visitString(String text) {
+    return OpenSearchDataType.STRING;
+  }
+
+  @Override
+  public Type visitInteger(String text) {
+    return OpenSearchDataType.INTEGER;
+  }
+
+  @Override
+  public Type visitFloat(String text) {
+    return OpenSearchDataType.FLOAT;
+  }
+
+  @Override
+  public Type visitBoolean(String text) {
+    // "IS [NOT] MISSING" can be used on any data type
+    return "MISSING".equalsIgnoreCase(text) ? UNKNOWN : OpenSearchDataType.BOOLEAN;
+  }
+
+  @Override
+  public Type visitDate(String text) {
+    return OpenSearchDataType.DATE;
+  }
+
+  @Override
+  public Type visitNull() {
+    return UNKNOWN;
+  }
+
+  @Override
+  public Type visitConvertedType(String text) {
+    return OpenSearchDataType.typeOf(text);
+  }
+
+  @Override
+  public Type defaultValue() {
+    return NULL_TYPE;
+  }
+
+  private void defineFieldName(String fieldName, Type type) {
+    Symbol symbol = new Symbol(Namespace.FIELD_NAME, fieldName);
+    if (!environment().resolve(symbol).isPresent()) {
+      environment().define(symbol, type);
+    }
+  }
+
+  private void defineFunctionNames(TypeExpression[] expressions) {
+    for (TypeExpression expr : expressions) {
+      environment().define(new Symbol(Namespace.FUNCTION_NAME, expr.getName()), expr);
+    }
+  }
+
+  private void defineOperatorNames(Type[] expressions) {
+    for (Type expr : expressions) {
+      environment().define(new Symbol(Namespace.OPERATOR_NAME, expr.getName()), expr);
+    }
+  }
+
+  private Type resolve(Symbol symbol) {
+    Optional type = environment().resolve(symbol);
+    if (type.isPresent()) {
+      return type.get();
+    }
+
+    String errorMsg = StringUtils.format("%s cannot be found or used here.", symbol);
+
+    if (isSuggestEnabled || symbol.getNamespace() != Namespace.FIELD_NAME) {
+      Set allSymbolsInScope = environment().resolveAll(symbol.getNamespace()).keySet();
+      String suggestedWord = new SimilarSymbols(allSymbolsInScope).mostSimilarTo(symbol.getName());
+      errorMsg += StringUtils.format(" Did you mean [%s]?", suggestedWord);
+    }
+    throw new SemanticAnalysisException(errorMsg);
+  }
+
+  private Type resolveAllColumn() {
+    environment().resolveAll(Namespace.FIELD_NAME);
+    return new Product(ImmutableList.of());
+  }
+
+  private Environment environment() {
+    return context.peek();
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java
index 185f2696b7..5f0c7e38d1 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.syntax;
 
 import org.antlr.v4.runtime.BaseErrorListener;
@@ -15,50 +14,53 @@
 import org.opensearch.sql.legacy.utils.StringUtils;
 
 /**
- * Syntax analysis error listener that handles any syntax error by throwing exception with useful information.
+ * Syntax analysis error listener that handles any syntax error by throwing exception with useful
+ * information.
  */
 public class SyntaxAnalysisErrorListener extends BaseErrorListener {
 
-    @Override
-    public void syntaxError(Recognizer recognizer, Object offendingSymbol,
-                            int line, int charPositionInLine, String msg,
-                            RecognitionException e) {
+  @Override
+  public void syntaxError(
+      Recognizer recognizer,
+      Object offendingSymbol,
+      int line,
+      int charPositionInLine,
+      String msg,
+      RecognitionException e) {
 
-        CommonTokenStream tokens = (CommonTokenStream) recognizer.getInputStream();
-        Token offendingToken = (Token) offendingSymbol;
-        String query = tokens.getText();
+    CommonTokenStream tokens = (CommonTokenStream) recognizer.getInputStream();
+    Token offendingToken = (Token) offendingSymbol;
+    String query = tokens.getText();
 
-        throw new SyntaxAnalysisException(
-            StringUtils.format(
-                "Failed to parse query due to offending symbol [%s] at: '%s' <--- HERE... More details: %s",
-                getOffendingText(offendingToken),
-                truncateQueryAtOffendingToken(query, offendingToken),
-                getDetails(recognizer, msg, e)
-            )
-        );
-    }
+    throw new SyntaxAnalysisException(
+        StringUtils.format(
+            "Failed to parse query due to offending symbol [%s] at: '%s' <--- HERE... More details:"
+                + " %s",
+            getOffendingText(offendingToken),
+            truncateQueryAtOffendingToken(query, offendingToken),
+            getDetails(recognizer, msg, e)));
+  }
 
-    private String getOffendingText(Token offendingToken) {
-        return offendingToken.getText();
-    }
+  private String getOffendingText(Token offendingToken) {
+    return offendingToken.getText();
+  }
 
-    private String truncateQueryAtOffendingToken(String query, Token offendingToken) {
-        return query.substring(0, offendingToken.getStopIndex() + 1);
-    }
+  private String truncateQueryAtOffendingToken(String query, Token offendingToken) {
+    return query.substring(0, offendingToken.getStopIndex() + 1);
+  }
 
-    /**
-     * As official JavaDoc says, e=null means parser was able to recover from the error.
-     * In other words, "msg" argument includes the information we want.
-     */
-    private String getDetails(Recognizer recognizer, String msg, RecognitionException e) {
-        String details;
-        if (e == null) {
-            details = msg;
-        } else {
-            IntervalSet followSet = e.getExpectedTokens();
-            details = "Expecting tokens in " + followSet.toString(recognizer.getVocabulary());
-        }
-        return details;
+  /**
+   * As official JavaDoc says, e=null means parser was able to recover from the error. In other
+   * words, "msg" argument includes the information we want.
+   */
+  private String getDetails(Recognizer recognizer, String msg, RecognitionException e) {
+    String details;
+    if (e == null) {
+      details = msg;
+    } else {
+      IntervalSet followSet = e.getExpectedTokens();
+      details = "Expecting tokens in " + followSet.toString(recognizer.getVocabulary());
     }
-
+    return details;
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java
index f79de62229..dce5437a19 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java
@@ -3,17 +3,14 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.legacy.antlr.syntax;
 
 import org.opensearch.sql.legacy.antlr.SqlAnalysisException;
 
-/**
- * Exception for syntax analysis
- */
+/** Exception for syntax analysis */
 public class SyntaxAnalysisException extends SqlAnalysisException {
 
-    public SyntaxAnalysisException(String message) {
-        super(message);
-    }
+  public SyntaxAnalysisException(String message) {
+    super(message);
+  }
 }
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java
index 00db9a6591..5e89b3b8ae 100644
--- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java
@@ -120,23 +120,25 @@ public T visitOuterJoin(OuterJoinContext ctx) {
     return visitJoin(ctx.children, ctx.tableSourceItem());
   }
 
-    /**
-     * 
-     * Enforce visit order because ANTLR is generic and unaware.
-     *
-     * Visiting order is:
-     *  FROM
-     *  => WHERE
-     *   => SELECT
-     *    => GROUP BY
-     *     => HAVING
-     *      => ORDER BY
-     *       => LIMIT
-     *  
- */ - @Override - public T visitQuerySpecification(QuerySpecificationContext ctx) { - visitor.visitQuery(); + /** + * + * + *
+   * Enforce visit order because ANTLR is generic and unaware.
+   *
+   * Visiting order is:
+   *  FROM
+   *  => WHERE
+   *   => SELECT
+   *    => GROUP BY
+   *     => HAVING
+   *      => ORDER BY
+   *       => LIMIT
+   *  
+ */ + @Override + public T visitQuerySpecification(QuerySpecificationContext ctx) { + visitor.visitQuery(); // Always visit FROM clause first to define symbols FromClauseContext fromClause = ctx.fromClause(); diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java index dc37425a62..919af8e6e2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import com.google.common.collect.Sets; @@ -17,71 +16,68 @@ public class UnsupportedSemanticVerifier { - private static final Set mathConstants = Sets.newHashSet( - "e", "pi" - ); - - private static final Set supportedNestedFunctions = Sets.newHashSet( - "nested", "reverse_nested", "score", "match_query", "matchquery" - ); + private static final Set mathConstants = Sets.newHashSet("e", "pi"); - /** - * The following two sets include the functions and operators that have requested or issued by users - * but the plugin does not support yet. - */ - private static final Set unsupportedFunctions = Sets.newHashSet( - "adddate", "addtime", "datetime", "greatest", "least" - ); + private static final Set supportedNestedFunctions = + Sets.newHashSet("nested", "reverse_nested", "score", "match_query", "matchquery"); - private static final Set unsupportedOperators = Sets.newHashSet( - "div" - ); + /** + * The following two sets include the functions and operators that have requested or issued by + * users but the plugin does not support yet. 
+ */ + private static final Set unsupportedFunctions = + Sets.newHashSet("adddate", "addtime", "datetime", "greatest", "least"); + private static final Set unsupportedOperators = Sets.newHashSet("div"); - /** - * The scalar function calls are separated into (a)typical function calls; (b)nested function calls with functions - * as arguments, like abs(log(...)); (c)aggregations with functions as aggregators, like max(abs(....)). - * Currently, we do not support nested functions or nested aggregations, aka (b) and (c). - * However, for the special EsFunctions included in the [supportedNestedFunctions] set, we have supported them in - * nested function calls and aggregations (b&c). Besides, the math constants included in the [mathConstants] set - * are regraded as scalar functions, but they are working well in the painless script. - * - * Thus, the types of functions to throw exceptions: - * (I)case (b) except that the arguments are from the [mathConstants] set; - * (II) case (b) except that the arguments are from the [supportedNestedFunctions] set; - * (III) case (c) except that the aggregators are from thet [supportedNestedFunctions] set. - */ - public static void verify(ScalarFunctionCallContext ctx) { - String funcName = StringUtils.toLower(ctx.scalarFunctionName().getText()); + /** + * The scalar function calls are separated into (a)typical function calls; (b)nested function + * calls with functions as arguments, like abs(log(...)); (c)aggregations with functions as + * aggregators, like max(abs(....)). Currently, we do not support nested functions or nested + * aggregations, aka (b) and (c). However, for the special EsFunctions included in the + * [supportedNestedFunctions] set, we have supported them in nested function calls and + * aggregations (b&c). Besides, the math constants included in the [mathConstants] set are + * regraded as scalar functions, but they are working well in the painless script. + * + *

Thus, the types of functions to throw exceptions: (I)case (b) except that the arguments are + * from the [mathConstants] set; (II) case (b) except that the arguments are from the + * [supportedNestedFunctions] set; (III) case (c) except that the aggregators are from thet + * [supportedNestedFunctions] set. + */ + public static void verify(ScalarFunctionCallContext ctx) { + String funcName = StringUtils.toLower(ctx.scalarFunctionName().getText()); - // type (III) - if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.FunctionAsAggregatorFunctionContext - && !(supportedNestedFunctions.contains(StringUtils.toLower(funcName)))) { - throw new SqlFeatureNotImplementedException(StringUtils.format( - "Aggregation calls with function aggregator like [%s] are not supported yet", - ctx.parent.parent.getText())); + // type (III) + if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.FunctionAsAggregatorFunctionContext + && !(supportedNestedFunctions.contains(StringUtils.toLower(funcName)))) { + throw new SqlFeatureNotImplementedException( + StringUtils.format( + "Aggregation calls with function aggregator like [%s] are not supported yet", + ctx.parent.parent.getText())); - // type (I) and (II) - } else if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.NestedFunctionArgsContext - && !(mathConstants.contains(funcName) || supportedNestedFunctions.contains(funcName))) { - throw new SqlFeatureNotImplementedException(StringUtils.format( - "Nested function calls like [%s] are not supported yet", ctx.parent.parent.parent.getText())); + // type (I) and (II) + } else if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.NestedFunctionArgsContext + && !(mathConstants.contains(funcName) || supportedNestedFunctions.contains(funcName))) { + throw new SqlFeatureNotImplementedException( + StringUtils.format( + "Nested function calls like [%s] are not supported yet", + ctx.parent.parent.parent.getText())); - // unsupported functions - } else if 
(unsupportedFunctions.contains(funcName)) { - throw new SqlFeatureNotImplementedException(StringUtils.format("Function [%s] is not supported yet", - funcName)); - } + // unsupported functions + } else if (unsupportedFunctions.contains(funcName)) { + throw new SqlFeatureNotImplementedException( + StringUtils.format("Function [%s] is not supported yet", funcName)); } + } - public static void verify(MathOperatorContext ctx) { - if (unsupportedOperators.contains(StringUtils.toLower(ctx.getText()))) { - throw new SqlFeatureNotImplementedException(StringUtils.format("Operator [%s] is not supported yet", - ctx.getText())); - } + public static void verify(MathOperatorContext ctx) { + if (unsupportedOperators.contains(StringUtils.toLower(ctx.getText()))) { + throw new SqlFeatureNotImplementedException( + StringUtils.format("Operator [%s] is not supported yet", ctx.getText())); } + } - public static void verify(RegexpPredicateContext ctx) { - throw new SqlFeatureNotImplementedException("Regexp predicate is not supported yet"); - } + public static void verify(RegexpPredicateContext ctx) { + throw new SqlFeatureNotImplementedException("Regexp predicate is not supported yet"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java index 8cc83a5fe2..300706a80b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java @@ -9,11 +9,11 @@ public interface Cursor { NullCursor NULL_CURSOR = new NullCursor(); - /** - * All cursor's are of the form :
- * The serialized form before encoding is upto Cursor implementation - */ - String generateCursorId(); + /** + * All cursor's are of the form :
+ * The serialized form before encoding is upto Cursor implementation + */ + String generateCursorId(); CursorType getType(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java index 8804c543f6..f86635910a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java @@ -377,14 +377,14 @@ public void setChildType(String childType) { this.childType = childType; } - /** - * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX}
- * For example, the opear is {@link OPERATOR#NESTED_COMPLEX} when condition is - * nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') - */ - public boolean isNestedComplex() { - return OPERATOR.NESTED_COMPLEX == OPERATOR; - } + /** + * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX}
+ * For example, the opear is {@link OPERATOR#NESTED_COMPLEX} when condition is nested('projects', + * projects.started_year > 2000 OR projects.name LIKE '%security%') + */ + public boolean isNestedComplex() { + return OPERATOR.NESTED_COMPLEX == OPERATOR; + } @Override public String toString() { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java index 7d0765580b..a53fb0c275 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java @@ -26,11 +26,12 @@ /** * Domain object for HAVING clause in SQL which covers both the parsing and explain logic. - *

- * Responsibilities: + * + *

Responsibilities: + * *

    - *
  1. Parsing: parse conditions out during initialization - *
  2. Explain: translate conditions to OpenSearch query DSL (Bucket Selector Aggregation) + *
  3. Parsing: parse conditions out during initialization + *
  4. Explain: translate conditions to OpenSearch query DSL (Bucket Selector Aggregation) *
*/ public class Having { @@ -126,32 +127,35 @@ private Script explainConditions() throws SqlParseException { return new Script(doExplain(conditions)); } - /** - *
-     * Explain conditions recursively.
-     * Example: HAVING c >= 2 OR NOT (a > 20 AND c <= 10 OR a < 1) OR a < 5
-     * Object: Where(?:
-     * [
-     * Condition(?:c>=2),
-     * Where(or:
-     * [
-     * Where(?:a<=20), Where(or:c>10), Where(and:a>=1)],
-     * ]),
-     * Condition(or:a<5)
-     * ])
-     * 

- * Note: a) Where(connector : condition expression). - * b) Condition is a subclass of Where. - * c) connector=? means it doesn't matter for first condition in the list - *

- * @param wheres conditions - * @return painless script string - * @throws SqlParseException unknown type of expression other than identifier and value - */ - private String doExplain(List wheres) throws SqlParseException { - if (wheres == null || wheres.isEmpty()) { - return ""; - } + /** + * + * + *
+   * Explain conditions recursively.
+   * Example: HAVING c >= 2 OR NOT (a > 20 AND c <= 10 OR a < 1) OR a < 5
+   * Object: Where(?:
+   * [
+   * Condition(?:c>=2),
+   * Where(or:
+   * [
+   * Where(?:a<=20), Where(or:c>10), Where(and:a>=1)],
+   * ]),
+   * Condition(or:a<5)
+   * ])
+   * 

+ * Note: a) Where(connector : condition expression). + * b) Condition is a subclass of Where. + * c) connector=? means it doesn't matter for first condition in the list + *

+ * + * @param wheres conditions + * @return painless script string + * @throws SqlParseException unknown type of expression other than identifier and value + */ + private String doExplain(List wheres) throws SqlParseException { + if (wheres == null || wheres.isEmpty()) { + return ""; + } StringBuilder script = new StringBuilder(); for (Where cond : wheres) { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java index cf27cb51ee..e0dcb2899f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java @@ -3,45 +3,40 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.List; -/** - * Created by Eliran on 28/8/2015. - */ +/** Created by Eliran on 28/8/2015. */ public class TableOnJoinSelect extends Select { - private List connectedFields; - private List selectedFields; - private String alias; - - public TableOnJoinSelect() { - } + private List connectedFields; + private List selectedFields; + private String alias; + public TableOnJoinSelect() {} - public List getConnectedFields() { - return connectedFields; - } + public List getConnectedFields() { + return connectedFields; + } - public void setConnectedFields(List connectedFields) { - this.connectedFields = connectedFields; - } + public void setConnectedFields(List connectedFields) { + this.connectedFields = connectedFields; + } - public List getSelectedFields() { - return selectedFields; - } + public List getSelectedFields() { + return selectedFields; + } - public void setSelectedFields(List selectedFields) { - this.selectedFields = selectedFields; - } + public void setSelectedFields(List selectedFields) { + this.selectedFields = selectedFields; + } - public String getAlias() { - return alias; - } + public String getAlias() { + return alias; + 
} - public void setAlias(String alias) { - this.alias = alias; - } + public void setAlias(String alias) { + this.alias = alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java index ae05e33e51..d6f767203b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java @@ -3,70 +3,69 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.LinkedList; public class Where implements Cloneable { - public enum CONN { - AND, OR; + public enum CONN { + AND, + OR; - public CONN negative() { - return this == AND ? OR : AND; - } + public CONN negative() { + return this == AND ? OR : AND; } + } - public static Where newInstance() { - return new Where(CONN.AND); - } + public static Where newInstance() { + return new Where(CONN.AND); + } - private LinkedList wheres = new LinkedList<>(); + private LinkedList wheres = new LinkedList<>(); - protected CONN conn; + protected CONN conn; - public Where(String connStr) { - this.conn = CONN.valueOf(connStr.toUpperCase()); - } + public Where(String connStr) { + this.conn = CONN.valueOf(connStr.toUpperCase()); + } - public Where(CONN conn) { - this.conn = conn; - } + public Where(CONN conn) { + this.conn = conn; + } - public void addWhere(Where where) { - wheres.add(where); - } + public void addWhere(Where where) { + wheres.add(where); + } - public CONN getConn() { - return this.conn; - } - - public void setConn(CONN conn) { - this.conn = conn; - } + public CONN getConn() { + return this.conn; + } - public LinkedList getWheres() { - return wheres; - } + public void setConn(CONN conn) { + this.conn = conn; + } - @Override - public String toString() { - if (wheres.size() > 0) { - String whereStr = wheres.toString(); - return this.conn + " ( " + whereStr.substring(1, whereStr.length() - 1) + " ) "; - } else { 
- return ""; - } + public LinkedList getWheres() { + return wheres; + } + @Override + public String toString() { + if (wheres.size() > 0) { + String whereStr = wheres.toString(); + return this.conn + " ( " + whereStr.substring(1, whereStr.length() - 1) + " ) "; + } else { + return ""; } + } - @Override - public Object clone() throws CloneNotSupportedException { - Where clonedWhere = new Where(this.getConn()); - for (Where innerWhere : this.getWheres()) { - clonedWhere.addWhere((Where) innerWhere.clone()); - } - return clonedWhere; + @Override + public Object clone() throws CloneNotSupportedException { + Where clonedWhere = new Where(this.getConn()); + for (Where innerWhere : this.getWheres()) { + clonedWhere.addWhere((Where) innerWhere.clone()); } + return clonedWhere; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java index 84875b9531..1e6595bd32 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java @@ -31,11 +31,12 @@ /** * Local cluster state information which may be stale but help avoid blocking operation in NIO * thread. + * *
    - *
  1. Why extending TransportAction doesn't work here? TransportAction enforce implementation to - * be performed remotely but local cluster state read is expected here. - *
  2. Why injection by AbstractModule doesn't work here? Because this state needs to be used - * across the plugin, ex. in rewriter, pretty formatter etc. + *
  3. Why extending TransportAction doesn't work here? TransportAction enforce implementation to + * be performed remotely but local cluster state read is expected here. + *
  4. Why injection by AbstractModule doesn't work here? Because this state needs to be used + * across the plugin, ex. in rewriter, pretty formatter etc. *
*/ public class LocalClusterState { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java index 05b3f2854e..e92fdbea33 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java @@ -14,6 +14,8 @@ import org.opensearch.cluster.metadata.MappingMetadata; /** + * + * *
  * Field mappings in a specific type.
  * 

diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java index 22cb99c44e..61e707e8ef 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java @@ -13,6 +13,8 @@ import org.opensearch.cluster.metadata.Metadata; /** + * + * *

  * Index mappings in the cluster.
  * 

diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java index 52cdda3cdd..4578cd6c93 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java @@ -3,15 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.exception; public class SQLFeatureDisabledException extends Exception { - private static final long serialVersionUID = 1L; - - public SQLFeatureDisabledException(String message) { - super(message); - } + private static final long serialVersionUID = 1L; + public SQLFeatureDisabledException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java index 9225986132..43ad6d97b5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java @@ -3,21 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.exception; /** - * Intended for cases when we knowingly omitted some case, letting users know that we didn't implemented feature, but - * it may be implemented in future. + * Intended for cases when we knowingly omitted some case, letting users know that we didn't + * implemented feature, but it may be implemented in future. 
*/ public class SqlFeatureNotImplementedException extends RuntimeException { - private static final long serialVersionUID = 1; + private static final long serialVersionUID = 1; - public SqlFeatureNotImplementedException(String message) { - super(message); - } + public SqlFeatureNotImplementedException(String message) { + super(message); + } - public SqlFeatureNotImplementedException(String message, Throwable cause) { - super(message, cause); - } + public SqlFeatureNotImplementedException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java index c93ad2a2fa..a09ddc97d1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.exception; public class SqlParseException extends Exception { - public SqlParseException(String message) { - super(message); - } - - - private static final long serialVersionUID = 1L; + public SqlParseException(String message) { + super(message); + } + private static final long serialVersionUID = 1L; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java index 28bc559a01..eb76cd021e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java @@ -28,17 +28,18 @@ public CSVResult(List headers, List lines) { this.lines = lines; } - /** - * Sanitize both headers and data lines by: - *

    - *
  1. First prepend single quote if first char is sensitive (= - + @) - *
  2. Second double quote entire cell if any comma found - *
- */ - public CSVResult(String separator, List headers, List> lines) { - this.headers = sanitizeHeaders(separator, headers); - this.lines = sanitizeLines(separator, lines); - } + /** + * Sanitize both headers and data lines by: + * + *
    + *
  1. First prepend single quote if first char is sensitive (= - + @) + *
  2. Second double quote entire cell if any comma found + *
+ */ + public CSVResult(String separator, List headers, List> lines) { + this.headers = sanitizeHeaders(separator, headers); + this.lines = sanitizeLines(separator, lines); + } /** * Return CSV header names which are sanitized because OpenSearch allows special character present diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java index 620b8e7b86..66c69f3430 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java @@ -103,19 +103,22 @@ private String handleDefaultCursorRequest(Client client, DefaultCursor cursor) { int rowsLeft = (int) cursor.getRowsLeft(); int fetch = cursor.getFetchSize(); - if (rowsLeft < fetch && rowsLeft < searchHitArray.length) { - /** - * This condition implies we are on the last page, and we might need to truncate the result from SearchHit[] - * Avoid truncating in following two scenarios - *
    - *
  1. number of rows to be sent equals fetchSize - *
  2. size of SearchHit[] is already less that rows that needs to be sent - *
- * Else truncate to desired number of rows - */ - SearchHit[] newSearchHits = Arrays.copyOf(searchHitArray, rowsLeft); - searchHits = new SearchHits(newSearchHits, searchHits.getTotalHits(), searchHits.getMaxScore()); - } + if (rowsLeft < fetch && rowsLeft < searchHitArray.length) { + /** + * This condition implies we are on the last page, and we might need to truncate the result + * from SearchHit[] Avoid truncating in following two scenarios + * + *
    + *
  1. number of rows to be sent equals fetchSize + *
  2. size of SearchHit[] is already less that rows that needs to be sent + *
+ * + * Else truncate to desired number of rows + */ + SearchHit[] newSearchHits = Arrays.copyOf(searchHitArray, rowsLeft); + searchHits = + new SearchHits(newSearchHits, searchHits.getTotalHits(), searchHits.getMaxScore()); + } rowsLeft = rowsLeft - fetch; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java index 445bdd45a0..c60691cb7c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java @@ -721,29 +721,31 @@ private Map addNumericAggregation( return data; } - /** - *
-     * Simplifies the structure of row's source Map by flattening it, making the full path of an object the key
-     * and the Object it refers to the value. This handles the case of regular object since nested objects will not
-     * be in hit.source but rather in hit.innerHits
-     * 

- * Sample input: - * keys = ['comments.likes'] - * row = comments: { - * likes: 2 - * } - *

- * Return: - * flattenedRow = {comment.likes: 2} - *

- */ - @SuppressWarnings("unchecked") - private Map flatRow(List keys, Map row) { - Map flattenedRow = new HashMap<>(); - for (String key : keys) { - String[] splitKeys = key.split("\\."); - boolean found = true; - Object currentObj = row; + /** + * + * + *
+   * Simplifies the structure of row's source Map by flattening it, making the full path of an object the key
+   * and the Object it refers to the value. This handles the case of regular object since nested objects will not
+   * be in hit.source but rather in hit.innerHits
+   * 

+ * Sample input: + * keys = ['comments.likes'] + * row = comments: { + * likes: 2 + * } + *

+ * Return: + * flattenedRow = {comment.likes: 2} + *

+ */ + @SuppressWarnings("unchecked") + private Map flatRow(List keys, Map row) { + Map flattenedRow = new HashMap<>(); + for (String key : keys) { + String[] splitKeys = key.split("\\."); + boolean found = true; + Object currentObj = row; for (String splitKey : splitKeys) { // This check is made to prevent Cast Exception as an ArrayList of objects can be in the @@ -770,31 +772,33 @@ private Map flatRow(List keys, Map row) return flattenedRow; } - /** - *
-     * If innerHits associated with column name exists, flatten both the inner field name and the inner rows in it.
-     * 

- * Sample input: - * newKeys = {'region', 'employees.age'}, row = {'region': 'US'} - * innerHits = employees: { - * hits: [{ - * source: { - * age: 26, - * firstname: 'Hank' - * } - * },{ - * source: { - * age: 30, - * firstname: 'John' - * } - * }] - * } - *

- */ - private List flatNestedField(Set newKeys, Map row, - Map innerHits) { - List result = new ArrayList<>(); - result.add(new DataRows.Row(row)); + /** + * + * + *
+   * If innerHits associated with column name exists, flatten both the inner field name and the inner rows in it.
+   * 

+ * Sample input: + * newKeys = {'region', 'employees.age'}, row = {'region': 'US'} + * innerHits = employees: { + * hits: [{ + * source: { + * age: 26, + * firstname: 'Hank' + * } + * },{ + * source: { + * age: 30, + * firstname: 'John' + * } + * }] + * } + *

+ */ + private List flatNestedField( + Set newKeys, Map row, Map innerHits) { + List result = new ArrayList<>(); + result.add(new DataRows.Row(row)); if (innerHits == null) { return result; @@ -819,37 +823,40 @@ private void doFlatNestedFieldName(String colName, SearchHit[] colValue, Set - * Do Cartesian Product between current outer row and inner rows by nested loop and remove original outer row. - *

- * Sample input: - * colName = 'employees', rows = [{region: 'US'}] - * colValue= [{ - * source: { - * age: 26, - * firstname: 'Hank' - * } - * },{ - * source: { - * age: 30, - * firstname: 'John' - * } - * }] - *

- * Return: - * [ - * {region:'US', employees.age:26, employees.firstname:'Hank'}, - * {region:'US', employees.age:30, employees.firstname:'John'} - * ] - *

- */ - private List doFlatNestedFieldValue(String colName, SearchHit[] colValue, List rows) { - List result = new ArrayList<>(); - for (DataRows.Row row : rows) { - for (SearchHit hit : colValue) { - Map innerRow = hit.getSourceAsMap(); - Map copy = new HashMap<>(); + /** + * + * + *
+   * Do Cartesian Product between current outer row and inner rows by nested loop and remove original outer row.
+   * 

+ * Sample input: + * colName = 'employees', rows = [{region: 'US'}] + * colValue= [{ + * source: { + * age: 26, + * firstname: 'Hank' + * } + * },{ + * source: { + * age: 30, + * firstname: 'John' + * } + * }] + *

+ * Return: + * [ + * {region:'US', employees.age:26, employees.firstname:'Hank'}, + * {region:'US', employees.age:30, employees.firstname:'John'} + * ] + *

+ */ + private List doFlatNestedFieldValue( + String colName, SearchHit[] colValue, List rows) { + List result = new ArrayList<>(); + for (DataRows.Row row : rows) { + for (SearchHit hit : colValue) { + Map innerRow = hit.getSourceAsMap(); + Map copy = new HashMap<>(); for (String field : row.getContents().keySet()) { copy.put(field, row.getData(field)); diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java index 0a32f6c582..263bf1e7db 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.ArrayList; @@ -21,62 +20,62 @@ public class ShowResultSet extends ResultSet { - private static final String TABLE_TYPE = "BASE TABLE"; - - private IndexStatement statement; - private Object queryResult; - - public ShowResultSet(Client client, IndexStatement statement, Object queryResult) { - this.client = client; - this.clusterName = getClusterName(); - this.statement = statement; - this.queryResult = queryResult; - - this.schema = new Schema(statement, loadColumns()); - this.dataRows = new DataRows(loadRows()); + private static final String TABLE_TYPE = "BASE TABLE"; + + private IndexStatement statement; + private Object queryResult; + + public ShowResultSet(Client client, IndexStatement statement, Object queryResult) { + this.client = client; + this.clusterName = getClusterName(); + this.statement = statement; + this.queryResult = queryResult; + + this.schema = new Schema(statement, loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + List columns = new ArrayList<>(); + // Unused Columns are still included in Schema to match JDBC/ODBC standard + 
columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); + columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); // Not used + columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); + columns.add(new Column("TABLE_TYPE", null, Type.KEYWORD)); + columns.add(new Column("REMARKS", null, Type.KEYWORD)); // Not used + columns.add(new Column("TYPE_CAT", null, Type.KEYWORD)); // Not used + columns.add(new Column("TYPE_SCHEM", null, Type.KEYWORD)); // Not used + columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); // Not used + columns.add(new Column("SELF_REFERENCING_COL_NAME", null, Type.KEYWORD)); // Not used + columns.add(new Column("REF_GENERATION", null, Type.KEYWORD)); // Not used + + return columns; + } + + private List loadRows() { + List rows = new ArrayList<>(); + for (String index : extractIndices()) { + rows.add(new Row(loadData(index))); } - private List loadColumns() { - List columns = new ArrayList<>(); - // Unused Columns are still included in Schema to match JDBC/ODBC standard - columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); - columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); // Not used - columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); - columns.add(new Column("TABLE_TYPE", null, Type.KEYWORD)); - columns.add(new Column("REMARKS", null, Type.KEYWORD)); // Not used - columns.add(new Column("TYPE_CAT", null, Type.KEYWORD)); // Not used - columns.add(new Column("TYPE_SCHEM", null, Type.KEYWORD)); // Not used - columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); // Not used - columns.add(new Column("SELF_REFERENCING_COL_NAME", null, Type.KEYWORD)); // Not used - columns.add(new Column("REF_GENERATION", null, Type.KEYWORD)); // Not used + return rows; + } - return columns; - } + private List extractIndices() { + String indexPattern = statement.getIndexPattern(); + String[] indices = ((GetIndexResponse) queryResult).getIndices(); - private List loadRows() { - List rows = new ArrayList<>(); - for (String 
index : extractIndices()) { - rows.add(new Row(loadData(index))); - } + return Arrays.stream(indices) + .filter(index -> matchesPatternIfRegex(index, indexPattern)) + .collect(Collectors.toList()); + } - return rows; - } + private Map loadData(String tableName) { + Map data = new HashMap<>(); + data.put("TABLE_CAT", clusterName); + data.put("TABLE_NAME", tableName); + data.put("TABLE_TYPE", TABLE_TYPE); - private List extractIndices() { - String indexPattern = statement.getIndexPattern(); - String[] indices = ((GetIndexResponse) queryResult).getIndices(); - - return Arrays.stream(indices) - .filter(index -> matchesPatternIfRegex(index, indexPattern)) - .collect(Collectors.toList()); - } - - private Map loadData(String tableName) { - Map data = new HashMap<>(); - data.put("TABLE_CAT", clusterName); - data.put("TABLE_NAME", tableName); - data.put("TABLE_TYPE", TABLE_TYPE); - - return data; - } + return data; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java index 4b4080156d..6b8b64c4e8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.ArrayList; @@ -23,87 +22,92 @@ import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. 
*/ public class UnionExecutor implements ElasticHitsExecutor { - private MultiQueryRequestBuilder multiQueryBuilder; - private SearchHits results; - private Client client; - private int currentId; + private MultiQueryRequestBuilder multiQueryBuilder; + private SearchHits results; + private Client client; + private int currentId; - public UnionExecutor(Client client, MultiQueryRequestBuilder builder) { - multiQueryBuilder = builder; - this.client = client; - currentId = 0; - } + public UnionExecutor(Client client, MultiQueryRequestBuilder builder) { + multiQueryBuilder = builder; + this.client = client; + currentId = 0; + } - @Override - public void run() { - SearchResponse firstResponse = this.multiQueryBuilder.getFirstSearchRequest().get(); - SearchHit[] hits = firstResponse.getHits().getHits(); - List unionHits = new ArrayList<>(hits.length); - fillInternalSearchHits(unionHits, hits, this.multiQueryBuilder.getFirstTableFieldToAlias()); - SearchResponse secondResponse = this.multiQueryBuilder.getSecondSearchRequest().get(); - fillInternalSearchHits(unionHits, secondResponse.getHits().getHits(), - this.multiQueryBuilder.getSecondTableFieldToAlias()); - int totalSize = unionHits.size(); - SearchHit[] unionHitsArr = unionHits.toArray(new SearchHit[totalSize]); - this.results = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); - } + @Override + public void run() { + SearchResponse firstResponse = this.multiQueryBuilder.getFirstSearchRequest().get(); + SearchHit[] hits = firstResponse.getHits().getHits(); + List unionHits = new ArrayList<>(hits.length); + fillInternalSearchHits(unionHits, hits, this.multiQueryBuilder.getFirstTableFieldToAlias()); + SearchResponse secondResponse = this.multiQueryBuilder.getSecondSearchRequest().get(); + fillInternalSearchHits( + unionHits, + secondResponse.getHits().getHits(), + this.multiQueryBuilder.getSecondTableFieldToAlias()); + int totalSize = unionHits.size(); + SearchHit[] unionHitsArr = 
unionHits.toArray(new SearchHit[totalSize]); + this.results = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } - private void fillInternalSearchHits(List unionHits, SearchHit[] hits, - Map fieldNameToAlias) { - for (SearchHit hit : hits) { - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(currentId, hit.getId(), documentFields, metaFields); - searchHit.sourceRef(hit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = hit.getSourceAsMap(); - if (!fieldNameToAlias.isEmpty()) { - updateFieldNamesToAlias(sourceAsMap, fieldNameToAlias); - } - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - unionHits.add(searchHit); - } + private void fillInternalSearchHits( + List unionHits, SearchHit[] hits, Map fieldNameToAlias) { + for (SearchHit hit : hits) { + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(currentId, hit.getId(), documentFields, metaFields); + searchHit.sourceRef(hit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = hit.getSourceAsMap(); + if (!fieldNameToAlias.isEmpty()) { + updateFieldNamesToAlias(sourceAsMap, fieldNameToAlias); + } + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + unionHits.add(searchHit); } + } - - private void updateFieldNamesToAlias(Map sourceAsMap, Map fieldNameToAlias) { - for (Map.Entry fieldToAlias : fieldNameToAlias.entrySet()) { - String fieldName = fieldToAlias.getKey(); - Object value = null; - Map deleteFrom = null; - if (fieldName.contains(".")) { - String[] split = fieldName.split("\\."); - String[] path = Arrays.copyOf(split, split.length - 1); - Object placeInMap = Util.searchPathInMap(sourceAsMap, path); - if (placeInMap != null) { - if (!Map.class.isAssignableFrom(placeInMap.getClass())) { - continue; - } - } - deleteFrom = (Map) placeInMap; - value = deleteFrom.get(split[split.length - 1]); - } else if (sourceAsMap.containsKey(fieldName)) { - value = sourceAsMap.get(fieldName); - deleteFrom = sourceAsMap; - } - if (value != null) { - sourceAsMap.put(fieldToAlias.getValue(), value); - deleteFrom.remove(fieldName); - } + private void updateFieldNamesToAlias( + Map sourceAsMap, Map fieldNameToAlias) { + for (Map.Entry fieldToAlias : fieldNameToAlias.entrySet()) { + String fieldName = fieldToAlias.getKey(); + Object value = null; + Map deleteFrom = null; + if (fieldName.contains(".")) { + String[] split = fieldName.split("\\."); + String[] path = Arrays.copyOf(split, split.length - 1); + Object placeInMap = Util.searchPathInMap(sourceAsMap, path); + if (placeInMap != null) { + if (!Map.class.isAssignableFrom(placeInMap.getClass())) { + continue; + } } - Util.clearEmptyPaths(sourceAsMap); + deleteFrom = (Map) placeInMap; + value = deleteFrom.get(split[split.length - 1]); + } 
else if (sourceAsMap.containsKey(fieldName)) { + value = sourceAsMap.get(fieldName); + deleteFrom = sourceAsMap; + } + if (value != null) { + sourceAsMap.put(fieldToAlias.getValue(), value); + deleteFrom.remove(fieldName); + } } + Util.clearEmptyPaths(sourceAsMap); + } - @Override - public SearchHits getHits() { - return results; - } + @Override + public SearchHits getHits() { + return results; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java index f9bdce8ce4..3d40c3a527 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.Arrays; @@ -14,32 +13,31 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression Builder which has one argument. - */ +/** The definition of the Expression Builder which has one argument. */ @RequiredArgsConstructor public class UnaryExpressionBuilder implements ExpressionBuilder { - private final ScalarOperator op; + private final ScalarOperator op; - /** - * Build the expression with two {@link Expression} as arguments. - * @param expressionList expression list. - * @return expression. - */ - @Override - public Expression build(List expressionList) { - Expression expression = expressionList.get(0); + /** + * Build the expression with two {@link Expression} as arguments. + * + * @param expressionList expression list. + * @return expression. 
+ */ + @Override + public Expression build(List expressionList) { + Expression expression = expressionList.get(0); - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return op.apply(Arrays.asList(expression.valueOf(tuple))); - } + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return op.apply(Arrays.asList(expression.valueOf(tuple))); + } - @Override - public String toString() { - return String.format("%s(%s)", op.name(), expression); - } - }; - } + @Override + public String toString() { + return String.format("%s(%s)", op.name(), expression); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java index a6bfc48a1a..deb979f767 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -18,36 +17,37 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Unary Scalar Operator take one {@link ExprValue} as arguments ans return one {@link ExprValue} as result. + * Unary Scalar Operator take one {@link ExprValue} as arguments ans return one {@link ExprValue} as + * result. 
*/ @RequiredArgsConstructor public class UnaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final Function integerFunc; - private final Function longFunc; - private final Function doubleFunc; - private final Function floatFunc; + private final ScalarOperation op; + private final Function integerFunc; + private final Function longFunc; + private final Function doubleFunc; + private final Function floatFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue = exprValues.get(0); - switch (exprValue.kind()) { - case DOUBLE_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); - case INTEGER_VALUE: - return ExprValueFactory.from(integerFunc.apply(getIntegerValue(exprValue))); - case LONG_VALUE: - return ExprValueFactory.from(longFunc.apply(getLongValue(exprValue))); - case FLOAT_VALUE: - return ExprValueFactory.from(floatFunc.apply(getFloatValue(exprValue))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s)", op.name(), - exprValue.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue = exprValues.get(0); + switch (exprValue.kind()) { + case DOUBLE_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); + case INTEGER_VALUE: + return ExprValueFactory.from(integerFunc.apply(getIntegerValue(exprValue))); + case LONG_VALUE: + return ExprValueFactory.from(longFunc.apply(getLongValue(exprValue))); + case FLOAT_VALUE: + return ExprValueFactory.from(floatFunc.apply(getFloatValue(exprValue))); + default: + throw new RuntimeException( + String.format("unexpected operation type: %s(%s)", op.name(), exprValue.kind())); } + } - @Override - public String name() { - return op.name(); - } + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java index 4deeba1309..5951975077 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java @@ -116,26 +116,28 @@ public String getBucketPath() { return bucketPath.getBucketPath(); } - /** - *
-     * Return true if the filed is the nested filed.
-     * For example, the mapping
-     * {
-     * "projects":{
-     * "type": "nested"
-     * "properties": {
-     * "name": {
-     * "type": "text"
-     * }
-     * }
-     * }
-     * }
-     * 

- * If the filed is projects, return true. - * If the filed is projects.name, return false. - *

- */ - public boolean isNestedField() { - return !field.contains(".") && field.equalsIgnoreCase(path); - } + /** + * + * + *
+   * Return true if the filed is the nested filed.
+   * For example, the mapping
+   * {
+   * "projects":{
+   * "type": "nested"
+   * "properties": {
+   * "name": {
+   * "type": "text"
+   * }
+   * }
+   * }
+   * }
+   * 

+ * If the filed is projects, return true. + * If the filed is projects.name, return false. + *

+ */ + public boolean isNestedField() { + return !field.contains(".") && field.equalsIgnoreCase(path); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java index ed03051b66..64d1235f4d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java @@ -3,50 +3,44 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.expr.SQLCharExpr; import com.alibaba.druid.sql.visitor.SQLASTVisitor; - -/** - * Created by jheimbouch on 3/17/15. - */ +/** Created by jheimbouch on 3/17/15. */ public class SQLOdbcExpr extends SQLCharExpr { - private static final long serialVersionUID = 1L; - - public SQLOdbcExpr() { - - } + private static final long serialVersionUID = 1L; - public SQLOdbcExpr(String text) { - super(text); - } + public SQLOdbcExpr() {} - @Override - public void output(StringBuffer buf) { - if ((this.text == null) || (this.text.length() == 0)) { - buf.append("NULL"); - } else { - buf.append("{ts '"); - buf.append(this.text.replaceAll("'", "''")); - buf.append("'}"); - } - } - - @Override - public String getText() { - StringBuilder sb = new StringBuilder(); - sb.append("{ts '"); - sb.append(this.text); - sb.append("'}"); - return sb.toString(); - } + public SQLOdbcExpr(String text) { + super(text); + } - protected void accept0(SQLASTVisitor visitor) { - visitor.visit(this); - visitor.endVisit(this); + @Override + public void output(StringBuffer buf) { + if ((this.text == null) || (this.text.length() == 0)) { + buf.append("NULL"); + } else { + buf.append("{ts '"); + buf.append(this.text.replaceAll("'", "''")); + buf.append("'}"); } + } + + @Override + public String getText() { + StringBuilder sb = new StringBuilder(); + sb.append("{ts '"); + sb.append(this.text); + sb.append("'}"); + return 
sb.toString(); + } + + protected void accept0(SQLASTVisitor visitor) { + visitor.visit(this); + visitor.endVisit(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java index b9682ce84a..96c95e4e2f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java @@ -3,27 +3,24 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; - /** - * An Identifier that is wrapped in parenthesis. - * This is for tracking in group bys the difference between "group by state, age" and "group by (state), (age)". - * For non group by identifiers, it acts as a normal SQLIdentifierExpr. + * An Identifier that is wrapped in parentheses. This is for tracking in group bys the difference + * between "group by state, age" and "group by (state), (age)". For non group by identifiers, it + * acts as a normal SQLIdentifierExpr. 
*/ public class SQLParensIdentifierExpr extends SQLIdentifierExpr { - public SQLParensIdentifierExpr() { - } + public SQLParensIdentifierExpr() {} - public SQLParensIdentifierExpr(String name) { - super(name); - } + public SQLParensIdentifierExpr(String name) { + super(name); + } - public SQLParensIdentifierExpr(SQLIdentifierExpr expr) { - super(expr.getName()); - } + public SQLParensIdentifierExpr(SQLIdentifierExpr expr) { + super(expr.getName()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java index cf184750f2..947533630b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static org.opensearch.sql.legacy.utils.Util.NESTED_JOIN_TYPE; @@ -50,546 +49,571 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.query.multi.MultiQuerySelect; - /** * OpenSearch sql support * * @author ansj */ public class SqlParser { - private FieldMaker fieldMaker = new FieldMaker(); + private FieldMaker fieldMaker = new FieldMaker(); - public SqlParser() { + public SqlParser() {} + public Select parseSelect(SQLQueryExpr mySqlExpr) throws SqlParseException { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) mySqlExpr.getSubQuery().getQuery(); + SubQueryParser subQueryParser = new SubQueryParser(this); + if (subQueryParser.containSubqueryInFrom(query)) { + return subQueryParser.parseSubQueryInFrom(query); + } else { + return parseSelect(query); } + } - public Select parseSelect(SQLQueryExpr mySqlExpr) throws SqlParseException { - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) mySqlExpr.getSubQuery().getQuery(); - SubQueryParser subQueryParser = new SubQueryParser(this); - if 
(subQueryParser.containSubqueryInFrom(query)) { - return subQueryParser.parseSubQueryInFrom(query); - } else { - return parseSelect(query); - } - } + public Select parseSelect(MySqlSelectQueryBlock query) throws SqlParseException { - public Select parseSelect(MySqlSelectQueryBlock query) throws SqlParseException { + Select select = new Select(); + WhereParser whereParser = new WhereParser(this, query, fieldMaker); - Select select = new Select(); - WhereParser whereParser = new WhereParser(this, query, fieldMaker); + if (query.getAttribute(NESTED_JOIN_TYPE) != null) { + select.setNestedJoinType((SQLJoinTableSource.JoinType) query.getAttribute(NESTED_JOIN_TYPE)); + } - if (query.getAttribute(NESTED_JOIN_TYPE) != null) { - select.setNestedJoinType((SQLJoinTableSource.JoinType) query.getAttribute(NESTED_JOIN_TYPE)); - } + findSelect(query, select, query.getFrom().getAlias()); - findSelect(query, select, query.getFrom().getAlias()); + select.getFrom().addAll(findFrom(query.getFrom())); - select.getFrom().addAll(findFrom(query.getFrom())); + select.setWhere(whereParser.findWhere()); - select.setWhere(whereParser.findWhere()); + select.fillSubQueries(); - select.fillSubQueries(); + select.getHints().addAll(parseHints(query.getHints())); - select.getHints().addAll(parseHints(query.getHints())); + findLimit(query.getLimit(), select); - findLimit(query.getLimit(), select); + if (query.getOrderBy() != null) { + addOrderByToSelect(select, query, query.getOrderBy().getItems(), null); + } - if (query.getOrderBy() != null) { - addOrderByToSelect(select, query, query.getOrderBy().getItems(), null); - } + if (query.getGroupBy() != null) { + findGroupBy(query, select); + } - if (query.getGroupBy() != null) { - findGroupBy(query, select); - } + return select; + } - return select; - } + public Delete parseDelete(SQLDeleteStatement deleteStatement) throws SqlParseException { + Delete delete = new Delete(); + WhereParser whereParser = new WhereParser(this, deleteStatement); - public 
Delete parseDelete(SQLDeleteStatement deleteStatement) throws SqlParseException { - Delete delete = new Delete(); - WhereParser whereParser = new WhereParser(this, deleteStatement); + delete.getFrom().addAll(findFrom(deleteStatement.getTableSource())); - delete.getFrom().addAll(findFrom(deleteStatement.getTableSource())); + delete.setWhere(whereParser.findWhere()); - delete.setWhere(whereParser.findWhere()); + return delete; + } - return delete; - } + public MultiQuerySelect parseMultiSelect(SQLUnionQuery query) throws SqlParseException { + Select firstTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getLeft()); + Select secondTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getRight()); + return new MultiQuerySelect(query.getOperator(), firstTableSelect, secondTableSelect); + } - public MultiQuerySelect parseMultiSelect(SQLUnionQuery query) throws SqlParseException { - Select firstTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getLeft()); - Select secondTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getRight()); - return new MultiQuerySelect(query.getOperator(), firstTableSelect, secondTableSelect); + private void findSelect(MySqlSelectQueryBlock query, Select select, String tableAlias) + throws SqlParseException { + List selectList = query.getSelectList(); + for (SQLSelectItem sqlSelectItem : selectList) { + Field field = + fieldMaker.makeField(sqlSelectItem.getExpr(), sqlSelectItem.getAlias(), tableAlias); + select.addField(field); } - - private void findSelect(MySqlSelectQueryBlock query, Select select, String tableAlias) throws SqlParseException { - List selectList = query.getSelectList(); - for (SQLSelectItem sqlSelectItem : selectList) { - Field field = fieldMaker.makeField(sqlSelectItem.getExpr(), sqlSelectItem.getAlias(), tableAlias); - select.addField(field); - } + } + + private void findGroupBy(MySqlSelectQueryBlock query, Select select) throws SqlParseException { + Map aliasesToExperssions = + 
query.getSelectList().stream() + .filter(item -> item.getAlias() != null) + .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); + + SQLSelectGroupByClause groupBy = query.getGroupBy(); + SQLTableSource sqlTableSource = query.getFrom(); + + findHaving(query, select); + + List items = groupBy.getItems(); + + List standardGroupBys = new ArrayList<>(); + for (SQLExpr sqlExpr : items) { + // todo: mysql expr patch + if (sqlExpr instanceof MySqlSelectGroupByExpr) { + MySqlSelectGroupByExpr sqlSelectGroupByExpr = (MySqlSelectGroupByExpr) sqlExpr; + sqlExpr = sqlSelectGroupByExpr.getExpr(); + } + + if ((sqlExpr instanceof SQLParensIdentifierExpr + || !(sqlExpr instanceof SQLIdentifierExpr || sqlExpr instanceof SQLMethodInvokeExpr)) + && !standardGroupBys.isEmpty()) { + // flush the standard group bys + select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); + standardGroupBys = new ArrayList<>(); + } + + if (sqlExpr instanceof SQLParensIdentifierExpr) { + // single item with parens (should get its own aggregation) + select.addGroupBy(fieldMaker.makeField(sqlExpr, null, sqlTableSource.getAlias())); + } else if (sqlExpr instanceof SQLListExpr) { + // multiple items in their own list + SQLListExpr listExpr = (SQLListExpr) sqlExpr; + select.addGroupBy(convertExprsToFields(listExpr.getItems(), sqlTableSource)); + } else { + // check if field is actually alias + if (aliasesToExperssions.containsKey(sqlExpr.toString())) { + sqlExpr = aliasesToExperssions.get(sqlExpr.toString()); + } + standardGroupBys.add(sqlExpr); + } } - - private void findGroupBy(MySqlSelectQueryBlock query, Select select) throws SqlParseException { - Map aliasesToExperssions = query - .getSelectList() - .stream() - .filter(item -> item.getAlias() != null) - .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); - - SQLSelectGroupByClause groupBy = query.getGroupBy(); - SQLTableSource sqlTableSource = query.getFrom(); - - findHaving(query, 
select); - - List items = groupBy.getItems(); - - List standardGroupBys = new ArrayList<>(); - for (SQLExpr sqlExpr : items) { - //todo: mysql expr patch - if (sqlExpr instanceof MySqlSelectGroupByExpr) { - MySqlSelectGroupByExpr sqlSelectGroupByExpr = (MySqlSelectGroupByExpr) sqlExpr; - sqlExpr = sqlSelectGroupByExpr.getExpr(); - } - - if ((sqlExpr instanceof SQLParensIdentifierExpr || !(sqlExpr instanceof SQLIdentifierExpr - || sqlExpr instanceof SQLMethodInvokeExpr)) && !standardGroupBys.isEmpty()) { - // flush the standard group bys - select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); - standardGroupBys = new ArrayList<>(); - } - - if (sqlExpr instanceof SQLParensIdentifierExpr) { - // single item with parens (should get its own aggregation) - select.addGroupBy(fieldMaker.makeField(sqlExpr, null, sqlTableSource.getAlias())); - } else if (sqlExpr instanceof SQLListExpr) { - // multiple items in their own list - SQLListExpr listExpr = (SQLListExpr) sqlExpr; - select.addGroupBy(convertExprsToFields(listExpr.getItems(), sqlTableSource)); - } else { - // check if field is actually alias - if (aliasesToExperssions.containsKey(sqlExpr.toString())) { - sqlExpr = aliasesToExperssions.get(sqlExpr.toString()); - } - standardGroupBys.add(sqlExpr); - } - } - if (!standardGroupBys.isEmpty()) { - select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); - } + if (!standardGroupBys.isEmpty()) { + select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); } - - private void findHaving(MySqlSelectQueryBlock query, Select select) throws SqlParseException { - select.setHaving(new Having(query.getGroupBy(), new WhereParser(this, query, fieldMaker))); + } + + private void findHaving(MySqlSelectQueryBlock query, Select select) throws SqlParseException { + select.setHaving(new Having(query.getGroupBy(), new WhereParser(this, query, fieldMaker))); + } + + private List convertExprsToFields( + List exprs, SQLTableSource 
sqlTableSource) throws SqlParseException { + List fields = new ArrayList<>(exprs.size()); + for (SQLExpr expr : exprs) { + // here we suppose groupby field will not have alias,so set null in second parameter + fields.add(fieldMaker.makeField(expr, null, sqlTableSource.getAlias())); } + return fields; + } - private List convertExprsToFields(List exprs, SQLTableSource sqlTableSource) - throws SqlParseException { - List fields = new ArrayList<>(exprs.size()); - for (SQLExpr expr : exprs) { - //here we suppose groupby field will not have alias,so set null in second parameter - fields.add(fieldMaker.makeField(expr, null, sqlTableSource.getAlias())); - } - return fields; + private String sameAliasWhere(Where where, String... aliases) throws SqlParseException { + if (where == null) { + return null; } - private String sameAliasWhere(Where where, String... aliases) throws SqlParseException { - if (where == null) { - return null; - } - - if (where instanceof Condition) { - Condition condition = (Condition) where; - String fieldName = condition.getName(); - for (String alias : aliases) { - String prefix = alias + "."; - if (fieldName.startsWith(prefix)) { - return alias; - } - } - throw new SqlParseException(String.format("Field [%s] with condition [%s] does not contain an alias", - fieldName, condition.toString())); - } - List sameAliases = new ArrayList<>(); - if (where.getWheres() != null && where.getWheres().size() > 0) { - for (Where innerWhere : where.getWheres()) { - sameAliases.add(sameAliasWhere(innerWhere, aliases)); - } - } - - if (sameAliases.contains(null)) { - return null; - } - String firstAlias = sameAliases.get(0); - //return null if more than one alias - for (String alias : sameAliases) { - if (!alias.equals(firstAlias)) { - return null; - } - } - return firstAlias; - } - - private void addOrderByToSelect(Select select, MySqlSelectQueryBlock queryBlock, List items, - String alias) - throws SqlParseException { - - Map aliasesToExpressions = queryBlock - 
.getSelectList() - .stream() - .filter(item -> item.getAlias() != null) - .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); - - for (SQLSelectOrderByItem sqlSelectOrderByItem : items) { - if (sqlSelectOrderByItem.getType() == null) { - sqlSelectOrderByItem.setType(SQLOrderingSpecification.ASC); - } - String type = sqlSelectOrderByItem.getType().toString(); - SQLExpr expr = extractExprFromOrderExpr(sqlSelectOrderByItem); - - if (expr instanceof SQLIdentifierExpr) { - if (queryBlock.getGroupBy() == null || queryBlock.getGroupBy().getItems().isEmpty()) { - if (aliasesToExpressions.containsKey(((SQLIdentifierExpr) expr).getName())) { - expr = aliasesToExpressions.get(((SQLIdentifierExpr) expr).getName()); - } - } - } - - Field field = fieldMaker.makeField(expr, null, null); - - SQLExpr sqlExpr = sqlSelectOrderByItem.getExpr(); - if (sqlExpr instanceof SQLBinaryOpExpr && hasNullOrderInBinaryOrderExpr(sqlExpr)) { - // override Field.expression to SQLBinaryOpExpr, - // which was set by FieldMaker.makeField() to SQLIdentifierExpr above - field.setExpression(sqlExpr); - } - - String orderByName; - if (field.isScriptField()) { - MethodField methodField = (MethodField) field; - - // 0 - generated field name - final int SCRIPT_CONTENT_INDEX = 1; - orderByName = methodField.getParams().get(SCRIPT_CONTENT_INDEX).toString(); - - } else { - orderByName = field.toString(); - } - - orderByName = orderByName.replace("`", ""); - if (alias != null) { - orderByName = orderByName.replaceFirst(alias + "\\.", ""); - } - select.addOrderBy(field.getNestedPath(), orderByName, type, field); - } + if (where instanceof Condition) { + Condition condition = (Condition) where; + String fieldName = condition.getName(); + for (String alias : aliases) { + String prefix = alias + "."; + if (fieldName.startsWith(prefix)) { + return alias; + } + } + throw new SqlParseException( + String.format( + "Field [%s] with condition [%s] does not contain an alias", + fieldName, 
condition.toString())); } - - private SQLExpr extractExprFromOrderExpr(SQLSelectOrderByItem sqlSelectOrderByItem) { - SQLExpr expr = sqlSelectOrderByItem.getExpr(); - - // extract SQLIdentifier from Order IS NULL/NOT NULL expression to generate Field - // else passing SQLBinaryOpExpr to FieldMaker.makeFieldImpl tries to convert to SQLMethodInvokeExpr - // and throws SQLParserException - if (hasNullOrderInBinaryOrderExpr(expr)) { - return ((SQLBinaryOpExpr) expr).getLeft(); - } - return expr; - } - - private boolean hasNullOrderInBinaryOrderExpr(SQLExpr expr) { - /** - * Valid AST that meets ORDER BY IS NULL/NOT NULL condition (true) - * - * SQLSelectOrderByItem - * | - * SQLBinaryOpExpr (Is || IsNot) - * / \ - * SQLIdentifierExpr SQLNullExpr - */ - if (!(expr instanceof SQLBinaryOpExpr)) { - return false; - } - - // check "shape of expression": - SQLBinaryOpExpr binaryExpr = (SQLBinaryOpExpr) expr; - if (!(binaryExpr.getLeft() instanceof SQLIdentifierExpr)|| !(binaryExpr.getRight() instanceof SQLNullExpr)) { - return false; - } - - // check that operator IS or IS NOT - SQLBinaryOperator operator = binaryExpr.getOperator(); - return operator == SQLBinaryOperator.Is || operator == SQLBinaryOperator.IsNot; - + List sameAliases = new ArrayList<>(); + if (where.getWheres() != null && where.getWheres().size() > 0) { + for (Where innerWhere : where.getWheres()) { + sameAliases.add(sameAliasWhere(innerWhere, aliases)); + } } - private void findLimit(MySqlSelectQueryBlock.Limit limit, Select select) { - - if (limit == null) { - return; - } + if (sameAliases.contains(null)) { + return null; + } + String firstAlias = sameAliases.get(0); + // return null if more than one alias + for (String alias : sameAliases) { + if (!alias.equals(firstAlias)) { + return null; + } + } + return firstAlias; + } + + private void addOrderByToSelect( + Select select, + MySqlSelectQueryBlock queryBlock, + List items, + String alias) + throws SqlParseException { + + Map aliasesToExpressions = + 
queryBlock.getSelectList().stream() + .filter(item -> item.getAlias() != null) + .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); + + for (SQLSelectOrderByItem sqlSelectOrderByItem : items) { + if (sqlSelectOrderByItem.getType() == null) { + sqlSelectOrderByItem.setType(SQLOrderingSpecification.ASC); + } + String type = sqlSelectOrderByItem.getType().toString(); + SQLExpr expr = extractExprFromOrderExpr(sqlSelectOrderByItem); + + if (expr instanceof SQLIdentifierExpr) { + if (queryBlock.getGroupBy() == null || queryBlock.getGroupBy().getItems().isEmpty()) { + if (aliasesToExpressions.containsKey(((SQLIdentifierExpr) expr).getName())) { + expr = aliasesToExpressions.get(((SQLIdentifierExpr) expr).getName()); + } + } + } + + Field field = fieldMaker.makeField(expr, null, null); + + SQLExpr sqlExpr = sqlSelectOrderByItem.getExpr(); + if (sqlExpr instanceof SQLBinaryOpExpr && hasNullOrderInBinaryOrderExpr(sqlExpr)) { + // override Field.expression to SQLBinaryOpExpr, + // which was set by FieldMaker.makeField() to SQLIdentifierExpr above + field.setExpression(sqlExpr); + } + + String orderByName; + if (field.isScriptField()) { + MethodField methodField = (MethodField) field; + + // 0 - generated field name + final int SCRIPT_CONTENT_INDEX = 1; + orderByName = methodField.getParams().get(SCRIPT_CONTENT_INDEX).toString(); + + } else { + orderByName = field.toString(); + } + + orderByName = orderByName.replace("`", ""); + if (alias != null) { + orderByName = orderByName.replaceFirst(alias + "\\.", ""); + } + select.addOrderBy(field.getNestedPath(), orderByName, type, field); + } + } - select.setRowCount(Integer.parseInt(limit.getRowCount().toString())); + private SQLExpr extractExprFromOrderExpr(SQLSelectOrderByItem sqlSelectOrderByItem) { + SQLExpr expr = sqlSelectOrderByItem.getExpr(); - if (limit.getOffset() != null) { - select.setOffset(Integer.parseInt(limit.getOffset().toString())); - } + // extract SQLIdentifier from Order IS NULL/NOT 
NULL expression to generate Field + // else passing SQLBinaryOpExpr to FieldMaker.makeFieldImpl tries to convert to + // SQLMethodInvokeExpr + // and throws SQLParserException + if (hasNullOrderInBinaryOrderExpr(expr)) { + return ((SQLBinaryOpExpr) expr).getLeft(); } + return expr; + } + private boolean hasNullOrderInBinaryOrderExpr(SQLExpr expr) { /** - * Parse the from clause + * Valid AST that meets ORDER BY IS NULL/NOT NULL condition (true) * - * @param from the from clause. - * @return list of From objects represents all the sources. + *

SQLSelectOrderByItem | SQLBinaryOpExpr (Is || IsNot) / \ SQLIdentifierExpr SQLNullExpr */ - private List findFrom(SQLTableSource from) { - boolean isSqlExprTable = from.getClass().isAssignableFrom(SQLExprTableSource.class); - - if (isSqlExprTable) { - SQLExprTableSource fromExpr = (SQLExprTableSource) from; - String[] split = fromExpr.getExpr().toString().split(","); - - ArrayList fromList = new ArrayList<>(); - for (String source : split) { - fromList.add(new From(source.trim(), fromExpr.getAlias())); - } - return fromList; - } + if (!(expr instanceof SQLBinaryOpExpr)) { + return false; + } - SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); - List fromList = new ArrayList<>(); - fromList.addAll(findFrom(joinTableSource.getLeft())); - fromList.addAll(findFrom(joinTableSource.getRight())); - return fromList; + // check "shape of expression": + SQLBinaryOpExpr binaryExpr = (SQLBinaryOpExpr) expr; + if (!(binaryExpr.getLeft() instanceof SQLIdentifierExpr) + || !(binaryExpr.getRight() instanceof SQLNullExpr)) { + return false; } - public JoinSelect parseJoinSelect(SQLQueryExpr sqlExpr) throws SqlParseException { + // check that operator IS or IS NOT + SQLBinaryOperator operator = binaryExpr.getOperator(); + return operator == SQLBinaryOperator.Is || operator == SQLBinaryOperator.IsNot; + } - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); + private void findLimit(MySqlSelectQueryBlock.Limit limit, Select select) { - List joinedFrom = findJoinedFrom(query.getFrom()); - if (joinedFrom.size() != 2) { - throw new RuntimeException("currently supports only 2 tables join"); - } + if (limit == null) { + return; + } - JoinSelect joinSelect = createBasicJoinSelectAccordingToTableSource((SQLJoinTableSource) query.getFrom()); - List hints = parseHints(query.getHints()); - joinSelect.setHints(hints); - String firstTableAlias = joinedFrom.get(0).getAlias(); - String secondTableAlias = joinedFrom.get(1).getAlias(); - Map 
aliasToWhere = splitAndFindWhere(query.getWhere(), firstTableAlias, secondTableAlias); - Map> aliasToOrderBy = splitAndFindOrder(query.getOrderBy(), firstTableAlias, - secondTableAlias); - List connectedConditions = getConditionsFlatten(joinSelect.getConnectedWhere()); - joinSelect.setConnectedConditions(connectedConditions); - fillTableSelectedJoin(joinSelect.getFirstTable(), query, joinedFrom.get(0), - aliasToWhere.get(firstTableAlias), aliasToOrderBy.get(firstTableAlias), connectedConditions); - fillTableSelectedJoin(joinSelect.getSecondTable(), query, joinedFrom.get(1), - aliasToWhere.get(secondTableAlias), aliasToOrderBy.get(secondTableAlias), connectedConditions); - - updateJoinLimit(query.getLimit(), joinSelect); - - //todo: throw error feature not supported: no group bys on joins ? - return joinSelect; - } - - private Map> splitAndFindOrder(SQLOrderBy orderBy, String firstTableAlias, - String secondTableAlias) - throws SqlParseException { - Map> aliasToOrderBys = new HashMap<>(); - aliasToOrderBys.put(firstTableAlias, new ArrayList<>()); - aliasToOrderBys.put(secondTableAlias, new ArrayList<>()); - if (orderBy == null) { - return aliasToOrderBys; - } - List orderByItems = orderBy.getItems(); - for (SQLSelectOrderByItem orderByItem : orderByItems) { - if (orderByItem.getExpr().toString().startsWith(firstTableAlias + ".")) { - aliasToOrderBys.get(firstTableAlias).add(orderByItem); - } else if (orderByItem.getExpr().toString().startsWith(secondTableAlias + ".")) { - aliasToOrderBys.get(secondTableAlias).add(orderByItem); - } else { - throw new SqlParseException("order by field on join request should have alias before, got " - + orderByItem.getExpr().toString()); - } + select.setRowCount(Integer.parseInt(limit.getRowCount().toString())); - } - return aliasToOrderBys; + if (limit.getOffset() != null) { + select.setOffset(Integer.parseInt(limit.getOffset().toString())); } - - private void updateJoinLimit(MySqlSelectQueryBlock.Limit limit, JoinSelect joinSelect) { 
- if (limit != null && limit.getRowCount() != null) { - int sizeLimit = Integer.parseInt(limit.getRowCount().toString()); - joinSelect.setTotalLimit(sizeLimit); - } + } + + /** + * Parse the from clause + * + * @param from the from clause. + * @return list of From objects represents all the sources. + */ + private List findFrom(SQLTableSource from) { + boolean isSqlExprTable = from.getClass().isAssignableFrom(SQLExprTableSource.class); + + if (isSqlExprTable) { + SQLExprTableSource fromExpr = (SQLExprTableSource) from; + String[] split = fromExpr.getExpr().toString().split(","); + + ArrayList fromList = new ArrayList<>(); + for (String source : split) { + fromList.add(new From(source.trim(), fromExpr.getAlias())); + } + return fromList; } - private List parseHints(List sqlHints) throws SqlParseException { - List hints = new ArrayList<>(); - for (SQLCommentHint sqlHint : sqlHints) { - Hint hint = HintFactory.getHintFromString(sqlHint.getText()); - if (hint != null) { - hints.add(hint); - } - } - return hints; - } + SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); + List fromList = new ArrayList<>(); + fromList.addAll(findFrom(joinTableSource.getLeft())); + fromList.addAll(findFrom(joinTableSource.getRight())); + return fromList; + } - private JoinSelect createBasicJoinSelectAccordingToTableSource(SQLJoinTableSource joinTableSource) - throws SqlParseException { - JoinSelect joinSelect = new JoinSelect(); - if (joinTableSource.getCondition() != null) { - Where where = Where.newInstance(); - WhereParser whereParser = new WhereParser(this, joinTableSource.getCondition()); - whereParser.parseWhere(joinTableSource.getCondition(), where); - joinSelect.setConnectedWhere(where); - } - SQLJoinTableSource.JoinType joinType = joinTableSource.getJoinType(); - joinSelect.setJoinType(joinType); - return joinSelect; - } + public JoinSelect parseJoinSelect(SQLQueryExpr sqlExpr) throws SqlParseException { - private Map splitAndFindWhere(SQLExpr whereExpr, String 
firstTableAlias, String secondTableAlias) - throws SqlParseException { - WhereParser whereParser = new WhereParser(this, whereExpr); - Where where = whereParser.findWhere(); - return splitWheres(where, firstTableAlias, secondTableAlias); - } + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); - private void fillTableSelectedJoin(TableOnJoinSelect tableOnJoin, MySqlSelectQueryBlock query, From tableFrom, - Where where, List orderBys, List conditions) - throws SqlParseException { - String alias = tableFrom.getAlias(); - fillBasicTableSelectJoin(tableOnJoin, tableFrom, where, orderBys, query); - tableOnJoin.setConnectedFields(getConnectedFields(conditions, alias)); - tableOnJoin.setSelectedFields(new ArrayList<>(tableOnJoin.getFields())); - tableOnJoin.setAlias(alias); - tableOnJoin.fillSubQueries(); + List joinedFrom = findJoinedFrom(query.getFrom()); + if (joinedFrom.size() != 2) { + throw new RuntimeException("currently supports only 2 tables join"); } - private List getConnectedFields(List conditions, String alias) throws SqlParseException { - List fields = new ArrayList<>(); - String prefix = alias + "."; - for (Condition condition : conditions) { - if (condition.getName().startsWith(prefix)) { - fields.add(new Field(condition.getName().replaceFirst(prefix, ""), null)); - } else { - if (!((condition.getValue() instanceof SQLPropertyExpr) - || (condition.getValue() instanceof SQLIdentifierExpr) - || (condition.getValue() instanceof String))) { - throw new SqlParseException("Illegal condition content: " + condition.toString()); - } - String aliasDotValue = condition.getValue().toString(); - int indexOfDot = aliasDotValue.indexOf("."); - String owner = aliasDotValue.substring(0, indexOfDot); - if (owner.equals(alias)) { - fields.add(new Field(aliasDotValue.substring(indexOfDot + 1), null)); - } - } - } - return fields; + JoinSelect joinSelect = + createBasicJoinSelectAccordingToTableSource((SQLJoinTableSource) query.getFrom()); 
+ List hints = parseHints(query.getHints()); + joinSelect.setHints(hints); + String firstTableAlias = joinedFrom.get(0).getAlias(); + String secondTableAlias = joinedFrom.get(1).getAlias(); + Map aliasToWhere = + splitAndFindWhere(query.getWhere(), firstTableAlias, secondTableAlias); + Map> aliasToOrderBy = + splitAndFindOrder(query.getOrderBy(), firstTableAlias, secondTableAlias); + List connectedConditions = getConditionsFlatten(joinSelect.getConnectedWhere()); + joinSelect.setConnectedConditions(connectedConditions); + fillTableSelectedJoin( + joinSelect.getFirstTable(), + query, + joinedFrom.get(0), + aliasToWhere.get(firstTableAlias), + aliasToOrderBy.get(firstTableAlias), + connectedConditions); + fillTableSelectedJoin( + joinSelect.getSecondTable(), + query, + joinedFrom.get(1), + aliasToWhere.get(secondTableAlias), + aliasToOrderBy.get(secondTableAlias), + connectedConditions); + + updateJoinLimit(query.getLimit(), joinSelect); + + // todo: throw error feature not supported: no group bys on joins ? 
+ return joinSelect; + } + + private Map> splitAndFindOrder( + SQLOrderBy orderBy, String firstTableAlias, String secondTableAlias) + throws SqlParseException { + Map> aliasToOrderBys = new HashMap<>(); + aliasToOrderBys.put(firstTableAlias, new ArrayList<>()); + aliasToOrderBys.put(secondTableAlias, new ArrayList<>()); + if (orderBy == null) { + return aliasToOrderBys; } - - private void fillBasicTableSelectJoin(TableOnJoinSelect select, From from, Where where, - List orderBys, MySqlSelectQueryBlock query) - throws SqlParseException { - select.getFrom().add(from); - findSelect(query, select, from.getAlias()); - select.setWhere(where); - addOrderByToSelect(select, query, orderBys, from.getAlias()); + List orderByItems = orderBy.getItems(); + for (SQLSelectOrderByItem orderByItem : orderByItems) { + if (orderByItem.getExpr().toString().startsWith(firstTableAlias + ".")) { + aliasToOrderBys.get(firstTableAlias).add(orderByItem); + } else if (orderByItem.getExpr().toString().startsWith(secondTableAlias + ".")) { + aliasToOrderBys.get(secondTableAlias).add(orderByItem); + } else { + throw new SqlParseException( + "order by field on join request should have alias before, got " + + orderByItem.getExpr().toString()); + } } + return aliasToOrderBys; + } - private List getJoinConditionsFlatten(SQLJoinTableSource from) throws SqlParseException { - List conditions = new ArrayList<>(); - if (from.getCondition() == null) { - return conditions; - } - Where where = Where.newInstance(); - WhereParser whereParser = new WhereParser(this, from.getCondition()); - whereParser.parseWhere(from.getCondition(), where); - addIfConditionRecursive(where, conditions); - return conditions; + private void updateJoinLimit(MySqlSelectQueryBlock.Limit limit, JoinSelect joinSelect) { + if (limit != null && limit.getRowCount() != null) { + int sizeLimit = Integer.parseInt(limit.getRowCount().toString()); + joinSelect.setTotalLimit(sizeLimit); } - - private List getConditionsFlatten(Where where) 
throws SqlParseException { - List conditions = new ArrayList<>(); - if (where == null) { - return conditions; - } - addIfConditionRecursive(where, conditions); - return conditions; + } + + private List parseHints(List sqlHints) throws SqlParseException { + List hints = new ArrayList<>(); + for (SQLCommentHint sqlHint : sqlHints) { + Hint hint = HintFactory.getHintFromString(sqlHint.getText()); + if (hint != null) { + hints.add(hint); + } + } + return hints; + } + + private JoinSelect createBasicJoinSelectAccordingToTableSource(SQLJoinTableSource joinTableSource) + throws SqlParseException { + JoinSelect joinSelect = new JoinSelect(); + if (joinTableSource.getCondition() != null) { + Where where = Where.newInstance(); + WhereParser whereParser = new WhereParser(this, joinTableSource.getCondition()); + whereParser.parseWhere(joinTableSource.getCondition(), where); + joinSelect.setConnectedWhere(where); + } + SQLJoinTableSource.JoinType joinType = joinTableSource.getJoinType(); + joinSelect.setJoinType(joinType); + return joinSelect; + } + + private Map splitAndFindWhere( + SQLExpr whereExpr, String firstTableAlias, String secondTableAlias) throws SqlParseException { + WhereParser whereParser = new WhereParser(this, whereExpr); + Where where = whereParser.findWhere(); + return splitWheres(where, firstTableAlias, secondTableAlias); + } + + private void fillTableSelectedJoin( + TableOnJoinSelect tableOnJoin, + MySqlSelectQueryBlock query, + From tableFrom, + Where where, + List orderBys, + List conditions) + throws SqlParseException { + String alias = tableFrom.getAlias(); + fillBasicTableSelectJoin(tableOnJoin, tableFrom, where, orderBys, query); + tableOnJoin.setConnectedFields(getConnectedFields(conditions, alias)); + tableOnJoin.setSelectedFields(new ArrayList<>(tableOnJoin.getFields())); + tableOnJoin.setAlias(alias); + tableOnJoin.fillSubQueries(); + } + + private List getConnectedFields(List conditions, String alias) + throws SqlParseException { + List fields = 
new ArrayList<>(); + String prefix = alias + "."; + for (Condition condition : conditions) { + if (condition.getName().startsWith(prefix)) { + fields.add(new Field(condition.getName().replaceFirst(prefix, ""), null)); + } else { + if (!((condition.getValue() instanceof SQLPropertyExpr) + || (condition.getValue() instanceof SQLIdentifierExpr) + || (condition.getValue() instanceof String))) { + throw new SqlParseException("Illegal condition content: " + condition.toString()); + } + String aliasDotValue = condition.getValue().toString(); + int indexOfDot = aliasDotValue.indexOf("."); + String owner = aliasDotValue.substring(0, indexOfDot); + if (owner.equals(alias)) { + fields.add(new Field(aliasDotValue.substring(indexOfDot + 1), null)); + } + } + } + return fields; + } + + private void fillBasicTableSelectJoin( + TableOnJoinSelect select, + From from, + Where where, + List orderBys, + MySqlSelectQueryBlock query) + throws SqlParseException { + select.getFrom().add(from); + findSelect(query, select, from.getAlias()); + select.setWhere(where); + addOrderByToSelect(select, query, orderBys, from.getAlias()); + } + + private List getJoinConditionsFlatten(SQLJoinTableSource from) + throws SqlParseException { + List conditions = new ArrayList<>(); + if (from.getCondition() == null) { + return conditions; + } + Where where = Where.newInstance(); + WhereParser whereParser = new WhereParser(this, from.getCondition()); + whereParser.parseWhere(from.getCondition(), where); + addIfConditionRecursive(where, conditions); + return conditions; + } + + private List getConditionsFlatten(Where where) throws SqlParseException { + List conditions = new ArrayList<>(); + if (where == null) { + return conditions; + } + addIfConditionRecursive(where, conditions); + return conditions; + } + + private Map splitWheres(Where where, String... 
aliases) throws SqlParseException { + Map aliasToWhere = new HashMap<>(); + for (String alias : aliases) { + aliasToWhere.put(alias, null); + } + if (where == null) { + return aliasToWhere; } + String allWhereFromSameAlias = sameAliasWhere(where, aliases); + if (allWhereFromSameAlias != null) { + removeAliasPrefix(where, allWhereFromSameAlias); + aliasToWhere.put(allWhereFromSameAlias, where); + return aliasToWhere; + } + for (Where innerWhere : where.getWheres()) { + String sameAlias = sameAliasWhere(innerWhere, aliases); + if (sameAlias == null) { + throw new SqlParseException( + "Currently support only one hierarchy on different tables where"); + } + removeAliasPrefix(innerWhere, sameAlias); + Where aliasCurrentWhere = aliasToWhere.get(sameAlias); + if (aliasCurrentWhere == null) { + aliasToWhere.put(sameAlias, innerWhere); + } else { + Where andWhereContainer = Where.newInstance(); + andWhereContainer.addWhere(aliasCurrentWhere); + andWhereContainer.addWhere(innerWhere); + aliasToWhere.put(sameAlias, andWhereContainer); + } + } - private Map splitWheres(Where where, String... 
aliases) throws SqlParseException { - Map aliasToWhere = new HashMap<>(); - for (String alias : aliases) { - aliasToWhere.put(alias, null); - } - if (where == null) { - return aliasToWhere; - } + return aliasToWhere; + } - String allWhereFromSameAlias = sameAliasWhere(where, aliases); - if (allWhereFromSameAlias != null) { - removeAliasPrefix(where, allWhereFromSameAlias); - aliasToWhere.put(allWhereFromSameAlias, where); - return aliasToWhere; - } - for (Where innerWhere : where.getWheres()) { - String sameAlias = sameAliasWhere(innerWhere, aliases); - if (sameAlias == null) { - throw new SqlParseException("Currently support only one hierarchy on different tables where"); - } - removeAliasPrefix(innerWhere, sameAlias); - Where aliasCurrentWhere = aliasToWhere.get(sameAlias); - if (aliasCurrentWhere == null) { - aliasToWhere.put(sameAlias, innerWhere); - } else { - Where andWhereContainer = Where.newInstance(); - andWhereContainer.addWhere(aliasCurrentWhere); - andWhereContainer.addWhere(innerWhere); - aliasToWhere.put(sameAlias, andWhereContainer); - } - } + private void removeAliasPrefix(Where where, String alias) { - return aliasToWhere; + if (where instanceof Condition) { + Condition cond = (Condition) where; + String aliasPrefix = alias + "."; + cond.setName(cond.getName().replaceFirst(aliasPrefix, "")); + return; } - - private void removeAliasPrefix(Where where, String alias) { - - if (where instanceof Condition) { - Condition cond = (Condition) where; - String aliasPrefix = alias + "."; - cond.setName(cond.getName().replaceFirst(aliasPrefix, "")); - return; - } - for (Where innerWhere : where.getWheres()) { - removeAliasPrefix(innerWhere, alias); - } + for (Where innerWhere : where.getWheres()) { + removeAliasPrefix(innerWhere, alias); } - - private void addIfConditionRecursive(Where where, List conditions) throws SqlParseException { - if (where instanceof Condition) { - Condition cond = (Condition) where; - if (!((cond.getValue() instanceof 
SQLIdentifierExpr) || (cond.getValue() instanceof SQLPropertyExpr) - || (cond.getValue() instanceof String))) { - throw new SqlParseException("conditions on join should be one side is secondTable OPEAR firstTable, " - + "condition was:" + cond.toString()); - } - conditions.add(cond); - } - for (Where innerWhere : where.getWheres()) { - addIfConditionRecursive(innerWhere, conditions); - } + } + + private void addIfConditionRecursive(Where where, List conditions) + throws SqlParseException { + if (where instanceof Condition) { + Condition cond = (Condition) where; + if (!((cond.getValue() instanceof SQLIdentifierExpr) + || (cond.getValue() instanceof SQLPropertyExpr) + || (cond.getValue() instanceof String))) { + throw new SqlParseException( + "conditions on join should be one side is secondTable OPEAR firstTable, " + + "condition was:" + + cond.toString()); + } + conditions.add(cond); } - - private List findJoinedFrom(SQLTableSource from) { - SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); - List fromList = new ArrayList<>(); - fromList.addAll(findFrom(joinTableSource.getLeft())); - fromList.addAll(findFrom(joinTableSource.getRight())); - return fromList; + for (Where innerWhere : where.getWheres()) { + addIfConditionRecursive(innerWhere, conditions); } - - + } + + private List findJoinedFrom(SQLTableSource from) { + SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); + List fromList = new ArrayList<>(); + fromList.addAll(findFrom(joinTableSource.getLeft())); + fromList.addAll(findFrom(joinTableSource.getRight())); + return fromList; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java index 168318c490..e9b0797d00 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java @@ -3,42 +3,39 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import org.opensearch.sql.legacy.domain.Select; -/** - * Created by Eliran on 3/10/2015. - */ +/** Created by Eliran on 3/10/2015. */ public class SubQueryExpression { - private Object[] values; - private Select select; - private String returnField; - - public SubQueryExpression(Select innerSelect) { - this.select = innerSelect; - this.returnField = select.getFields().get(0).getName(); - values = null; - } - - public Object[] getValues() { - return values; - } - - public void setValues(Object[] values) { - this.values = values; - } - - public Select getSelect() { - return select; - } - - public void setSelect(Select select) { - this.select = select; - } - - public String getReturnField() { - return returnField; - } + private Object[] values; + private Select select; + private String returnField; + + public SubQueryExpression(Select innerSelect) { + this.select = innerSelect; + this.returnField = select.getFields().get(0).getName(); + values = null; + } + + public Object[] getValues() { + return values; + } + + public void setValues(Object[] values) { + this.values = values; + } + + public Select getSelect() { + return select; + } + + public void setSelect(Select select) { + this.select = select; + } + + public String getReturnField() { + return returnField; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java index 71b19db0cf..b6d04ddd54 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -23,88 +22,89 @@ import org.opensearch.sql.legacy.domain.Where; import org.opensearch.sql.legacy.exception.SqlParseException; 
-/** - * Definition of SubQuery Parser - */ +/** Definition of SubQuery Parser */ public class SubQueryParser { - private final SqlParser sqlParser; + private final SqlParser sqlParser; - public SubQueryParser(SqlParser sqlParser) { - this.sqlParser = sqlParser; - } + public SubQueryParser(SqlParser sqlParser) { + this.sqlParser = sqlParser; + } - public boolean containSubqueryInFrom(MySqlSelectQueryBlock query) { - return query.getFrom() instanceof SQLSubqueryTableSource; - } + public boolean containSubqueryInFrom(MySqlSelectQueryBlock query) { + return query.getFrom() instanceof SQLSubqueryTableSource; + } - public Select parseSubQueryInFrom(MySqlSelectQueryBlock query) throws SqlParseException { - assert query.getFrom() instanceof SQLSubqueryTableSource; + public Select parseSubQueryInFrom(MySqlSelectQueryBlock query) throws SqlParseException { + assert query.getFrom() instanceof SQLSubqueryTableSource; - Select select = sqlParser.parseSelect( - (MySqlSelectQueryBlock) ((SQLSubqueryTableSource) query.getFrom()).getSelect() - .getQuery()); - String subQueryAlias = query.getFrom().getAlias(); - return pushSelect(query.getSelectList(), select, subQueryAlias); - } + Select select = + sqlParser.parseSelect( + (MySqlSelectQueryBlock) + ((SQLSubqueryTableSource) query.getFrom()).getSelect().getQuery()); + String subQueryAlias = query.getFrom().getAlias(); + return pushSelect(query.getSelectList(), select, subQueryAlias); + } - private Select pushSelect(List selectItems, Select subquerySelect, String subQueryAlias) { - Map> fieldAliasRewriter = prepareFieldAliasRewriter( - selectItems, - subQueryAlias); + private Select pushSelect( + List selectItems, Select subquerySelect, String subQueryAlias) { + Map> fieldAliasRewriter = + prepareFieldAliasRewriter(selectItems, subQueryAlias); - //1. 
rewrite field in select list - Iterator fieldIterator = subquerySelect.getFields().iterator(); - while (fieldIterator.hasNext()) { - Field field = fieldIterator.next(); - /* - * return true if the subquerySelectItem in the final select list. - * for example, subquerySelectItem is "SUM(emp.empno) as TEMP", - * and final select list is TEMP. then return true. - */ - String fieldIdentifier = Strings.isNullOrEmpty(field.getAlias()) ? field.getName() : field.getAlias(); - if (fieldAliasRewriter.containsKey(fieldIdentifier)) { - field.setAlias(fieldAliasRewriter.get(fieldIdentifier).apply(fieldIdentifier)); - } else { - fieldIterator.remove(); - } - } - - //2. rewrite field in order by - for (Order orderBy : subquerySelect.getOrderBys()) { - if (fieldAliasRewriter.containsKey(orderBy.getName())) { - String replaceOrderName = fieldAliasRewriter.get(orderBy.getName()).apply(orderBy.getName()); - orderBy.setName(replaceOrderName); - orderBy.getSortField().setName(replaceOrderName); - } - } + // 1. rewrite field in select list + Iterator fieldIterator = subquerySelect.getFields().iterator(); + while (fieldIterator.hasNext()) { + Field field = fieldIterator.next(); + /* + * return true if the subquerySelectItem in the final select list. + * for example, subquerySelectItem is "SUM(emp.empno) as TEMP", + * and final select list is TEMP. then return true. + */ + String fieldIdentifier = + Strings.isNullOrEmpty(field.getAlias()) ? field.getName() : field.getAlias(); + if (fieldAliasRewriter.containsKey(fieldIdentifier)) { + field.setAlias(fieldAliasRewriter.get(fieldIdentifier).apply(fieldIdentifier)); + } else { + fieldIterator.remove(); + } + } - // 3. 
rewrite field in having - if (subquerySelect.getHaving() != null) { - for (Where condition : subquerySelect.getHaving().getConditions()) { - Condition cond = (Condition) condition; - if (fieldAliasRewriter.containsKey(cond.getName())) { - String replaceOrderName = fieldAliasRewriter.get(cond.getName()).apply(cond.getName()); - cond.setName(replaceOrderName); - } - } - } - return subquerySelect; + // 2. rewrite field in order by + for (Order orderBy : subquerySelect.getOrderBys()) { + if (fieldAliasRewriter.containsKey(orderBy.getName())) { + String replaceOrderName = + fieldAliasRewriter.get(orderBy.getName()).apply(orderBy.getName()); + orderBy.setName(replaceOrderName); + orderBy.getSortField().setName(replaceOrderName); + } } - private Map> prepareFieldAliasRewriter(List selectItems, - String owner) { - HashMap> selectMap = new HashMap<>(); - for (SQLSelectItem item : selectItems) { - if (Strings.isNullOrEmpty(item.getAlias())) { - selectMap.put(getFieldName(item.getExpr(), owner), Function.identity()); - } else { - selectMap.put(getFieldName(item.getExpr(), owner), s -> item.getAlias()); - } + // 3. 
rewrite field in having + if (subquerySelect.getHaving() != null) { + for (Where condition : subquerySelect.getHaving().getConditions()) { + Condition cond = (Condition) condition; + if (fieldAliasRewriter.containsKey(cond.getName())) { + String replaceOrderName = fieldAliasRewriter.get(cond.getName()).apply(cond.getName()); + cond.setName(replaceOrderName); } - return selectMap; + } } + return subquerySelect; + } - private String getFieldName(SQLExpr expr, String owner) { - return expr.toString().replace(String.format("%s.", owner), ""); + private Map> prepareFieldAliasRewriter( + List selectItems, String owner) { + HashMap> selectMap = new HashMap<>(); + for (SQLSelectItem item : selectItems) { + if (Strings.isNullOrEmpty(item.getAlias())) { + selectMap.put(getFieldName(item.getExpr(), owner), Function.identity()); + } else { + selectMap.put(getFieldName(item.getExpr(), owner), s -> item.getAlias()); + } } + return selectMap; + } + + private String getFieldName(SQLExpr expr, String owner) { + return expr.toString().replace(String.format("%s.", owner), ""); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java index c3ea5270e3..a329d1ed52 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -43,637 +42,808 @@ import org.opensearch.sql.legacy.utils.SQLFunctions; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by allwefantasy on 9/2/16. - */ +/** Created by allwefantasy on 9/2/16. 
*/ public class WhereParser { - private FieldMaker fieldMaker; + private FieldMaker fieldMaker; - private MySqlSelectQueryBlock query; - private SQLDeleteStatement delete; - private SQLExpr where; - private SqlParser sqlParser; + private MySqlSelectQueryBlock query; + private SQLDeleteStatement delete; + private SQLExpr where; + private SqlParser sqlParser; - public WhereParser(SqlParser sqlParser, MySqlSelectQueryBlock query, FieldMaker fieldMaker) { - this.sqlParser = sqlParser; - this.where = query.getWhere(); - - this.query = query; - this.fieldMaker = fieldMaker; - } + public WhereParser(SqlParser sqlParser, MySqlSelectQueryBlock query, FieldMaker fieldMaker) { + this.sqlParser = sqlParser; + this.where = query.getWhere(); - public WhereParser(SqlParser sqlParser, SQLDeleteStatement delete) { - this(sqlParser, delete.getWhere()); + this.query = query; + this.fieldMaker = fieldMaker; + } - this.delete = delete; - } + public WhereParser(SqlParser sqlParser, SQLDeleteStatement delete) { + this(sqlParser, delete.getWhere()); - public WhereParser(SqlParser sqlParser, SQLExpr expr) { - this(sqlParser); - this.where = expr; - } + this.delete = delete; + } - public WhereParser(SqlParser sqlParser) { - this.sqlParser = sqlParser; - this.fieldMaker = new FieldMaker(); - } + public WhereParser(SqlParser sqlParser, SQLExpr expr) { + this(sqlParser); + this.where = expr; + } - public Where findWhere() throws SqlParseException { - if (where == null) { - return null; - } + public WhereParser(SqlParser sqlParser) { + this.sqlParser = sqlParser; + this.fieldMaker = new FieldMaker(); + } - Where myWhere = Where.newInstance(); - parseWhere(where, myWhere); - return myWhere; + public Where findWhere() throws SqlParseException { + if (where == null) { + return null; } - public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { - if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; - if 
(explainSpecialCondWithBothSidesAreLiterals(bExpr, where)) { - return; - } - if (explainSpecialCondWithBothSidesAreProperty(bExpr, where)) { - return; - } - } - - if (expr instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) expr)) { - SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; - routeCond(bExpr, bExpr.getLeft(), where); - routeCond(bExpr, bExpr.getRight(), where); - } else if (expr instanceof SQLNotExpr) { - parseWhere(((SQLNotExpr) expr).getExpr(), where); - negateWhere(where); - } else { - explainCond("AND", expr, where); - } + Where myWhere = Where.newInstance(); + parseWhere(where, myWhere); + return myWhere; + } + + public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { + if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; + if (explainSpecialCondWithBothSidesAreLiterals(bExpr, where)) { + return; + } + if (explainSpecialCondWithBothSidesAreProperty(bExpr, where)) { + return; + } } - private void negateWhere(Where where) throws SqlParseException { - for (Where sub : where.getWheres()) { - if (sub instanceof Condition) { - Condition cond = (Condition) sub; - cond.setOPERATOR(cond.getOPERATOR().negative()); - } else { - negateWhere(sub); - } - sub.setConn(sub.getConn().negative()); - } + if (expr instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) expr)) { + SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; + routeCond(bExpr, bExpr.getLeft(), where); + routeCond(bExpr, bExpr.getRight(), where); + } else if (expr instanceof SQLNotExpr) { + parseWhere(((SQLNotExpr) expr).getExpr(), where); + negateWhere(where); + } else { + explainCond("AND", expr, where); } - - //some where conditions eg. 
1=1 or 3>2 or 'a'='b' - private boolean explainSpecialCondWithBothSidesAreLiterals(SQLBinaryOpExpr bExpr, Where where) - throws SqlParseException { - if ((bExpr.getLeft() instanceof SQLNumericLiteralExpr || bExpr.getLeft() instanceof SQLCharExpr) - && (bExpr.getRight() instanceof SQLNumericLiteralExpr || bExpr.getRight() instanceof SQLCharExpr) - ) { - SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); - String operator = bExpr.getOperator().getName(); - if (operator.equals("=")) { - operator = "=="; - } - sqlMethodInvokeExpr.addParameter( - new SQLCharExpr(Util.expr2Object(bExpr.getLeft(), "'") - + " " + operator + " " + Util.expr2Object(bExpr.getRight(), "'")) - ); - - explainCond("AND", sqlMethodInvokeExpr, where); - return true; - } - return false; + } + + private void negateWhere(Where where) throws SqlParseException { + for (Where sub : where.getWheres()) { + if (sub instanceof Condition) { + Condition cond = (Condition) sub; + cond.setOPERATOR(cond.getOPERATOR().negative()); + } else { + negateWhere(sub); + } + sub.setConn(sub.getConn().negative()); } - - //some where conditions eg. 
field1=field2 or field1>field2 - private boolean explainSpecialCondWithBothSidesAreProperty(SQLBinaryOpExpr bExpr, Where where) - throws SqlParseException { - //join is not support - if ((bExpr.getLeft() instanceof SQLPropertyExpr || bExpr.getLeft() instanceof SQLIdentifierExpr) - && (bExpr.getRight() instanceof SQLPropertyExpr || bExpr.getRight() instanceof SQLIdentifierExpr) - && Sets.newHashSet("=", "<", ">", ">=", "<=").contains(bExpr.getOperator().getName()) - && !Util.isFromJoinOrUnionTable(bExpr) - ) { - SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); - String operator = bExpr.getOperator().getName(); - if (operator.equals("=")) { - operator = "=="; - } - - String leftProperty = Util.expr2Object(bExpr.getLeft()).toString(); - String rightProperty = Util.expr2Object(bExpr.getRight()).toString(); - if (leftProperty.split("\\.").length > 1) { - - leftProperty = leftProperty.substring(leftProperty.split("\\.")[0].length() + 1); - } - - if (rightProperty.split("\\.").length > 1) { - rightProperty = rightProperty.substring(rightProperty.split("\\.")[0].length() + 1); - } - - sqlMethodInvokeExpr.addParameter(new SQLCharExpr( - "doc['" + leftProperty + "'].value " + operator + " doc['" + rightProperty + "'].value")); - - explainCond("AND", sqlMethodInvokeExpr, where); - return true; - } - return false; + } + + // some where conditions eg. 
1=1 or 3>2 or 'a'='b' + private boolean explainSpecialCondWithBothSidesAreLiterals(SQLBinaryOpExpr bExpr, Where where) + throws SqlParseException { + if ((bExpr.getLeft() instanceof SQLNumericLiteralExpr || bExpr.getLeft() instanceof SQLCharExpr) + && (bExpr.getRight() instanceof SQLNumericLiteralExpr + || bExpr.getRight() instanceof SQLCharExpr)) { + SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); + String operator = bExpr.getOperator().getName(); + if (operator.equals("=")) { + operator = "=="; + } + sqlMethodInvokeExpr.addParameter( + new SQLCharExpr( + Util.expr2Object(bExpr.getLeft(), "'") + + " " + + operator + + " " + + Util.expr2Object(bExpr.getRight(), "'"))); + + explainCond("AND", sqlMethodInvokeExpr, where); + return true; } - - - private boolean isCond(SQLBinaryOpExpr expr) { - SQLExpr leftSide = expr.getLeft(); - if (leftSide instanceof SQLMethodInvokeExpr) { - return isAllowedMethodOnConditionLeft((SQLMethodInvokeExpr) leftSide, expr.getOperator()); - } - return leftSide instanceof SQLIdentifierExpr - || leftSide instanceof SQLPropertyExpr - || leftSide instanceof SQLVariantRefExpr - || leftSide instanceof SQLCastExpr; + return false; + } + + // some where conditions eg. 
field1=field2 or field1>field2 + private boolean explainSpecialCondWithBothSidesAreProperty(SQLBinaryOpExpr bExpr, Where where) + throws SqlParseException { + // join is not support + if ((bExpr.getLeft() instanceof SQLPropertyExpr || bExpr.getLeft() instanceof SQLIdentifierExpr) + && (bExpr.getRight() instanceof SQLPropertyExpr + || bExpr.getRight() instanceof SQLIdentifierExpr) + && Sets.newHashSet("=", "<", ">", ">=", "<=").contains(bExpr.getOperator().getName()) + && !Util.isFromJoinOrUnionTable(bExpr)) { + SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); + String operator = bExpr.getOperator().getName(); + if (operator.equals("=")) { + operator = "=="; + } + + String leftProperty = Util.expr2Object(bExpr.getLeft()).toString(); + String rightProperty = Util.expr2Object(bExpr.getRight()).toString(); + if (leftProperty.split("\\.").length > 1) { + + leftProperty = leftProperty.substring(leftProperty.split("\\.")[0].length() + 1); + } + + if (rightProperty.split("\\.").length > 1) { + rightProperty = rightProperty.substring(rightProperty.split("\\.")[0].length() + 1); + } + + sqlMethodInvokeExpr.addParameter( + new SQLCharExpr( + "doc['" + + leftProperty + + "'].value " + + operator + + " doc['" + + rightProperty + + "'].value")); + + explainCond("AND", sqlMethodInvokeExpr, where); + return true; } + return false; + } - private boolean isAllowedMethodOnConditionLeft(SQLMethodInvokeExpr method, SQLBinaryOperator operator) { - return (method.getMethodName().toLowerCase().equals("nested") - || method.getMethodName().toLowerCase().equals("children") - || SQLFunctions.isFunctionTranslatedToScript(method.getMethodName()) - ) && !operator.isLogical(); + private boolean isCond(SQLBinaryOpExpr expr) { + SQLExpr leftSide = expr.getLeft(); + if (leftSide instanceof SQLMethodInvokeExpr) { + return isAllowedMethodOnConditionLeft((SQLMethodInvokeExpr) leftSide, expr.getOperator()); } - - - private void routeCond(SQLBinaryOpExpr bExpr, SQLExpr 
sub, Where where) throws SqlParseException { - if (sub instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) sub)) { - SQLBinaryOpExpr binarySub = (SQLBinaryOpExpr) sub; - if (binarySub.getOperator().priority != bExpr.getOperator().priority) { - Where subWhere = new Where(bExpr.getOperator().name); - where.addWhere(subWhere); - parseWhere(binarySub, subWhere); - } else { - parseWhere(binarySub, where); - } - } else if (sub instanceof SQLNotExpr) { - Where subWhere = new Where(bExpr.getOperator().name); - where.addWhere(subWhere); - parseWhere(((SQLNotExpr) sub).getExpr(), subWhere); - negateWhere(subWhere); - } else { - explainCond(bExpr.getOperator().name, sub, where); - } + return leftSide instanceof SQLIdentifierExpr + || leftSide instanceof SQLPropertyExpr + || leftSide instanceof SQLVariantRefExpr + || leftSide instanceof SQLCastExpr; + } + + private boolean isAllowedMethodOnConditionLeft( + SQLMethodInvokeExpr method, SQLBinaryOperator operator) { + return (method.getMethodName().toLowerCase().equals("nested") + || method.getMethodName().toLowerCase().equals("children") + || SQLFunctions.isFunctionTranslatedToScript(method.getMethodName())) + && !operator.isLogical(); + } + + private void routeCond(SQLBinaryOpExpr bExpr, SQLExpr sub, Where where) throws SqlParseException { + if (sub instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) sub)) { + SQLBinaryOpExpr binarySub = (SQLBinaryOpExpr) sub; + if (binarySub.getOperator().priority != bExpr.getOperator().priority) { + Where subWhere = new Where(bExpr.getOperator().name); + where.addWhere(subWhere); + parseWhere(binarySub, subWhere); + } else { + parseWhere(binarySub, where); + } + } else if (sub instanceof SQLNotExpr) { + Where subWhere = new Where(bExpr.getOperator().name); + where.addWhere(subWhere); + parseWhere(((SQLNotExpr) sub).getExpr(), subWhere); + negateWhere(subWhere); + } else { + explainCond(bExpr.getOperator().name, sub, where); } - - private void explainCond(String opear, SQLExpr expr, 
Where where) throws SqlParseException { - if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr soExpr = (SQLBinaryOpExpr) expr; - - boolean methodAsOpear = false; - - boolean isNested = false; - boolean isChildren = false; - - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(soExpr.getLeft())) { - soExpr.setLeft(new SQLIdentifierExpr(nestedType.field)); - isNested = true; - } - - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(soExpr.getLeft())) { - soExpr.setLeft(new SQLIdentifierExpr(childrenType.field)); - isChildren = true; - } - - if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) soExpr.getRight(); - String methodName = method.getMethodName().toLowerCase(); - - if (Condition.OPERATOR.methodNameToOpear.containsKey(methodName)) { - Object[] methodParametersValue = getMethodValuesWithSubQueries(method); - - final Condition condition; - // fix OPEAR - Condition.OPERATOR oper = Condition.OPERATOR.methodNameToOpear.get(methodName); - if (soExpr.getOperator() == SQLBinaryOperator.LessThanOrGreater - || soExpr.getOperator() == SQLBinaryOperator.NotEqual) { - oper = oper.negative(); - } - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), oper, methodParametersValue, soExpr.getRight(), nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), oper, methodParametersValue, soExpr.getRight(), childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), oper, methodParametersValue, soExpr.getRight(), null); - } - - where.addWhere(condition); - methodAsOpear = true; - } - } - - if (!methodAsOpear) { - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), 
soExpr.getLeft(), - soExpr.getOperator().name, parseValue(soExpr.getRight()), soExpr.getRight(), nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), soExpr.getLeft(), - soExpr.getOperator().name, parseValue(soExpr.getRight()), soExpr.getRight(), childrenType); - } else { - SQLMethodInvokeExpr sqlMethodInvokeExpr = parseSQLBinaryOpExprWhoIsConditionInWhere(soExpr); - if (sqlMethodInvokeExpr == null) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), soExpr.getOperator().name, parseValue(soExpr.getRight()), - soExpr.getRight(), null); - } else { - ScriptFilter scriptFilter = new ScriptFilter(); - if (!scriptFilter.tryParseFromMethodExpr(sqlMethodInvokeExpr)) { - throw new SqlParseException("could not parse script filter"); - } - condition = new Condition(Where.CONN.valueOf(opear), null, soExpr.getLeft(), - "SCRIPT", scriptFilter, soExpr.getRight()); - - } - - } - where.addWhere(condition); + } + + private void explainCond(String opear, SQLExpr expr, Where where) throws SqlParseException { + if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr soExpr = (SQLBinaryOpExpr) expr; + + boolean methodAsOpear = false; + + boolean isNested = false; + boolean isChildren = false; + + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(soExpr.getLeft())) { + soExpr.setLeft(new SQLIdentifierExpr(nestedType.field)); + isNested = true; + } + + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(soExpr.getLeft())) { + soExpr.setLeft(new SQLIdentifierExpr(childrenType.field)); + isChildren = true; + } + + if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) soExpr.getRight(); + String methodName = method.getMethodName().toLowerCase(); + + if (Condition.OPERATOR.methodNameToOpear.containsKey(methodName)) { + Object[] methodParametersValue = 
getMethodValuesWithSubQueries(method); + + final Condition condition; + // fix OPEAR + Condition.OPERATOR oper = Condition.OPERATOR.methodNameToOpear.get(methodName); + if (soExpr.getOperator() == SQLBinaryOperator.LessThanOrGreater + || soExpr.getOperator() == SQLBinaryOperator.NotEqual) { + oper = oper.negative(); + } + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + oper, + methodParametersValue, + soExpr.getRight(), + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + oper, + methodParametersValue, + soExpr.getRight(), + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + oper, + methodParametersValue, + soExpr.getRight(), + null); + } + + where.addWhere(condition); + methodAsOpear = true; + } + } + + if (!methodAsOpear) { + final Condition condition; + + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + soExpr.getOperator().name, + parseValue(soExpr.getRight()), + soExpr.getRight(), + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + soExpr.getOperator().name, + parseValue(soExpr.getRight()), + soExpr.getRight(), + childrenType); + } else { + SQLMethodInvokeExpr sqlMethodInvokeExpr = + parseSQLBinaryOpExprWhoIsConditionInWhere(soExpr); + if (sqlMethodInvokeExpr == null) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + soExpr.getOperator().name, + parseValue(soExpr.getRight()), + soExpr.getRight(), + null); + } else { + ScriptFilter scriptFilter = new ScriptFilter(); + if (!scriptFilter.tryParseFromMethodExpr(sqlMethodInvokeExpr)) { + throw new 
SqlParseException("could not parse script filter"); } - } else if (expr instanceof SQLInListExpr) { - SQLInListExpr siExpr = (SQLInListExpr) expr; - String leftSide = siExpr.getExpr().toString(); - - boolean isNested = false; - boolean isChildren = false; + condition = + new Condition( + Where.CONN.valueOf(opear), + null, + soExpr.getLeft(), + "SCRIPT", + scriptFilter, + soExpr.getRight()); + } + } + where.addWhere(condition); + } + } else if (expr instanceof SQLInListExpr) { + SQLInListExpr siExpr = (SQLInListExpr) expr; + String leftSide = siExpr.getExpr().toString(); - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(siExpr.getExpr())) { - leftSide = nestedType.field; + boolean isNested = false; + boolean isChildren = false; - isNested = false; - } + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(siExpr.getExpr())) { + leftSide = nestedType.field; - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(siExpr.getExpr())) { - leftSide = childrenType.field; + isNested = false; + } - isChildren = true; - } + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(siExpr.getExpr())) { + leftSide = childrenType.field; - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, siExpr.isNot() ? "NOT IN" : "IN", - parseValue(siExpr.getTargetList()), null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, siExpr.isNot() ? "NOT IN" : "IN", - parseValue(siExpr.getTargetList()), null, childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, siExpr.isNot() ? 
"NOT IN" : "IN", - parseValue(siExpr.getTargetList()), null); - } + isChildren = true; + } - where.addWhere(condition); - } else if (expr instanceof SQLBetweenExpr) { - SQLBetweenExpr between = ((SQLBetweenExpr) expr); - String leftSide = between.getTestExpr().toString(); + final Condition condition; - boolean isNested = false; - boolean isChildren = false; + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + siExpr.isNot() ? "NOT IN" : "IN", + parseValue(siExpr.getTargetList()), + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + siExpr.isNot() ? "NOT IN" : "IN", + parseValue(siExpr.getTargetList()), + null, + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + siExpr.isNot() ? "NOT IN" : "IN", + parseValue(siExpr.getTargetList()), + null); + } - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(between.getTestExpr())) { - leftSide = nestedType.field; + where.addWhere(condition); + } else if (expr instanceof SQLBetweenExpr) { + SQLBetweenExpr between = ((SQLBetweenExpr) expr); + String leftSide = between.getTestExpr().toString(); - isNested = true; - } + boolean isNested = false; + boolean isChildren = false; - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(between.getTestExpr())) { - leftSide = childrenType.field; + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(between.getTestExpr())) { + leftSide = nestedType.field; - isChildren = true; - } + isNested = true; + } - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, - between.isNot() ? 
"NOT BETWEEN" : "BETWEEN", new Object[]{parseValue(between.beginExpr), - parseValue(between.endExpr)}, null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, - between.isNot() ? "NOT BETWEEN" : "BETWEEN", new Object[]{parseValue(between.beginExpr), - parseValue(between.endExpr)}, null, childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, - between.isNot() ? "NOT BETWEEN" : "BETWEEN", new Object[]{parseValue(between.beginExpr), - parseValue(between.endExpr)}, null, null); - } + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(between.getTestExpr())) { + leftSide = childrenType.field; - where.addWhere(condition); - } else if (expr instanceof SQLMethodInvokeExpr) { - - SQLMethodInvokeExpr methodExpr = (SQLMethodInvokeExpr) expr; - List methodParameters = methodExpr.getParameters(); - - String methodName = methodExpr.getMethodName(); - if (SpatialParamsFactory.isAllowedMethod(methodName)) { - String fieldName = methodParameters.get(0).toString(); - - boolean isNested = false; - boolean isChildren = false; - - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(methodParameters.get(0))) { - fieldName = nestedType.field; - - isNested = true; - } - - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(methodParameters.get(0))) { - fieldName = childrenType.field; - - isChildren = true; - } - - Object spatialParamsObject = SpatialParamsFactory.generateSpatialParamsObject(methodName, - methodParameters); - - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), fieldName, null, methodName, - spatialParamsObject, null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), fieldName, null, methodName, - spatialParamsObject, null, childrenType); - } else { - condition = new 
Condition(Where.CONN.valueOf(opear), fieldName, null, methodName, - spatialParamsObject, null, null); - } - - where.addWhere(condition); - } else if (methodName.toLowerCase().equals("nested")) { - NestedType nestedType = new NestedType(); - - if (!nestedType.tryFillFromExpr(expr)) { - throw new SqlParseException("could not fill nested from expr:" + expr); - } - - Condition condition = new Condition(Where.CONN.valueOf(opear), nestedType.path, null, - methodName.toUpperCase(), nestedType.where, null); - - where.addWhere(condition); - } else if (methodName.toLowerCase().equals("children")) { - ChildrenType childrenType = new ChildrenType(); - - if (!childrenType.tryFillFromExpr(expr)) { - throw new SqlParseException("could not fill children from expr:" + expr); - } - - Condition condition = new Condition(Where.CONN.valueOf(opear), childrenType.childType, null, - methodName.toUpperCase(), childrenType.where, null); - - where.addWhere(condition); - } else if (methodName.toLowerCase().equals("script")) { - ScriptFilter scriptFilter = new ScriptFilter(); - if (!scriptFilter.tryParseFromMethodExpr(methodExpr)) { - throw new SqlParseException("could not parse script filter"); - } - Condition condition = new Condition(Where.CONN.valueOf(opear), null, null, "SCRIPT", - scriptFilter, null); - where.addWhere(condition); - } else if (Maker.isQueryFunction(methodName)) { - Condition condition = getConditionForMethod(expr, Where.CONN.valueOf(opear)); - - where.addWhere(condition); - } else { - throw new SqlParseException("unsupported method: " + methodName); - } - } else if (expr instanceof SQLInSubQueryExpr) { - SQLInSubQueryExpr sqlIn = (SQLInSubQueryExpr) expr; + isChildren = true; + } - Select innerSelect = sqlParser.parseSelect((MySqlSelectQueryBlock) sqlIn.getSubQuery().getQuery()); + final Condition condition; - if (innerSelect.getFields() == null || innerSelect.getFields().size() != 1) { - throw new SqlParseException("should only have one return field in subQuery"); - } + 
if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + between.isNot() ? "NOT BETWEEN" : "BETWEEN", + new Object[] {parseValue(between.beginExpr), parseValue(between.endExpr)}, + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + between.isNot() ? "NOT BETWEEN" : "BETWEEN", + new Object[] {parseValue(between.beginExpr), parseValue(between.endExpr)}, + null, + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + between.isNot() ? "NOT BETWEEN" : "BETWEEN", + new Object[] {parseValue(between.beginExpr), parseValue(between.endExpr)}, + null, + null); + } - SubQueryExpression subQueryExpression = new SubQueryExpression(innerSelect); + where.addWhere(condition); + } else if (expr instanceof SQLMethodInvokeExpr) { - String leftSide = sqlIn.getExpr().toString(); + SQLMethodInvokeExpr methodExpr = (SQLMethodInvokeExpr) expr; + List methodParameters = methodExpr.getParameters(); - boolean isNested = false; - boolean isChildren = false; + String methodName = methodExpr.getMethodName(); + if (SpatialParamsFactory.isAllowedMethod(methodName)) { + String fieldName = methodParameters.get(0).toString(); - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(sqlIn.getExpr())) { - leftSide = nestedType.field; + boolean isNested = false; + boolean isChildren = false; - isNested = true; - } + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(methodParameters.get(0))) { + fieldName = nestedType.field; - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(sqlIn.getExpr())) { - leftSide = childrenType.field; + isNested = true; + } - isChildren = true; - } + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(methodParameters.get(0))) { + fieldName = childrenType.field; - final Condition condition; - - if 
(isNested) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, sqlIn.isNot() ? "NOT IN" : "IN", - subQueryExpression, null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, sqlIn.isNot() ? "NOT IN" : "IN", - subQueryExpression, null, childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, sqlIn.isNot() ? "NOT IN" : "IN", - subQueryExpression, null, null); - } + isChildren = true; + } - where.addWhere(condition); + Object spatialParamsObject = + SpatialParamsFactory.generateSpatialParamsObject(methodName, methodParameters); + + final Condition condition; + + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + fieldName, + null, + methodName, + spatialParamsObject, + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + fieldName, + null, + methodName, + spatialParamsObject, + null, + childrenType); } else { - throw new SqlParseException("err find condition " + expr.getClass()); + condition = + new Condition( + Where.CONN.valueOf(opear), + fieldName, + null, + methodName, + spatialParamsObject, + null, + null); } - } - private MethodField parseSQLMethodInvokeExprWithFunctionInWhere(SQLMethodInvokeExpr soExpr) - throws SqlParseException { + where.addWhere(condition); + } else if (methodName.toLowerCase().equals("nested")) { + NestedType nestedType = new NestedType(); - MethodField methodField = fieldMaker.makeMethodField(soExpr.getMethodName(), - soExpr.getParameters(), - null, - null, - query != null ? 
query.getFrom().getAlias() : null, - false); - return methodField; - } + if (!nestedType.tryFillFromExpr(expr)) { + throw new SqlParseException("could not fill nested from expr:" + expr); + } - private MethodField parseSQLCastExprWithFunctionInWhere(SQLCastExpr soExpr) throws SqlParseException { - ArrayList parameters = new ArrayList<>(); - parameters.add(soExpr.getExpr()); - return fieldMaker.makeMethodField( - "CAST", - parameters, + Condition condition = + new Condition( + Where.CONN.valueOf(opear), + nestedType.path, null, - null, - query != null ? query.getFrom().getAlias() : null, - false - ); - } + methodName.toUpperCase(), + nestedType.where, + null); - private SQLMethodInvokeExpr parseSQLBinaryOpExprWhoIsConditionInWhere(SQLBinaryOpExpr soExpr) - throws SqlParseException { - - if (bothSideAreNotFunction(soExpr) && bothSidesAreNotCast(soExpr)) { - return null; - } + where.addWhere(condition); + } else if (methodName.toLowerCase().equals("children")) { + ChildrenType childrenType = new ChildrenType(); - if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { - if (!SQLFunctions.isFunctionTranslatedToScript(((SQLMethodInvokeExpr) soExpr.getLeft()).getMethodName())) { - return null; - } + if (!childrenType.tryFillFromExpr(expr)) { + throw new SqlParseException("could not fill children from expr:" + expr); } - if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { - if (!SQLFunctions.isFunctionTranslatedToScript(((SQLMethodInvokeExpr) soExpr.getRight()).getMethodName())) { - return null; - } + Condition condition = + new Condition( + Where.CONN.valueOf(opear), + childrenType.childType, + null, + methodName.toUpperCase(), + childrenType.where, + null); + + where.addWhere(condition); + } else if (methodName.toLowerCase().equals("script")) { + ScriptFilter scriptFilter = new ScriptFilter(); + if (!scriptFilter.tryParseFromMethodExpr(methodExpr)) { + throw new SqlParseException("could not parse script filter"); } + Condition condition = + new 
Condition(Where.CONN.valueOf(opear), null, null, "SCRIPT", scriptFilter, null); + where.addWhere(condition); + } else if (Maker.isQueryFunction(methodName)) { + Condition condition = getConditionForMethod(expr, Where.CONN.valueOf(opear)); + where.addWhere(condition); + } else { + throw new SqlParseException("unsupported method: " + methodName); + } + } else if (expr instanceof SQLInSubQueryExpr) { + SQLInSubQueryExpr sqlIn = (SQLInSubQueryExpr) expr; - MethodField leftMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", Util.expr2Object(soExpr.getLeft(), "'"))), null, null); - MethodField rightMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", Util.expr2Object(soExpr.getRight(), "'"))), null, null); + Select innerSelect = + sqlParser.parseSelect((MySqlSelectQueryBlock) sqlIn.getSubQuery().getQuery()); - if (soExpr.getLeft() instanceof SQLIdentifierExpr || soExpr.getLeft() instanceof SQLPropertyExpr) { - leftMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", "doc['" + Util.expr2Object(soExpr.getLeft(), "'") + "'].value")), - null, null); - } - - if (soExpr.getRight() instanceof SQLIdentifierExpr || soExpr.getRight() instanceof SQLPropertyExpr) { - rightMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", "doc['" + Util.expr2Object(soExpr.getRight(), "'") + "'].value")), - null, null); - } + if (innerSelect.getFields() == null || innerSelect.getFields().size() != 1) { + throw new SqlParseException("should only have one return field in subQuery"); + } - if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { - leftMethod = parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getLeft()); - } - if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { - rightMethod = parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getRight()); - } + SubQueryExpression subQueryExpression = new SubQueryExpression(innerSelect); - if (soExpr.getLeft() instanceof SQLCastExpr) { - 
leftMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getLeft()); - } - if (soExpr.getRight() instanceof SQLCastExpr) { - rightMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getRight()); - } + String leftSide = sqlIn.getExpr().toString(); - String v1 = leftMethod.getParams().get(0).value.toString(); - String v1Dec = leftMethod.getParams().size() == 2 ? leftMethod.getParams().get(1).value.toString() + ";" : ""; + boolean isNested = false; + boolean isChildren = false; + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(sqlIn.getExpr())) { + leftSide = nestedType.field; - String v2 = rightMethod.getParams().get(0).value.toString(); - String v2Dec = rightMethod.getParams().size() == 2 ? rightMethod.getParams().get(1).value.toString() + ";" : ""; + isNested = true; + } - String operator = soExpr.getOperator().getName(); + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(sqlIn.getExpr())) { + leftSide = childrenType.field; - if (operator.equals("=")) { - operator = "=="; - } + isChildren = true; + } - String finalStr = v1Dec + v2Dec + v1 + " " + operator + " " + v2; + final Condition condition; - SQLMethodInvokeExpr scriptMethod = new SQLMethodInvokeExpr("script", null); - scriptMethod.addParameter(new SQLCharExpr(finalStr)); - return scriptMethod; + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + sqlIn.isNot() ? "NOT IN" : "IN", + subQueryExpression, + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + sqlIn.isNot() ? "NOT IN" : "IN", + subQueryExpression, + null, + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + sqlIn.isNot() ? 
"NOT IN" : "IN", + subQueryExpression, + null, + null); + } + where.addWhere(condition); + } else { + throw new SqlParseException("err find condition " + expr.getClass()); + } + } + + private MethodField parseSQLMethodInvokeExprWithFunctionInWhere(SQLMethodInvokeExpr soExpr) + throws SqlParseException { + + MethodField methodField = + fieldMaker.makeMethodField( + soExpr.getMethodName(), + soExpr.getParameters(), + null, + null, + query != null ? query.getFrom().getAlias() : null, + false); + return methodField; + } + + private MethodField parseSQLCastExprWithFunctionInWhere(SQLCastExpr soExpr) + throws SqlParseException { + ArrayList parameters = new ArrayList<>(); + parameters.add(soExpr.getExpr()); + return fieldMaker.makeMethodField( + "CAST", parameters, null, null, query != null ? query.getFrom().getAlias() : null, false); + } + + private SQLMethodInvokeExpr parseSQLBinaryOpExprWhoIsConditionInWhere(SQLBinaryOpExpr soExpr) + throws SqlParseException { + + if (bothSideAreNotFunction(soExpr) && bothSidesAreNotCast(soExpr)) { + return null; } - private Boolean bothSideAreNotFunction(SQLBinaryOpExpr soExpr) { - return !(soExpr.getLeft() instanceof SQLMethodInvokeExpr || soExpr.getRight() instanceof SQLMethodInvokeExpr); + if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { + if (!SQLFunctions.isFunctionTranslatedToScript( + ((SQLMethodInvokeExpr) soExpr.getLeft()).getMethodName())) { + return null; + } } - private Boolean bothSidesAreNotCast(SQLBinaryOpExpr soExpr) { - return !(soExpr.getLeft() instanceof SQLCastExpr || soExpr.getRight() instanceof SQLCastExpr); + if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { + if (!SQLFunctions.isFunctionTranslatedToScript( + ((SQLMethodInvokeExpr) soExpr.getRight()).getMethodName())) { + return null; + } } - private Object[] getMethodValuesWithSubQueries(SQLMethodInvokeExpr method) throws SqlParseException { - List values = new ArrayList<>(); - for (SQLExpr innerExpr : method.getParameters()) { - if (innerExpr 
instanceof SQLQueryExpr) { - Select select = sqlParser.parseSelect((MySqlSelectQueryBlock) ((SQLQueryExpr) innerExpr).getSubQuery() - .getQuery()); - values.add(new SubQueryExpression(select)); - } else if (innerExpr instanceof SQLTextLiteralExpr) { - values.add(((SQLTextLiteralExpr) innerExpr).getText()); - } else { - values.add(innerExpr); - } + MethodField leftMethod = + new MethodField( + null, + Lists.newArrayList(new KVValue("", Util.expr2Object(soExpr.getLeft(), "'"))), + null, + null); + MethodField rightMethod = + new MethodField( + null, + Lists.newArrayList(new KVValue("", Util.expr2Object(soExpr.getRight(), "'"))), + null, + null); + + if (soExpr.getLeft() instanceof SQLIdentifierExpr + || soExpr.getLeft() instanceof SQLPropertyExpr) { + leftMethod = + new MethodField( + null, + Lists.newArrayList( + new KVValue("", "doc['" + Util.expr2Object(soExpr.getLeft(), "'") + "'].value")), + null, + null); + } - } - return values.toArray(); + if (soExpr.getRight() instanceof SQLIdentifierExpr + || soExpr.getRight() instanceof SQLPropertyExpr) { + rightMethod = + new MethodField( + null, + Lists.newArrayList( + new KVValue("", "doc['" + Util.expr2Object(soExpr.getRight(), "'") + "'].value")), + null, + null); } - private Object[] parseValue(List targetList) throws SqlParseException { - Object[] value = new Object[targetList.size()]; - for (int i = 0; i < targetList.size(); i++) { - value[i] = parseValue(targetList.get(i)); - } - return value; + if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { + leftMethod = + parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getLeft()); + } + if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { + rightMethod = + parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getRight()); } - private Object parseValue(SQLExpr expr) throws SqlParseException { - if (expr instanceof SQLNumericLiteralExpr) { - Number number = ((SQLNumericLiteralExpr) expr).getNumber(); - if (number instanceof 
BigDecimal) { - return number.doubleValue(); - } - if (number instanceof BigInteger) { - return number.longValue(); - } - return ((SQLNumericLiteralExpr) expr).getNumber(); - } else if (expr instanceof SQLCharExpr) { - return ((SQLCharExpr) expr).getText(); - } else if (expr instanceof SQLMethodInvokeExpr) { - return expr; - } else if (expr instanceof SQLNullExpr) { - return null; - } else if (expr instanceof SQLIdentifierExpr) { - return expr; - } else if (expr instanceof SQLPropertyExpr) { - return expr; - } else if (expr instanceof SQLBooleanExpr) { - return ((SQLBooleanExpr) expr).getValue(); - } else { - throw new SqlParseException( - String.format("Failed to parse SqlExpression of type %s. expression value: %s", - expr.getClass(), expr) - ); - } + if (soExpr.getLeft() instanceof SQLCastExpr) { + leftMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getLeft()); + } + if (soExpr.getRight() instanceof SQLCastExpr) { + rightMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getRight()); } - public static Condition getConditionForMethod(SQLExpr expr, Where.CONN conn) throws SqlParseException { - SQLExpr param = ((SQLMethodInvokeExpr) expr).getParameters().get(0); - String fieldName = param.toString(); + String v1 = leftMethod.getParams().get(0).value.toString(); + String v1Dec = + leftMethod.getParams().size() == 2 + ? leftMethod.getParams().get(1).value.toString() + ";" + : ""; - NestedType nestedType = new NestedType(); - ChildrenType childrenType = new ChildrenType(); + String v2 = rightMethod.getParams().get(0).value.toString(); + String v2Dec = + rightMethod.getParams().size() == 2 + ? 
rightMethod.getParams().get(1).value.toString() + ";" + : ""; - if (nestedType.tryFillFromExpr(param)) { - return new Condition(conn, nestedType.field, null, "=", expr, expr, nestedType); - } else if (childrenType.tryFillFromExpr(param)) { - return new Condition(conn, childrenType.field, null, "=", expr, expr, childrenType); - } else { - return new Condition(conn, fieldName, null, "=", expr, expr, null); - } + String operator = soExpr.getOperator().getName(); + + if (operator.equals("=")) { + operator = "=="; + } + + String finalStr = v1Dec + v2Dec + v1 + " " + operator + " " + v2; + + SQLMethodInvokeExpr scriptMethod = new SQLMethodInvokeExpr("script", null); + scriptMethod.addParameter(new SQLCharExpr(finalStr)); + return scriptMethod; + } + + private Boolean bothSideAreNotFunction(SQLBinaryOpExpr soExpr) { + return !(soExpr.getLeft() instanceof SQLMethodInvokeExpr + || soExpr.getRight() instanceof SQLMethodInvokeExpr); + } + + private Boolean bothSidesAreNotCast(SQLBinaryOpExpr soExpr) { + return !(soExpr.getLeft() instanceof SQLCastExpr || soExpr.getRight() instanceof SQLCastExpr); + } + + private Object[] getMethodValuesWithSubQueries(SQLMethodInvokeExpr method) + throws SqlParseException { + List values = new ArrayList<>(); + for (SQLExpr innerExpr : method.getParameters()) { + if (innerExpr instanceof SQLQueryExpr) { + Select select = + sqlParser.parseSelect( + (MySqlSelectQueryBlock) ((SQLQueryExpr) innerExpr).getSubQuery().getQuery()); + values.add(new SubQueryExpression(select)); + } else if (innerExpr instanceof SQLTextLiteralExpr) { + values.add(((SQLTextLiteralExpr) innerExpr).getText()); + } else { + values.add(innerExpr); + } + } + return values.toArray(); + } + + private Object[] parseValue(List targetList) throws SqlParseException { + Object[] value = new Object[targetList.size()]; + for (int i = 0; i < targetList.size(); i++) { + value[i] = parseValue(targetList.get(i)); + } + return value; + } + + private Object parseValue(SQLExpr expr) throws 
SqlParseException { + if (expr instanceof SQLNumericLiteralExpr) { + Number number = ((SQLNumericLiteralExpr) expr).getNumber(); + if (number instanceof BigDecimal) { + return number.doubleValue(); + } + if (number instanceof BigInteger) { + return number.longValue(); + } + return ((SQLNumericLiteralExpr) expr).getNumber(); + } else if (expr instanceof SQLCharExpr) { + return ((SQLCharExpr) expr).getText(); + } else if (expr instanceof SQLMethodInvokeExpr) { + return expr; + } else if (expr instanceof SQLNullExpr) { + return null; + } else if (expr instanceof SQLIdentifierExpr) { + return expr; + } else if (expr instanceof SQLPropertyExpr) { + return expr; + } else if (expr instanceof SQLBooleanExpr) { + return ((SQLBooleanExpr) expr).getValue(); + } else { + throw new SqlParseException( + String.format( + "Failed to parse SqlExpression of type %s. expression value: %s", + expr.getClass(), expr)); + } + } + + public static Condition getConditionForMethod(SQLExpr expr, Where.CONN conn) + throws SqlParseException { + SQLExpr param = ((SQLMethodInvokeExpr) expr).getParameters().get(0); + String fieldName = param.toString(); + + NestedType nestedType = new NestedType(); + ChildrenType childrenType = new ChildrenType(); + + if (nestedType.tryFillFromExpr(param)) { + return new Condition(conn, nestedType.field, null, "=", expr, expr, nestedType); + } else if (childrenType.tryFillFromExpr(param)) { + return new Condition(conn, childrenType.field, null, "=", expr, expr, childrenType); + } else { + return new Condition(conn, fieldName, null, "=", expr, expr, null); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java index 12176d4fa7..309a7c9c2a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java @@ -41,10 +41,11 @@ /** * New SQL 
REST action handler. This will not be registered to OpenSearch unless: + * *
    - *
  1. we want to test new SQL engine; - *
  2. all old functionalities migrated to new query engine and legacy REST handler removed. - *
+ *
  • we want to test new SQL engine; + *
  • all old functionalities migrated to new query engine and legacy REST handler removed. + * */ public class RestSQLQueryAction extends BaseRestHandler { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java index 7a414087e4..d9baa901fa 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import static org.opensearch.sql.legacy.utils.Util.prepareIndexRequestBuilder; @@ -15,22 +14,23 @@ public class ShowQueryAction extends QueryAction { - private final IndexStatement statement; + private final IndexStatement statement; - public ShowQueryAction(Client client, IndexStatement statement) { - super(client, null); - this.statement = statement; - } + public ShowQueryAction(Client client, IndexStatement statement) { + super(client, null); + this.statement = statement; + } - @Override - public QueryStatement getQueryStatement() { - return statement; - } + @Override + public QueryStatement getQueryStatement() { + return statement; + } - @Override - public SqlOpenSearchRequestBuilder explain() { - final GetIndexRequestBuilder indexRequestBuilder = prepareIndexRequestBuilder(client, statement); + @Override + public SqlOpenSearchRequestBuilder explain() { + final GetIndexRequestBuilder indexRequestBuilder = + prepareIndexRequestBuilder(client, statement); - return new SqlOpenSearchRequestBuilder(indexRequestBuilder); - } + return new SqlOpenSearchRequestBuilder(indexRequestBuilder); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java index 6963996b22..2203cbb39e 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.ActionRequest; @@ -12,41 +11,39 @@ import org.opensearch.core.action.ActionResponse; import org.opensearch.index.reindex.DeleteByQueryRequestBuilder; -/** - * Created by Eliran on 19/8/2015. - */ +/** Created by Eliran on 19/8/2015. */ public class SqlElasticDeleteByQueryRequestBuilder implements SqlElasticRequestBuilder { - DeleteByQueryRequestBuilder deleteByQueryRequestBuilder; - - public SqlElasticDeleteByQueryRequestBuilder(DeleteByQueryRequestBuilder deleteByQueryRequestBuilder) { - this.deleteByQueryRequestBuilder = deleteByQueryRequestBuilder; - } - - @Override - public ActionRequest request() { - return deleteByQueryRequestBuilder.request(); + DeleteByQueryRequestBuilder deleteByQueryRequestBuilder; + + public SqlElasticDeleteByQueryRequestBuilder( + DeleteByQueryRequestBuilder deleteByQueryRequestBuilder) { + this.deleteByQueryRequestBuilder = deleteByQueryRequestBuilder; + } + + @Override + public ActionRequest request() { + return deleteByQueryRequestBuilder.request(); + } + + @Override + public String explain() { + try { + SearchRequestBuilder source = deleteByQueryRequestBuilder.source(); + return source.toString(); + } catch (Exception e) { + e.printStackTrace(); } + return null; + } - @Override - public String explain() { - try { - SearchRequestBuilder source = deleteByQueryRequestBuilder.source(); - return source.toString(); - } catch (Exception e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { + @Override + public ActionResponse get() { - return this.deleteByQueryRequestBuilder.get(); - } - - @Override - public ActionRequestBuilder getBuilder() { - return 
deleteByQueryRequestBuilder; - } + return this.deleteByQueryRequestBuilder.get(); + } + @Override + public ActionRequestBuilder getBuilder() { + return deleteByQueryRequestBuilder; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java index e1f3db3fa7..7babbe5abe 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java @@ -3,22 +3,19 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; import org.opensearch.core.action.ActionResponse; -/** - * Created by Eliran on 19/8/2015. - */ +/** Created by Eliran on 19/8/2015. */ public interface SqlElasticRequestBuilder { - ActionRequest request(); + ActionRequest request(); - String explain(); + String explain(); - ActionResponse get(); + ActionResponse get(); - ActionRequestBuilder getBuilder(); + ActionRequestBuilder getBuilder(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java index 6bba1048c4..2beb16837b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java @@ -3,45 +3,42 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; import org.opensearch.core.action.ActionResponse; -/** - * Created by Eliran on 19/8/2015. - */ +/** Created by Eliran on 19/8/2015. 
*/ public class SqlOpenSearchRequestBuilder implements SqlElasticRequestBuilder { - ActionRequestBuilder requestBuilder; - - public SqlOpenSearchRequestBuilder(ActionRequestBuilder requestBuilder) { - this.requestBuilder = requestBuilder; - } - - @Override - public ActionRequest request() { - return requestBuilder.request(); - } - - @Override - public String explain() { - return requestBuilder.toString(); - } - - @Override - public ActionResponse get() { - return requestBuilder.get(); - } - - @Override - public ActionRequestBuilder getBuilder() { - return requestBuilder; - } - - @Override - public String toString() { - return this.requestBuilder.toString(); - } + ActionRequestBuilder requestBuilder; + + public SqlOpenSearchRequestBuilder(ActionRequestBuilder requestBuilder) { + this.requestBuilder = requestBuilder; + } + + @Override + public ActionRequest request() { + return requestBuilder.request(); + } + + @Override + public String explain() { + return requestBuilder.toString(); + } + + @Override + public ActionResponse get() { + return requestBuilder.get(); + } + + @Override + public ActionRequestBuilder getBuilder() { + return requestBuilder; + } + + @Override + public String toString() { + return this.requestBuilder.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java index b1a07486b7..0b37497541 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; @@ -11,56 +10,53 @@ import org.opensearch.sql.legacy.domain.Field; import org.opensearch.sql.legacy.domain.Select; -/** - * Created by Eliran on 28/8/2015. 
- */ +/** Created by Eliran on 28/8/2015. */ public class TableInJoinRequestBuilder { - private SearchRequestBuilder requestBuilder; - private String alias; - private List returnedFields; - private Select originalSelect; - private Integer hintLimit; + private SearchRequestBuilder requestBuilder; + private String alias; + private List returnedFields; + private Select originalSelect; + private Integer hintLimit; - public TableInJoinRequestBuilder() { - } + public TableInJoinRequestBuilder() {} - public SearchRequestBuilder getRequestBuilder() { - return requestBuilder; - } + public SearchRequestBuilder getRequestBuilder() { + return requestBuilder; + } - public void setRequestBuilder(SearchRequestBuilder requestBuilder) { - this.requestBuilder = requestBuilder; - } + public void setRequestBuilder(SearchRequestBuilder requestBuilder) { + this.requestBuilder = requestBuilder; + } - public String getAlias() { - return alias; - } + public String getAlias() { + return alias; + } - public void setAlias(String alias) { - this.alias = alias; - } + public void setAlias(String alias) { + this.alias = alias; + } - public List getReturnedFields() { - return returnedFields; - } + public List getReturnedFields() { + return returnedFields; + } - public void setReturnedFields(List returnedFields) { - this.returnedFields = returnedFields; - } + public void setReturnedFields(List returnedFields) { + this.returnedFields = returnedFields; + } - public Select getOriginalSelect() { - return originalSelect; - } + public Select getOriginalSelect() { + return originalSelect; + } - public void setOriginalSelect(Select originalSelect) { - this.originalSelect = originalSelect; - } + public void setOriginalSelect(Select originalSelect) { + this.originalSelect = originalSelect; + } - public Integer getHintLimit() { - return hintLimit; - } + public Integer getHintLimit() { + return hintLimit; + } - public void setHintLimit(Integer hintLimit) { - this.hintLimit = hintLimit; - } + public void 
setHintLimit(Integer hintLimit) { + this.hintLimit = hintLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java index ac9a173212..b54e260fd4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.converter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -31,253 +30,281 @@ import org.opensearch.sql.legacy.query.planner.core.ColumnNode; /** - * The definition of SQL Aggregation Converter which will parse the query to project column node list and - * aggregation list - * e.g. parse the query: SELECT age, MAX(balance) - MIN(balance) FROM T GROUP BY age. - * will generate the - * node list: age, max_0 - min_0 - * aggregation list: age, max(balance) as max_0, min(balance) as min_0 - * + * The definition of SQL Aggregation Converter which will parse the query to project column node + * list and aggregation list e.g. parse the query: SELECT age, MAX(balance) - MIN(balance) FROM T + * GROUP BY age. will generate the node list: age, max_0 - min_0 aggregation list: age, max(balance) + * as max_0, min(balance) as min_0 */ @RequiredArgsConstructor public class SQLAggregationParser { - private final ColumnTypeProvider columnTypeProvider; - private Context context; - @Getter - private List columnNodes = new ArrayList<>(); - - public void parse(MySqlSelectQueryBlock queryBlock) { - context = new Context(constructSQLExprAliasMapFromSelect(queryBlock)); - - //1. extract raw names of selectItems - List selectItemNames = extractSelectItemNames(queryBlock.getSelectList()); - - //2. rewrite all the function name to lower case. 
- rewriteFunctionNameToLowerCase(queryBlock); - - //2. find all GroupKeyExpr from GroupBy expression. - findAllGroupKeyExprFromGroupByAndSelect(queryBlock); - findAllAggregationExprFromSelect(queryBlock); - - //3. parse the select list to expression - parseExprInSelectList(queryBlock, selectItemNames, new SQLExprToExpressionConverter(context)); - } - - public List selectItemList() { - List sqlSelectItems = new ArrayList<>(); - context.getGroupKeyExprMap().entrySet().forEach(entry -> sqlSelectItems - .add(new SQLSelectItem(entry.getKey(), entry.getValue().getExpression().toString()))); - context.getAggregationExprMap().entrySet().forEach(entry -> sqlSelectItems - .add(new SQLSelectItem(entry.getKey(), entry.getValue().getExpression().toString()))); - return sqlSelectItems; + private final ColumnTypeProvider columnTypeProvider; + private Context context; + @Getter private List columnNodes = new ArrayList<>(); + + public void parse(MySqlSelectQueryBlock queryBlock) { + context = new Context(constructSQLExprAliasMapFromSelect(queryBlock)); + + // 1. extract raw names of selectItems + List selectItemNames = extractSelectItemNames(queryBlock.getSelectList()); + + // 2. rewrite all the function name to lower case. + rewriteFunctionNameToLowerCase(queryBlock); + + // 2. find all GroupKeyExpr from GroupBy expression. + findAllGroupKeyExprFromGroupByAndSelect(queryBlock); + findAllAggregationExprFromSelect(queryBlock); + + // 3. 
parse the select list to expression + parseExprInSelectList(queryBlock, selectItemNames, new SQLExprToExpressionConverter(context)); + } + + public List selectItemList() { + List sqlSelectItems = new ArrayList<>(); + context + .getGroupKeyExprMap() + .entrySet() + .forEach( + entry -> + sqlSelectItems.add( + new SQLSelectItem( + entry.getKey(), entry.getValue().getExpression().toString()))); + context + .getAggregationExprMap() + .entrySet() + .forEach( + entry -> + sqlSelectItems.add( + new SQLSelectItem( + entry.getKey(), entry.getValue().getExpression().toString()))); + return sqlSelectItems; + } + + private Map constructSQLExprAliasMapFromSelect( + MySqlSelectQueryBlock queryBlock) { + return queryBlock.getSelectList().stream() + .filter(item -> !Strings.isNullOrEmpty(item.getAlias())) + .collect(Collectors.toMap(SQLSelectItem::getExpr, SQLSelectItem::getAlias)); + } + + /** + * The SQL-92 require nonaggregated name column in the select list must appear in the GROUP BY, + * But the existing uses cases violate this require. e.g. AggregationIT. countGroupByDateTest Ref + * the https://dev.mysql.com/doc/refman/8.0/en/group-by-handling.html for detail information + */ + private void findAllGroupKeyExprFromGroupByAndSelect(MySqlSelectQueryBlock queryBlock) { + if (queryBlock.getGroupBy() == null) { + return; } - - private Map constructSQLExprAliasMapFromSelect(MySqlSelectQueryBlock queryBlock) { - return queryBlock.getSelectList().stream().filter(item -> !Strings.isNullOrEmpty(item.getAlias())) - .collect(Collectors.toMap(SQLSelectItem::getExpr, SQLSelectItem::getAlias)); - } - - /** - * The SQL-92 require nonaggregated name column in the select list must appear in the GROUP BY, But the - * existing uses cases violate this require. e.g. AggregationIT. 
countGroupByDateTest - * Ref the https://dev.mysql.com/doc/refman/8.0/en/group-by-handling.html for detail information - */ - private void findAllGroupKeyExprFromGroupByAndSelect(MySqlSelectQueryBlock queryBlock) { - if (queryBlock.getGroupBy() == null) { - return; - } - // 1. fetch the expr from groupby clause. - List groupByKeyExprList = - queryBlock.getGroupBy().getItems().stream().map(item -> ((MySqlSelectGroupByExpr) item).getExpr()) - .collect(Collectors.toList()); - - // 2. find the group expr from select. - for (SQLSelectItem selectItem : queryBlock.getSelectList()) { - SQLExpr selectItemExpr = selectItem.getExpr(); - // extension, group key in select could not in group by. - if (selectItemExpr instanceof SQLIdentifierExpr) { - context.addGroupKeyExpr(selectItemExpr); - } else { - for (SQLExpr groupByExpr : groupByKeyExprList) { - // SQL-92,nonaggregated name column in the select list must appear in the GROUP BY - if (compareSelectExprAndGroupByExpr(selectItemExpr, selectItem.getAlias(), groupByExpr)) { - context.addGroupKeyExpr(selectItemExpr); - } else if (groupByExpr instanceof SQLIdentifierExpr) { - // support expression over group key, e.g. SELECT log(G), max(A) FROM T GROUP BY G. - String groupByName = ((SQLIdentifierExpr) groupByExpr).getName(); - selectItemExpr.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLAggregateExpr x) { - return false; - } - - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (groupByName.equalsIgnoreCase(expr.getName())) { - expr.setParent(selectItem.getParent()); - context.addGroupKeyExpr(expr); - } - return false; - } - }); + // 1. fetch the expr from groupby clause. + List groupByKeyExprList = + queryBlock.getGroupBy().getItems().stream() + .map(item -> ((MySqlSelectGroupByExpr) item).getExpr()) + .collect(Collectors.toList()); + + // 2. find the group expr from select. 
+ for (SQLSelectItem selectItem : queryBlock.getSelectList()) { + SQLExpr selectItemExpr = selectItem.getExpr(); + // extension, group key in select could not in group by. + if (selectItemExpr instanceof SQLIdentifierExpr) { + context.addGroupKeyExpr(selectItemExpr); + } else { + for (SQLExpr groupByExpr : groupByKeyExprList) { + // SQL-92,nonaggregated name column in the select list must appear in the GROUP BY + if (compareSelectExprAndGroupByExpr(selectItemExpr, selectItem.getAlias(), groupByExpr)) { + context.addGroupKeyExpr(selectItemExpr); + } else if (groupByExpr instanceof SQLIdentifierExpr) { + // support expression over group key, e.g. SELECT log(G), max(A) FROM T GROUP BY G. + String groupByName = ((SQLIdentifierExpr) groupByExpr).getName(); + selectItemExpr.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLAggregateExpr x) { + return false; + } + + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (groupByName.equalsIgnoreCase(expr.getName())) { + expr.setParent(selectItem.getParent()); + context.addGroupKeyExpr(expr); } - } - } + return false; + } + }); + } } + } + } + } + + private boolean compareSelectExprAndGroupByExpr( + SQLExpr selectItemExpr, String alias, SQLExpr groupByExpr) { + if (groupByExpr.equals(selectItemExpr)) { + return true; + } else if (groupByExpr instanceof SQLIdentifierExpr + && ((SQLIdentifierExpr) groupByExpr).getName().equalsIgnoreCase(alias)) { + return true; } + return false; + } + + private void findAllAggregationExprFromSelect(MySqlSelectQueryBlock queryBlock) { + queryBlock + .getSelectList() + .forEach( + selectItem -> + selectItem.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLAggregateExpr expr) { + context.addAggregationExpr(expr); + return true; + } + })); + } + + private void parseExprInSelectList( + MySqlSelectQueryBlock queryBlock, + List selectItemNames, + SQLExprToExpressionConverter exprConverter) { + List selectItems = 
queryBlock.getSelectList(); + for (int i = 0; i < selectItems.size(); i++) { + Expression expression = exprConverter.convert(selectItems.get(i).getExpr()); + ColumnNode columnNode = + ColumnNode.builder() + .name(selectItemNames.get(i)) + .alias(selectItems.get(i).getAlias()) + .type(columnTypeProvider.get(i)) + .expr(expression) + .build(); + columnNodes.add(columnNode); + } + } - private boolean compareSelectExprAndGroupByExpr(SQLExpr selectItemExpr, String alias, SQLExpr groupByExpr) { - if (groupByExpr.equals(selectItemExpr)) { + private List extractSelectItemNames(List selectItems) { + List selectItemNames = new ArrayList<>(); + for (SQLSelectItem selectItem : selectItems) { + selectItemNames.add(nameOfSelectItem(selectItem)); + } + return selectItemNames; + } + + private void rewriteFunctionNameToLowerCase(MySqlSelectQueryBlock query) { + query.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLMethodInvokeExpr x) { + x.setMethodName(x.getMethodName().toLowerCase()); return true; - } else if (groupByExpr instanceof SQLIdentifierExpr - && ((SQLIdentifierExpr) groupByExpr).getName().equalsIgnoreCase(alias)) { - return true; - } - return false; + } + }); + } + + private String nameOfSelectItem(SQLSelectItem selectItem) { + return Strings.isNullOrEmpty(selectItem.getAlias()) + ? 
Context.nameOfExpr(selectItem.getExpr()) + : selectItem.getAlias(); + } + + @RequiredArgsConstructor + public static class Context { + private final AliasGenerator aliasGenerator = new AliasGenerator(); + + private final Map selectSQLExprAliasMap; + + @Getter private final Map groupKeyExprMap = new LinkedHashMap<>(); + @Getter private final Map aggregationExprMap = new LinkedHashMap<>(); + + Optional resolve(SQLExpr expr) { + if (groupKeyExprMap.containsKey(expr)) { + return Optional.of(groupKeyExprMap.get(expr).getExpression()); + } else if (aggregationExprMap.containsKey(expr)) { + return Optional.of(aggregationExprMap.get(expr).getExpression()); + } else { + return Optional.empty(); + } } - private void findAllAggregationExprFromSelect(MySqlSelectQueryBlock queryBlock) { - queryBlock.getSelectList().forEach(selectItem -> selectItem.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLAggregateExpr expr) { - context.addAggregationExpr(expr); - return true; - } - })); + public void addGroupKeyExpr(SQLExpr groupKeyExpr) { + if (!groupKeyExprMap.containsKey(groupKeyExpr)) { + groupKeyExprMap.put(groupKeyExpr, new GroupKeyExpr(groupKeyExpr)); + } } - private void parseExprInSelectList( - MySqlSelectQueryBlock queryBlock, List selectItemNames, - SQLExprToExpressionConverter exprConverter) { - List selectItems = queryBlock.getSelectList(); - for (int i = 0; i < selectItems.size(); i++) { - Expression expression = exprConverter.convert(selectItems.get(i).getExpr()); - ColumnNode columnNode = ColumnNode.builder() - .name(selectItemNames.get(i)) - .alias(selectItems.get(i).getAlias()) - .type(columnTypeProvider.get(i)) - .expr(expression) - .build(); - columnNodes.add(columnNode); - } + public void addAggregationExpr(SQLAggregateExpr aggregationExpr) { + if (!aggregationExprMap.containsKey(aggregationExpr)) { + aggregationExprMap.put(aggregationExpr, new AggregationExpr(aggregationExpr)); + } } - private List extractSelectItemNames(List selectItems) 
{ - List selectItemNames = new ArrayList<>(); - for (SQLSelectItem selectItem: selectItems){ - selectItemNames.add(nameOfSelectItem(selectItem)); + @Getter + public class GroupKeyExpr { + private final SQLExpr expr; + private final Expression expression; + + public GroupKeyExpr(SQLExpr expr) { + this.expr = expr; + String exprName = nameOfExpr(expr).replace(".", "#"); + if (expr instanceof SQLIdentifierExpr + && selectSQLExprAliasMap.values().contains(((SQLIdentifierExpr) expr).getName())) { + exprName = ((SQLIdentifierExpr) expr).getName(); } - return selectItemNames; + this.expression = ExpressionFactory.ref(selectSQLExprAliasMap.getOrDefault(expr, exprName)); + } } - private void rewriteFunctionNameToLowerCase(MySqlSelectQueryBlock query) { - query.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLMethodInvokeExpr x) { - x.setMethodName(x.getMethodName().toLowerCase()); - return true; - } - }); + @Getter + public class AggregationExpr { + private final SQLAggregateExpr expr; + private final Expression expression; + + public AggregationExpr(SQLAggregateExpr expr) { + this.expr = expr; + this.expression = + ExpressionFactory.ref( + selectSQLExprAliasMap.getOrDefault( + expr, aliasGenerator.nextAlias(expr.getMethodName()))); + } } - private String nameOfSelectItem(SQLSelectItem selectItem) { - return Strings.isNullOrEmpty(selectItem.getAlias()) ? Context - .nameOfExpr(selectItem.getExpr()) : selectItem.getAlias(); + public static String nameOfExpr(SQLExpr expr) { + String exprName = expr.toString().toLowerCase(); + if (expr instanceof SQLAggregateExpr) { + SQLAggregateExpr aggExpr = (SQLAggregateExpr) expr; + SQLAggregateOption option = aggExpr.getOption(); + exprName = + option == null + ? 
String.format("%s(%s)", aggExpr.getMethodName(), aggExpr.getArguments().get(0)) + : String.format( + "%s(%s %s)", + aggExpr.getMethodName(), option.name(), aggExpr.getArguments().get(0)); + } else if (expr instanceof SQLMethodInvokeExpr) { + exprName = + String.format( + "%s(%s)", + ((SQLMethodInvokeExpr) expr).getMethodName(), + nameOfExpr(((SQLMethodInvokeExpr) expr).getParameters().get(0))); + } else if (expr instanceof SQLIdentifierExpr) { + exprName = ((SQLIdentifierExpr) expr).getName(); + } else if (expr instanceof SQLCastExpr) { + exprName = + String.format( + "CAST(%s AS %s)", + ((SQLCastExpr) expr).getExpr(), ((SQLCastExpr) expr).getDataType().getName()); + } + return exprName; } - @RequiredArgsConstructor - public static class Context { - private final AliasGenerator aliasGenerator = new AliasGenerator(); - - private final Map selectSQLExprAliasMap; - - @Getter - private final Map groupKeyExprMap = new LinkedHashMap<>(); - @Getter - private final Map aggregationExprMap = new LinkedHashMap<>(); - - Optional resolve(SQLExpr expr) { - if (groupKeyExprMap.containsKey(expr)) { - return Optional.of(groupKeyExprMap.get(expr).getExpression()); - } else if (aggregationExprMap.containsKey(expr)) { - return Optional.of(aggregationExprMap.get(expr).getExpression()); - } else { - return Optional.empty(); - } - } + static class AliasGenerator { + private int aliasSuffix = 0; - public void addGroupKeyExpr(SQLExpr groupKeyExpr) { - if (!groupKeyExprMap.containsKey(groupKeyExpr)) { - groupKeyExprMap.put(groupKeyExpr, new GroupKeyExpr(groupKeyExpr)); - } - } - - public void addAggregationExpr(SQLAggregateExpr aggregationExpr) { - if (!aggregationExprMap.containsKey(aggregationExpr)) { - aggregationExprMap.put(aggregationExpr, new AggregationExpr(aggregationExpr)); - } - } + private String nextAlias(String name) { + return String.format("%s_%d", name, next()); + } - @Getter - public class GroupKeyExpr { - private final SQLExpr expr; - private final Expression expression; - 
- public GroupKeyExpr(SQLExpr expr) { - this.expr = expr; - String exprName = nameOfExpr(expr).replace(".", "#"); - if (expr instanceof SQLIdentifierExpr - && selectSQLExprAliasMap.values().contains(((SQLIdentifierExpr) expr).getName())) { - exprName = ((SQLIdentifierExpr) expr).getName(); - } - this.expression = ExpressionFactory.ref(selectSQLExprAliasMap.getOrDefault(expr, exprName)); - } - } - - @Getter - public class AggregationExpr { - private final SQLAggregateExpr expr; - private final Expression expression; - - public AggregationExpr(SQLAggregateExpr expr) { - this.expr = expr; - this.expression = - ExpressionFactory.ref(selectSQLExprAliasMap.getOrDefault(expr, aliasGenerator - .nextAlias(expr.getMethodName()))); - } - } - - public static String nameOfExpr(SQLExpr expr) { - String exprName = expr.toString().toLowerCase(); - if (expr instanceof SQLAggregateExpr) { - SQLAggregateExpr aggExpr = (SQLAggregateExpr) expr; - SQLAggregateOption option = aggExpr.getOption(); - exprName = option == null - ? 
String.format("%s(%s)", aggExpr.getMethodName(), aggExpr.getArguments().get(0)) - : String.format("%s(%s %s)", aggExpr.getMethodName(), option.name(), - aggExpr.getArguments().get(0)); - } else if (expr instanceof SQLMethodInvokeExpr) { - exprName = String.format("%s(%s)", ((SQLMethodInvokeExpr) expr).getMethodName(), - nameOfExpr(((SQLMethodInvokeExpr) expr).getParameters().get(0))); - } else if (expr instanceof SQLIdentifierExpr) { - exprName = ((SQLIdentifierExpr) expr).getName(); - } else if (expr instanceof SQLCastExpr) { - exprName = String.format("CAST(%s AS %s)", ((SQLCastExpr) expr).getExpr(), - ((SQLCastExpr) expr).getDataType().getName()); - } - return exprName; - } - - static class AliasGenerator { - private int aliasSuffix = 0; - - private String nextAlias(String name) { - return String.format("%s_%d", name, next()); - } - - private Integer next() { - return aliasSuffix++; - } - } + private Integer next() { + return aliasSuffix++; + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java index 0315fef900..800dac8426 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.converter; import static org.opensearch.sql.legacy.expression.core.ExpressionFactory.cast; @@ -27,86 +26,86 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.model.ExprValueFactory; -/** - * The definition of {@link SQLExpr} to {@link Expression} converter. - */ +/** The definition of {@link SQLExpr} to {@link Expression} converter. 
*/ @RequiredArgsConstructor public class SQLExprToExpressionConverter { - private static final Map binaryOperatorOperationMap = - new ImmutableMap.Builder() - .put(SQLBinaryOperator.Add, ScalarOperation.ADD) - .put(SQLBinaryOperator.Subtract, ScalarOperation.SUBTRACT) - .put(SQLBinaryOperator.Multiply, ScalarOperation.MULTIPLY) - .put(SQLBinaryOperator.Divide, ScalarOperation.DIVIDE) - .put(SQLBinaryOperator.Modulus, ScalarOperation.MODULES) - .build(); - private static final Map methodOperationMap = - new ImmutableMap.Builder() - .put(ScalarOperation.ABS.getName(), ScalarOperation.ABS) - .put(ScalarOperation.ACOS.getName(), ScalarOperation.ACOS) - .put(ScalarOperation.ASIN.getName(), ScalarOperation.ASIN) - .put(ScalarOperation.ATAN.getName(), ScalarOperation.ATAN) - .put(ScalarOperation.ATAN2.getName(), ScalarOperation.ATAN2) - .put(ScalarOperation.TAN.getName(), ScalarOperation.TAN) - .put(ScalarOperation.CBRT.getName(), ScalarOperation.CBRT) - .put(ScalarOperation.CEIL.getName(), ScalarOperation.CEIL) - .put(ScalarOperation.COS.getName(), ScalarOperation.COS) - .put(ScalarOperation.COSH.getName(), ScalarOperation.COSH) - .put(ScalarOperation.EXP.getName(), ScalarOperation.EXP) - .put(ScalarOperation.FLOOR.getName(), ScalarOperation.FLOOR) - .put(ScalarOperation.LN.getName(), ScalarOperation.LN) - .put(ScalarOperation.LOG.getName(), ScalarOperation.LOG) - .put(ScalarOperation.LOG2.getName(), ScalarOperation.LOG2) - .put(ScalarOperation.LOG10.getName(), ScalarOperation.LOG10) - .build(); - + private static final Map binaryOperatorOperationMap = + new ImmutableMap.Builder() + .put(SQLBinaryOperator.Add, ScalarOperation.ADD) + .put(SQLBinaryOperator.Subtract, ScalarOperation.SUBTRACT) + .put(SQLBinaryOperator.Multiply, ScalarOperation.MULTIPLY) + .put(SQLBinaryOperator.Divide, ScalarOperation.DIVIDE) + .put(SQLBinaryOperator.Modulus, ScalarOperation.MODULES) + .build(); + private static final Map methodOperationMap = + new ImmutableMap.Builder() + 
.put(ScalarOperation.ABS.getName(), ScalarOperation.ABS) + .put(ScalarOperation.ACOS.getName(), ScalarOperation.ACOS) + .put(ScalarOperation.ASIN.getName(), ScalarOperation.ASIN) + .put(ScalarOperation.ATAN.getName(), ScalarOperation.ATAN) + .put(ScalarOperation.ATAN2.getName(), ScalarOperation.ATAN2) + .put(ScalarOperation.TAN.getName(), ScalarOperation.TAN) + .put(ScalarOperation.CBRT.getName(), ScalarOperation.CBRT) + .put(ScalarOperation.CEIL.getName(), ScalarOperation.CEIL) + .put(ScalarOperation.COS.getName(), ScalarOperation.COS) + .put(ScalarOperation.COSH.getName(), ScalarOperation.COSH) + .put(ScalarOperation.EXP.getName(), ScalarOperation.EXP) + .put(ScalarOperation.FLOOR.getName(), ScalarOperation.FLOOR) + .put(ScalarOperation.LN.getName(), ScalarOperation.LN) + .put(ScalarOperation.LOG.getName(), ScalarOperation.LOG) + .put(ScalarOperation.LOG2.getName(), ScalarOperation.LOG2) + .put(ScalarOperation.LOG10.getName(), ScalarOperation.LOG10) + .build(); - private final SQLAggregationParser.Context context; + private final SQLAggregationParser.Context context; - /** - * Convert the {@link SQLExpr} to {@link Expression} - * - * @param expr {@link SQLExpr} - * @return expression {@link Expression} - */ - public Expression convert(SQLExpr expr) { - Optional resolvedExpression = context.resolve(expr); - if (resolvedExpression.isPresent()) { - return resolvedExpression.get(); - } else { - if (expr instanceof SQLBinaryOpExpr) { - return binaryOperatorToExpression((SQLBinaryOpExpr) expr, this::convert); - } else if (expr instanceof SQLMethodInvokeExpr) { - return methodToExpression((SQLMethodInvokeExpr) expr, this::convert); - } else if (expr instanceof SQLValuableExpr) { - return literal(ExprValueFactory.from(((SQLValuableExpr) expr).getValue())); - } else if (expr instanceof SQLCastExpr) { - return cast(convert(((SQLCastExpr) expr).getExpr())); - } else { - throw new RuntimeException("unsupported expr: " + expr); - } - } + /** + * Convert the {@link SQLExpr} to 
{@link Expression} + * + * @param expr {@link SQLExpr} + * @return expression {@link Expression} + */ + public Expression convert(SQLExpr expr) { + Optional resolvedExpression = context.resolve(expr); + if (resolvedExpression.isPresent()) { + return resolvedExpression.get(); + } else { + if (expr instanceof SQLBinaryOpExpr) { + return binaryOperatorToExpression((SQLBinaryOpExpr) expr, this::convert); + } else if (expr instanceof SQLMethodInvokeExpr) { + return methodToExpression((SQLMethodInvokeExpr) expr, this::convert); + } else if (expr instanceof SQLValuableExpr) { + return literal(ExprValueFactory.from(((SQLValuableExpr) expr).getValue())); + } else if (expr instanceof SQLCastExpr) { + return cast(convert(((SQLCastExpr) expr).getExpr())); + } else { + throw new RuntimeException("unsupported expr: " + expr); + } } + } - private Expression binaryOperatorToExpression(SQLBinaryOpExpr expr, - Function converter) { - if (binaryOperatorOperationMap.containsKey(expr.getOperator())) { - return ExpressionFactory.of(binaryOperatorOperationMap.get(expr.getOperator()), - Arrays.asList(converter.apply(expr.getLeft()), - converter.apply(expr.getRight()))); - } else { - throw new UnsupportedOperationException("unsupported operator: " + expr.getOperator().getName()); - } + private Expression binaryOperatorToExpression( + SQLBinaryOpExpr expr, Function converter) { + if (binaryOperatorOperationMap.containsKey(expr.getOperator())) { + return ExpressionFactory.of( + binaryOperatorOperationMap.get(expr.getOperator()), + Arrays.asList(converter.apply(expr.getLeft()), converter.apply(expr.getRight()))); + } else { + throw new UnsupportedOperationException( + "unsupported operator: " + expr.getOperator().getName()); } + } - private Expression methodToExpression(SQLMethodInvokeExpr expr, Function converter) { - String methodName = expr.getMethodName().toLowerCase(); - if (methodOperationMap.containsKey(methodName)) { + private Expression methodToExpression( + SQLMethodInvokeExpr expr, 
Function converter) { + String methodName = expr.getMethodName().toLowerCase(); + if (methodOperationMap.containsKey(methodName)) { - return ExpressionFactory.of(methodOperationMap.get(methodName), - expr.getParameters().stream().map(converter).collect(Collectors.toList())); - } else { - throw new UnsupportedOperationException("unsupported operator: " + expr.getMethodName()); - } + return ExpressionFactory.of( + methodOperationMap.get(methodName), + expr.getParameters().stream().map(converter).collect(Collectors.toList())); + } else { + throw new UnsupportedOperationException("unsupported operator: " + expr.getMethodName()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java index fbaff0ba18..4d1ab58160 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.converter; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; @@ -24,53 +23,49 @@ import org.opensearch.sql.legacy.query.planner.physical.node.project.PhysicalProject; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.PhysicalScroll; -/** - * Definition of SQL to PhysicalOperator converter. - */ +/** Definition of SQL to PhysicalOperator converter. 
*/ public class SQLToOperatorConverter extends MySqlASTVisitorAdapter { - private static final Logger LOG = LogManager.getLogger(SQLToOperatorConverter.class); - - private final Client client; - private final SQLAggregationParser aggregationParser; + private static final Logger LOG = LogManager.getLogger(SQLToOperatorConverter.class); - @Getter - private PhysicalOperator physicalOperator; + private final Client client; + private final SQLAggregationParser aggregationParser; - public SQLToOperatorConverter(Client client, ColumnTypeProvider columnTypeProvider) { - this.client = client; - this.aggregationParser = new SQLAggregationParser(columnTypeProvider); - } + @Getter private PhysicalOperator physicalOperator; - @Override - public boolean visit(MySqlSelectQueryBlock query) { + public SQLToOperatorConverter(Client client, ColumnTypeProvider columnTypeProvider) { + this.client = client; + this.aggregationParser = new SQLAggregationParser(columnTypeProvider); + } - //1. parse the aggregation - aggregationParser.parse(query); + @Override + public boolean visit(MySqlSelectQueryBlock query) { + // 1. parse the aggregation + aggregationParser.parse(query); - //2. construct the PhysicalOperator - physicalOperator = project(scroll(query)); - return false; - } + // 2. construct the PhysicalOperator + physicalOperator = project(scroll(query)); + return false; + } - /** - * Get list of {@link ColumnNode}. - * - * @return list of {@link ColumnNode}. - */ - public List getColumnNodes() { - return aggregationParser.getColumnNodes(); - } + /** + * Get list of {@link ColumnNode}. + * + * @return list of {@link ColumnNode}. 
+ */ + public List getColumnNodes() { + return aggregationParser.getColumnNodes(); + } - private PhysicalOperator project(PhysicalOperator input) { - return new PhysicalProject(input, aggregationParser.getColumnNodes()); - } + private PhysicalOperator project(PhysicalOperator input) { + return new PhysicalProject(input, aggregationParser.getColumnNodes()); + } - @SneakyThrows - private PhysicalOperator scroll(MySqlSelectQueryBlock query) { - query.getSelectList().clear(); - query.getSelectList().addAll(aggregationParser.selectItemList()); - Select select = new SqlParser().parseSelect(query); - return new PhysicalScroll(new AggregationQueryAction(client, select)); - } + @SneakyThrows + private PhysicalOperator scroll(MySqlSelectQueryBlock query) { + query.getSelectList().clear(); + query.getSelectList().addAll(aggregationParser.selectItemList()); + Select select = new SqlParser().parseSelect(query); + return new PhysicalScroll(new AggregationQueryAction(client, select)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java index ae5f0fb9c8..0bf93f5787 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java @@ -23,17 +23,19 @@ public class QueryParams { /** Join type, ex. inner join, left join */ private final SQLJoinTableSource.JoinType joinType; - /** - *
    -     * Join conditions in ON clause grouped by OR.
    -     * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address"
    -     * => list: [
    -     * [ (a.name, b.id), (a.age, b.age) ],
    -     * [ (a.location, b.address) ]
    -     * ]
    -     * 
    - */ - private final List>> joinConditions; + /** + * + * + *
    +   * Join conditions in ON clause grouped by OR.
    +   * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address"
    +   * => list: [
    +   * [ (a.name, b.id), (a.age, b.age) ],
    +   * [ (a.location, b.address) ]
    +   * ]
    +   * 
    + */ + private final List>> joinConditions; public QueryParams( TableInJoinRequestBuilder request1, diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java index 405a8a9f72..c23786d6b3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java @@ -71,23 +71,25 @@ public String toString() { return "Join [ conditions=" + condition + " type=" + type + " ]"; } - /** - *
    -     * Join condition in ON clause grouped by OR.
    -     * 

    - * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address" - * => input list: [ - * [ (a.name, b.id), (a.age, b.age) ], - * [ (a.location, b.address) ] - * ] - *

    - * => JoinCondition: - * leftTableAlias: "a", rightTableAlias: "b" - * leftColumnNames: [ ["name", "age"], ["location"] ] - * rightColumnNames: [ ["id", "age"], ["address" ] ] - *

    - */ - public static class JoinCondition { + /** + * + * + *
    +   * Join condition in ON clause grouped by OR.
    +   * 

    + * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address" + * => input list: [ + * [ (a.name, b.id), (a.age, b.age) ], + * [ (a.location, b.address) ] + * ] + *

    + * => JoinCondition: + * leftTableAlias: "a", rightTableAlias: "b" + * leftColumnNames: [ ["name", "age"], ["location"] ] + * rightColumnNames: [ ["id", "age"], ["address" ] ] + *

    + */ + public static class JoinCondition { private final String leftTableAlias; private final String rightTableAlias; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java index 670be71de5..f9033ce90f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.List; @@ -13,45 +12,37 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; import org.opensearch.sql.legacy.query.planner.physical.node.sort.QuickSort; -/** - * Logical operator for Sort. - */ +/** Logical operator for Sort. */ public class Sort implements LogicalOperator { - private final LogicalOperator next; - - /** - * Column name list in ORDER BY - */ - private final List orderByColNames; - - /** - * Order by type, ex. ASC, DESC - */ - private final String orderByType; + private final LogicalOperator next; + /** Column name list in ORDER BY */ + private final List orderByColNames; - public Sort(LogicalOperator next, List orderByColNames, String orderByType) { - this.next = next; - this.orderByColNames = orderByColNames; - this.orderByType = orderByType.toUpperCase(); - } + /** Order by type, ex. 
ASC, DESC */ + private final String orderByType; - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + public Sort(LogicalOperator next, List orderByColNames, String orderByType) { + this.next = next; + this.orderByColNames = orderByColNames; + this.orderByType = orderByType.toUpperCase(); + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - return new PhysicalOperator[]{ - new QuickSort<>(optimalOps.get(next), orderByColNames, orderByType) - }; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public String toString() { - return "Sort [ columns=" + orderByColNames + " order=" + orderByType + " ]"; - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + return new PhysicalOperator[] { + new QuickSort<>(optimalOps.get(next), orderByColNames, orderByType) + }; + } + @Override + public String toString() { + return "Sort [ columns=" + orderByColNames + " order=" + orderByType + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java index 466779faae..16af199ed7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.Map; @@ -13,54 +12,44 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.Scroll; -/** - * Table scan - */ +/** Table scan */ public class TableScan implements LogicalOperator { - /** - * Request builder for the table - */ - private final TableInJoinRequestBuilder request; - - /** - * Page size for physical operator - */ - 
private final int pageSize; - - public TableScan(TableInJoinRequestBuilder request, int pageSize) { - this.request = request; - this.pageSize = pageSize; - } + /** Request builder for the table */ + private final TableInJoinRequestBuilder request; - @Override - public PlanNode[] children() { - return new PlanNode[0]; - } + /** Page size for physical operator */ + private final int pageSize; - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - return new PhysicalOperator[]{ - new Scroll(request, pageSize) - }; - } + public TableScan(TableInJoinRequestBuilder request, int pageSize) { + this.request = request; + this.pageSize = pageSize; + } - @Override - public String toString() { - return "TableScan"; - } + @Override + public PlanNode[] children() { + return new PlanNode[0]; + } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + return new PhysicalOperator[] {new Scroll(request, pageSize)}; + } - /********************************************* - * Getters for Explain - *********************************************/ + @Override + public String toString() { + return "TableScan"; + } - public String getTableAlias() { - return request.getAlias(); - } + /********************************************* + * Getters for Explain + *********************************************/ - public String getTableName() { - return request.getOriginalSelect().getFrom().get(0).getIndex(); - } + public String getTableAlias() { + return request.getAlias(); + } + public String getTableName() { + return request.getOriginalSelect().getFrom().get(0).getIndex(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java index e39f36ed5a..978d996ad0 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java @@ 
-3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.Map; @@ -20,52 +19,51 @@ */ public class Top implements LogicalOperator, PhysicalOperator { - private final PlanNode next; + private final PlanNode next; - /** - * Number of rows to return in total - */ - private int count; + /** Number of rows to return in total */ + private int count; - @SuppressWarnings("unchecked") - public Top(PlanNode next, int count) { - this.next = next; - this.count = count; - } + @SuppressWarnings("unchecked") + public Top(PlanNode next, int count) { + this.next = next; + this.count = count; + } - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @SuppressWarnings("unchecked") - @Override - public boolean hasNext() { - return count > 0 && ((PhysicalOperator) next).hasNext(); - } + @SuppressWarnings("unchecked") + @Override + public boolean hasNext() { + return count > 0 && ((PhysicalOperator) next).hasNext(); + } - @SuppressWarnings("unchecked") - @Override - public Row next() { - count--; - return ((PhysicalOperator) next).next(); - } + @SuppressWarnings("unchecked") + @Override + public Row next() { + count--; + return ((PhysicalOperator) next).next(); + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - if (!(next instanceof LogicalOperator)) { - throw new IllegalStateException("Only logical operator can perform this toPhysical() operation"); - } - return new PhysicalOperator[]{new Top<>(optimalOps.get(next), count)}; + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + if (!(next instanceof LogicalOperator)) { + throw new IllegalStateException( + "Only logical operator can perform this toPhysical() operation"); } + return new PhysicalOperator[] {new Top<>(optimalOps.get(next), count)}; + } - @Override - public Cost estimate() { - return new Cost(); - } 
+ @Override + public Cost estimate() { + return new Cost(); + } - @Override - public String toString() { - return "Top [ " + "count=" + count + " ]"; - } + @Override + public String toString() { + return "Top [ count=" + count + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java index 9fcb977beb..9f2c9e4174 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java @@ -91,19 +91,22 @@ public void close() { LOG.debug("Cleared all resources used by join"); } - /** - * Build-probe left and right block by block to prefetch next matches (and mismatches if outer join). - *
      - *
    1. Build hash table and open right side. - *
    2. Keep probing right to find matched rows (meanwhile update mismatched set) - *
    3. Check if any row in mismatched set to return in the case of outer join. - *
    4. Nothing remained now, move on to next block of left. Go back to step 1. - *
    - * This is a new run AND no block from left means algorithm should stop and return empty. - */ - @Override - protected Collection> prefetch() throws Exception { - while (!isNewRunButNoMoreBlockFromLeft()) { + /** + * Build-probe left and right block by block to prefetch next matches (and mismatches if outer + * join). + * + *
      + *
    1. Build hash table and open right side. + *
    2. Keep probing right to find matched rows (meanwhile update mismatched set) + *
    3. Check if any row in mismatched set to return in the case of outer join. + *
    4. Nothing remained now, move on to next block of left. Go back to step 1. + *
    + * + * This is a new run AND no block from left means algorithm should stop and return empty. + */ + @Override + protected Collection> prefetch() throws Exception { + while (!isNewRunButNoMoreBlockFromLeft()) { // 1.Build hash table and (re-)open right side for the new run if (isNewRun()) { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java index 1750563e47..d03dd5af40 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java @@ -14,6 +14,8 @@ import org.opensearch.sql.legacy.query.planner.physical.Row; /** + * + * *
      * Search hit row that implements basic accessor for SearchHit.
      * Encapsulate all OpenSearch specific knowledge: how to parse source including nested path.
    diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java
    index ec03eeaccb..3ff4662ce4 100644
    --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java
    +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java
    @@ -3,67 +3,61 @@
      * SPDX-License-Identifier: Apache-2.0
      */
     
    -
     package org.opensearch.sql.legacy.query.planner.resource;
     
     import org.opensearch.client.Client;
     
     /**
      * Statistics collector collects from OpenSearch stats, JVM etc for other components:
    - * 

    - * 1) Resource monitor - * 2) Cost estimation - * 3) Block size calculation + * + *

      + *
    1. Resource monitor + *
    2. Cost estimation + *
    3. Block size calculation + *
    */ public class Stats { - /** - * Client connection to OpenSearch cluster (unused now) - */ - private Client client; - - public Stats(Client client) { - this.client = client; - } - - public MemStats collectMemStats() { - return new MemStats( - Runtime.getRuntime().freeMemory(), - Runtime.getRuntime().totalMemory() - ); - } + /** Client connection to OpenSearch cluster (unused now) */ + private Client client; - /** - * Statistics data class for memory usage - */ - public static class MemStats { - private long free; - private long total; + public Stats(Client client) { + this.client = client; + } - public MemStats(long free, long total) { - this.free = free; - this.total = total; - } + public MemStats collectMemStats() { + return new MemStats(Runtime.getRuntime().freeMemory(), Runtime.getRuntime().totalMemory()); + } - public long getFree() { - return free; - } + /** Statistics data class for memory usage */ + public static class MemStats { + private long free; + private long total; - public long getTotal() { - return total; - } + public MemStats(long free, long total) { + this.free = free; + this.total = total; } - /* - public class IndexStats { - private long size; - private long docNum; + public long getFree() { + return free; + } - public IndexStats(long size, long docNum) { - this.size = size; - this.docNum = docNum; - } + public long getTotal() { + return total; } - */ + } + + /* + public class IndexStats { + private long size; + private long docNum; + + public IndexStats(long size, long docNum) { + this.size = size; + this.docNum = docNum; + } + } + */ } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java index 961729867d..76a8c5902c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.monitor; import org.apache.logging.log4j.LogManager; @@ -12,46 +11,39 @@ import org.opensearch.sql.legacy.query.planner.resource.Stats; import org.opensearch.sql.legacy.query.planner.resource.Stats.MemStats; -/** - * Circuit breaker for total memory usage in JVM on current OpenSearch node. - */ +/** Circuit breaker for total memory usage in JVM on current OpenSearch node. */ public class TotalMemoryMonitor implements Monitor { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Statistic collector - */ - private final Stats stats; + private static final Logger LOG = LogManager.getLogger(); - /** - * Upper limit for memory usage percentage - */ - private final int limit; + /** Statistic collector */ + private final Stats stats; - public TotalMemoryMonitor(Stats stats, Config config) { - this.stats = stats; - this.limit = config.circuitBreakLimit(); - } + /** Upper limit for memory usage percentage */ + private final int limit; - @Override - public boolean isHealthy() { - MemStats memStats = stats.collectMemStats(); - int usage = percentage(memUsage(memStats)); + public TotalMemoryMonitor(Stats stats, Config config) { + this.stats = stats; + this.limit = config.circuitBreakLimit(); + } - if (LOG.isDebugEnabled()) { - LOG.debug("Memory usage and limit: {}%, {}%", usage, limit); - } + @Override + public boolean isHealthy() { + MemStats memStats = stats.collectMemStats(); + int usage = percentage(memUsage(memStats)); - return usage < limit; + if (LOG.isDebugEnabled()) { + LOG.debug("Memory usage and limit: {}%, {}%", usage, limit); } - private int percentage(double usage) { - return (int) Math.round(usage * 100); - } + return usage < limit; + } - private double memUsage(MemStats memStats) { - return (1.0 * (memStats.getTotal() - 
memStats.getFree())) / memStats.getTotal(); - } + private int percentage(double usage) { + return (int) Math.round(usage * 100); + } + private double memUsage(MemStats memStats) { + return (1.0 * (memStats.getTotal() - memStats.getFree())) / memStats.getTotal(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java index 8ac66e4b70..bffdd36688 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.request; import com.fasterxml.jackson.core.JsonFactory; @@ -22,95 +21,96 @@ public class SqlRequest { - public static final SqlRequest NULL = new SqlRequest("", null); - - String sql; - JSONObject jsonContent; - String cursor; - Integer fetchSize; - - public SqlRequest(final String sql, final JSONObject jsonContent) { - this.sql = sql; - this.jsonContent = jsonContent; - } - - public SqlRequest(final String cursor) { - this.cursor = cursor; - } - - public SqlRequest(final String sql, final Integer fetchSize, final JSONObject jsonContent) { - this.sql = sql; - this.fetchSize = fetchSize; - this.jsonContent = jsonContent; - } - - private static boolean isValidJson(String json) { - try { - new JSONObject(json); - } catch (JSONException e) { - return false; - } - return true; + public static final SqlRequest NULL = new SqlRequest("", null); + + String sql; + JSONObject jsonContent; + String cursor; + Integer fetchSize; + + public SqlRequest(final String sql, final JSONObject jsonContent) { + this.sql = sql; + this.jsonContent = jsonContent; + } + + public SqlRequest(final String cursor) { + this.cursor = cursor; + } + + public SqlRequest(final String sql, final Integer fetchSize, final JSONObject jsonContent) { + this.sql = sql; + this.fetchSize = fetchSize; + 
this.jsonContent = jsonContent; + } + + private static boolean isValidJson(String json) { + try { + new JSONObject(json); + } catch (JSONException e) { + return false; } - - public String getSql() { - return this.sql; - } - - public String cursor() { - return this.cursor; - } - - public Integer fetchSize() { - return this.fetchSize; - } - - public JSONObject getJsonContent() { - return this.jsonContent; - } - - /** - * JSONObject's getJSONObject method will return just the value, this helper method is to extract the key and - * value of 'filter' and return the JSON as a string. - */ - private String getFilterObjectAsString(JSONObject jsonContent) { - String filterVal = jsonContent.getJSONObject("filter").toString(); - return "{\"filter\":" + filterVal + "}"; - } - - private boolean hasFilterInRequest() { - return jsonContent != null && jsonContent.has("filter"); - } - - /** - * Takes 'filter' parameter from JSON request if JSON request and 'filter' were given and creates a QueryBuilder - * object out of it to add to the filterClauses of the BoolQueryBuilder. 
- */ - private void addFilterFromJson(BoolQueryBuilder boolQuery) throws SqlParseException { - try { - String filter = getFilterObjectAsString(jsonContent); - SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); - XContentParser parser = new JsonXContentParser( - new NamedXContentRegistry(searchModule.getNamedXContents()), - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(filter)); - - // nextToken is called before passing the parser to fromXContent since the fieldName will be null if the - // first token it parses is START_OBJECT resulting in an exception - parser.nextToken(); - boolQuery.filter(BoolQueryBuilder.fromXContent(parser)); - } catch (IOException e) { - throw new SqlParseException("Unable to parse 'filter' in JSON request: " + e.getMessage()); - } - + return true; + } + + public String getSql() { + return this.sql; + } + + public String cursor() { + return this.cursor; + } + + public Integer fetchSize() { + return this.fetchSize; + } + + public JSONObject getJsonContent() { + return this.jsonContent; + } + + /** + * JSONObject's getJSONObject method will return just the value, this helper method is to extract + * the key and value of 'filter' and return the JSON as a string. + */ + private String getFilterObjectAsString(JSONObject jsonContent) { + String filterVal = jsonContent.getJSONObject("filter").toString(); + return "{\"filter\":" + filterVal + "}"; + } + + private boolean hasFilterInRequest() { + return jsonContent != null && jsonContent.has("filter"); + } + + /** + * Takes 'filter' parameter from JSON request if JSON request and 'filter' were given and creates + * a QueryBuilder object out of it to add to the filterClauses of the BoolQueryBuilder. 
+ */ + private void addFilterFromJson(BoolQueryBuilder boolQuery) throws SqlParseException { + try { + String filter = getFilterObjectAsString(jsonContent); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); + XContentParser parser = + new JsonXContentParser( + new NamedXContentRegistry(searchModule.getNamedXContents()), + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(filter)); + + // nextToken is called before passing the parser to fromXContent since the fieldName will be + // null if the + // first token it parses is START_OBJECT resulting in an exception + parser.nextToken(); + boolQuery.filter(BoolQueryBuilder.fromXContent(parser)); + } catch (IOException e) { + throw new SqlParseException("Unable to parse 'filter' in JSON request: " + e.getMessage()); } + } - public BoolQueryBuilder checkAndAddFilter(BoolQueryBuilder boolQuery) throws SqlParseException { - if (hasFilterInRequest()) { - // if WHERE was not given, create a new BoolQuery to add "filter" to - boolQuery = boolQuery == null ? new BoolQueryBuilder() : boolQuery; - addFilterFromJson(boolQuery); - } - return boolQuery; + public BoolQueryBuilder checkAndAddFilter(BoolQueryBuilder boolQuery) throws SqlParseException { + if (hasFilterInRequest()) { + // if WHERE was not given, create a new BoolQuery to add "filter" to + boolQuery = boolQuery == null ? 
new BoolQueryBuilder() : boolQuery; + addFilterFromJson(boolQuery); } + return boolQuery; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java index 4c5d207be8..0fee6cff86 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.request; import java.util.ArrayList; @@ -16,128 +15,134 @@ public class SqlRequestFactory { - private static final String SQL_URL_PARAM_KEY = "sql"; - private static final String SQL_FIELD_NAME = "query"; - private static final String PARAM_FIELD_NAME = "parameters"; - private static final String PARAM_TYPE_FIELD_NAME = "type"; - private static final String PARAM_VALUE_FIELD_NAME = "value"; + private static final String SQL_URL_PARAM_KEY = "sql"; + private static final String SQL_FIELD_NAME = "query"; + private static final String PARAM_FIELD_NAME = "parameters"; + private static final String PARAM_TYPE_FIELD_NAME = "type"; + private static final String PARAM_VALUE_FIELD_NAME = "value"; - public static final String SQL_CURSOR_FIELD_NAME = "cursor"; - public static final String SQL_FETCH_FIELD_NAME = "fetch_size"; + public static final String SQL_CURSOR_FIELD_NAME = "cursor"; + public static final String SQL_FETCH_FIELD_NAME = "fetch_size"; - public static SqlRequest getSqlRequest(RestRequest request) { - switch (request.method()) { - case POST: - return parseSqlRequestFromPayload(request); - default: - throw new IllegalArgumentException("OpenSearch SQL doesn't supported HTTP " + request.method().name()); - } + public static SqlRequest getSqlRequest(RestRequest request) { + switch (request.method()) { + case POST: + return parseSqlRequestFromPayload(request); + default: + throw new IllegalArgumentException( + 
"OpenSearch SQL doesn't supported HTTP " + request.method().name()); } + } - private static SqlRequest parseSqlRequestFromUrl(RestRequest restRequest) { - String sql; + private static SqlRequest parseSqlRequestFromUrl(RestRequest restRequest) { + String sql; - sql = restRequest.param(SQL_URL_PARAM_KEY); - if (sql == null) { - throw new IllegalArgumentException("Cannot find sql parameter from the URL"); - } - return new SqlRequest(sql, null); + sql = restRequest.param(SQL_URL_PARAM_KEY); + if (sql == null) { + throw new IllegalArgumentException("Cannot find sql parameter from the URL"); } + return new SqlRequest(sql, null); + } - private static SqlRequest parseSqlRequestFromPayload(RestRequest restRequest) { - String content = restRequest.content().utf8ToString(); + private static SqlRequest parseSqlRequestFromPayload(RestRequest restRequest) { + String content = restRequest.content().utf8ToString(); - JSONObject jsonContent; - try { - jsonContent = new JSONObject(content); - if (jsonContent.has(SQL_CURSOR_FIELD_NAME)) { - return new SqlRequest(jsonContent.getString(SQL_CURSOR_FIELD_NAME)); - } - } catch (JSONException e) { - throw new IllegalArgumentException("Failed to parse request payload", e); - } - String sql = jsonContent.getString(SQL_FIELD_NAME); - - if (jsonContent.has(PARAM_FIELD_NAME)) { // is a PreparedStatement - JSONArray paramArray = jsonContent.getJSONArray(PARAM_FIELD_NAME); - List parameters = parseParameters(paramArray); - return new PreparedStatementRequest(sql, validateAndGetFetchSize(jsonContent), jsonContent, parameters); - } - return new SqlRequest(sql, validateAndGetFetchSize(jsonContent), jsonContent); + JSONObject jsonContent; + try { + jsonContent = new JSONObject(content); + if (jsonContent.has(SQL_CURSOR_FIELD_NAME)) { + return new SqlRequest(jsonContent.getString(SQL_CURSOR_FIELD_NAME)); + } + } catch (JSONException e) { + throw new IllegalArgumentException("Failed to parse request payload", e); } + String sql = 
jsonContent.getString(SQL_FIELD_NAME); + if (jsonContent.has(PARAM_FIELD_NAME)) { // is a PreparedStatement + JSONArray paramArray = jsonContent.getJSONArray(PARAM_FIELD_NAME); + List parameters = + parseParameters(paramArray); + return new PreparedStatementRequest( + sql, validateAndGetFetchSize(jsonContent), jsonContent, parameters); + } + return new SqlRequest(sql, validateAndGetFetchSize(jsonContent), jsonContent); + } - private static Integer validateAndGetFetchSize(JSONObject jsonContent) { - Optional fetchSize = Optional.empty(); - try { - if (jsonContent.has(SQL_FETCH_FIELD_NAME)) { - fetchSize = Optional.of(jsonContent.getInt(SQL_FETCH_FIELD_NAME)); - if (fetchSize.get() < 0) { - throw new IllegalArgumentException("Fetch_size must be greater or equal to 0"); - } - } - } catch (JSONException e) { - throw new IllegalArgumentException("Failed to parse field [" + SQL_FETCH_FIELD_NAME +"]", e); + private static Integer validateAndGetFetchSize(JSONObject jsonContent) { + Optional fetchSize = Optional.empty(); + try { + if (jsonContent.has(SQL_FETCH_FIELD_NAME)) { + fetchSize = Optional.of(jsonContent.getInt(SQL_FETCH_FIELD_NAME)); + if (fetchSize.get() < 0) { + throw new IllegalArgumentException("Fetch_size must be greater or equal to 0"); } - return fetchSize.orElse(0); + } + } catch (JSONException e) { + throw new IllegalArgumentException("Failed to parse field [" + SQL_FETCH_FIELD_NAME + "]", e); } + return fetchSize.orElse(0); + } - private static List parseParameters( - JSONArray paramsJsonArray) { - List parameters = new ArrayList<>(); - for (int i = 0; i < paramsJsonArray.length(); i++) { - JSONObject paramJson = paramsJsonArray.getJSONObject(i); - String typeString = paramJson.getString(PARAM_TYPE_FIELD_NAME); - if (typeString == null) { - throw new IllegalArgumentException("Parameter type cannot be null. 
parameter json: " - + paramJson.toString()); - } - PreparedStatementRequest.ParameterType type; - try { - type = PreparedStatementRequest.ParameterType.valueOf(typeString.toUpperCase()); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("Unsupported parameter type " + typeString, e); - } - try { - PreparedStatementRequest.PreparedStatementParameter parameter; - switch (type) { - case BOOLEAN: - parameter = new PreparedStatementRequest.PreparedStatementParameter<>( - paramJson.getBoolean(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case KEYWORD: - case STRING: - case DATE: - parameter = new PreparedStatementRequest.StringParameter( - paramJson.getString(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case BYTE: - case SHORT: - case INTEGER: - case LONG: - parameter = new PreparedStatementRequest.PreparedStatementParameter<>( - paramJson.getLong(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case FLOAT: - case DOUBLE: - parameter = new PreparedStatementRequest.PreparedStatementParameter<>( - paramJson.getDouble(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case NULL: - parameter = new PreparedStatementRequest.NullParameter(); - parameters.add(parameter); - break; - default: - throw new IllegalArgumentException("Failed to handle parameter type " + type.name()); - } - } catch (JSONException e) { - throw new IllegalArgumentException("Failed to parse PreparedStatement parameters", e); - } + private static List parseParameters( + JSONArray paramsJsonArray) { + List parameters = new ArrayList<>(); + for (int i = 0; i < paramsJsonArray.length(); i++) { + JSONObject paramJson = paramsJsonArray.getJSONObject(i); + String typeString = paramJson.getString(PARAM_TYPE_FIELD_NAME); + if (typeString == null) { + throw new IllegalArgumentException( + "Parameter type cannot be null. 
parameter json: " + paramJson.toString()); + } + PreparedStatementRequest.ParameterType type; + try { + type = PreparedStatementRequest.ParameterType.valueOf(typeString.toUpperCase()); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Unsupported parameter type " + typeString, e); + } + try { + PreparedStatementRequest.PreparedStatementParameter parameter; + switch (type) { + case BOOLEAN: + parameter = + new PreparedStatementRequest.PreparedStatementParameter<>( + paramJson.getBoolean(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case KEYWORD: + case STRING: + case DATE: + parameter = + new PreparedStatementRequest.StringParameter( + paramJson.getString(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case BYTE: + case SHORT: + case INTEGER: + case LONG: + parameter = + new PreparedStatementRequest.PreparedStatementParameter<>( + paramJson.getLong(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case FLOAT: + case DOUBLE: + parameter = + new PreparedStatementRequest.PreparedStatementParameter<>( + paramJson.getDouble(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case NULL: + parameter = new PreparedStatementRequest.NullParameter(); + parameters.add(parameter); + break; + default: + throw new IllegalArgumentException("Failed to handle parameter type " + type.name()); } - return parameters; + } catch (JSONException e) { + throw new IllegalArgumentException("Failed to parse PreparedStatement parameters", e); + } } + return parameters; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java index c9d3abb320..b151fabde6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java @@ -3,57 +3,56 @@ * SPDX-License-Identifier: Apache-2.0 */ - 
package org.opensearch.sql.legacy.request; import java.util.Map; import java.util.Optional; import org.opensearch.sql.legacy.executor.Format; -/** - * Utils class for parse the request params. - */ +/** Utils class for parse the request params. */ public class SqlRequestParam { - public static final String QUERY_PARAMS_FORMAT = "format"; - public static final String QUERY_PARAMS_PRETTY = "pretty"; - public static final String QUERY_PARAMS_ESCAPE = "escape"; - - private static final String DEFAULT_RESPONSE_FORMAT = "jdbc"; - - /** - * Parse the pretty params to decide whether the response should be pretty formatted. - * @param requestParams request params. - * @return return true if the response required pretty format, otherwise return false. - */ - public static boolean isPrettyFormat(Map requestParams) { - return requestParams.containsKey(QUERY_PARAMS_PRETTY) - && ("".equals(requestParams.get(QUERY_PARAMS_PRETTY)) - || "true".equals(requestParams.get(QUERY_PARAMS_PRETTY))); - } - - /** - * Parse the request params and return the {@link Format} of the response - * @param requestParams request params - * @return The response Format. - */ - public static Format getFormat(Map requestParams) { - String formatName = - requestParams.containsKey(QUERY_PARAMS_FORMAT) - ? requestParams.get(QUERY_PARAMS_FORMAT).toLowerCase() - : DEFAULT_RESPONSE_FORMAT; - Optional optionalFormat = Format.of(formatName); - if (optionalFormat.isPresent()) { - return optionalFormat.get(); - } else { - throw new IllegalArgumentException("Failed to create executor due to unknown response format: " - + formatName); - } + public static final String QUERY_PARAMS_FORMAT = "format"; + public static final String QUERY_PARAMS_PRETTY = "pretty"; + public static final String QUERY_PARAMS_ESCAPE = "escape"; + + private static final String DEFAULT_RESPONSE_FORMAT = "jdbc"; + + /** + * Parse the pretty params to decide whether the response should be pretty formatted. 
+ * + * @param requestParams request params. + * @return return true if the response required pretty format, otherwise return false. + */ + public static boolean isPrettyFormat(Map requestParams) { + return requestParams.containsKey(QUERY_PARAMS_PRETTY) + && ("".equals(requestParams.get(QUERY_PARAMS_PRETTY)) + || "true".equals(requestParams.get(QUERY_PARAMS_PRETTY))); + } + + /** + * Parse the request params and return the {@link Format} of the response + * + * @param requestParams request params + * @return The response Format. + */ + public static Format getFormat(Map requestParams) { + String formatName = + requestParams.containsKey(QUERY_PARAMS_FORMAT) + ? requestParams.get(QUERY_PARAMS_FORMAT).toLowerCase() + : DEFAULT_RESPONSE_FORMAT; + Optional optionalFormat = Format.of(formatName); + if (optionalFormat.isPresent()) { + return optionalFormat.get(); + } else { + throw new IllegalArgumentException( + "Failed to create executor due to unknown response format: " + formatName); } + } - public static boolean getEscapeOption(Map requestParams) { - if (requestParams.containsKey(QUERY_PARAMS_ESCAPE)) { - return Boolean.parseBoolean(requestParams.get(QUERY_PARAMS_ESCAPE)); - } - return false; + public static boolean getEscapeOption(Map requestParams) { + if (requestParams.containsKey(QUERY_PARAMS_ESCAPE)) { + return Boolean.parseBoolean(requestParams.get(QUERY_PARAMS_ESCAPE)); } + return false; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java index 63c33d4721..015d8d8858 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import static com.alibaba.druid.sql.ast.expr.SQLBinaryOperator.Divide; @@ -14,44 +13,42 @@ import 
com.alibaba.druid.sql.ast.statement.SQLExprTableSource; import com.google.common.base.Strings; -/** - * Util class for table expression parsing - */ +/** Util class for table expression parsing */ class Table { - private final SQLExprTableSource tableExpr; - - Table(SQLExprTableSource tableExpr) { - this.tableExpr = tableExpr; - } + private final SQLExprTableSource tableExpr; - boolean hasAlias() { - return !alias().isEmpty(); - } + Table(SQLExprTableSource tableExpr) { + this.tableExpr = tableExpr; + } - String alias() { - return Strings.nullToEmpty(tableExpr.getAlias()); - } + boolean hasAlias() { + return !alias().isEmpty(); + } - void removeAlias() { - tableExpr.setAlias(null); - } + String alias() { + return Strings.nullToEmpty(tableExpr.getAlias()); + } - /** Extract table name in table expression */ - String name() { - SQLExpr expr = tableExpr.getExpr(); - if (expr instanceof SQLIdentifierExpr) { - return ((SQLIdentifierExpr) expr).getName(); - } else if (isTableWithType(expr)) { - return ((SQLIdentifierExpr) ((SQLBinaryOpExpr) expr).getLeft()).getName(); - } - return expr.toString(); - } + void removeAlias() { + tableExpr.setAlias(null); + } - /** Return true for table name along with type name, for example 'accounts/_doc' */ - private boolean isTableWithType(SQLExpr expr) { - return expr instanceof SQLBinaryOpExpr - && ((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIdentifierExpr - && ((SQLBinaryOpExpr) expr).getOperator() == Divide; + /** Extract table name in table expression */ + String name() { + SQLExpr expr = tableExpr.getExpr(); + if (expr instanceof SQLIdentifierExpr) { + return ((SQLIdentifierExpr) expr).getName(); + } else if (isTableWithType(expr)) { + return ((SQLIdentifierExpr) ((SQLBinaryOpExpr) expr).getLeft()).getName(); } + return expr.toString(); + } + + /** Return true for table name along with type name, for example 'accounts/_doc' */ + private boolean isTableWithType(SQLExpr expr) { + return expr instanceof SQLBinaryOpExpr + && 
((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIdentifierExpr + && ((SQLBinaryOpExpr) expr).getOperator() == Divide; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java index b8500454cd..80190a5889 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; @@ -17,86 +16,87 @@ import org.opensearch.sql.legacy.rewriter.RewriteRule; import org.opensearch.sql.legacy.rewriter.subquery.utils.FindSubQuery; -/** - * Rewrite rule for removing table alias or table name prefix in field name. - */ +/** Rewrite rule for removing table alias or table name prefix in field name. */ public class TableAliasPrefixRemoveRule implements RewriteRule { - /** Table aliases in FROM clause. Store table name for those without alias. */ - private final Set tableAliasesToRemove = new HashSet<>(); + /** Table aliases in FROM clause. Store table name for those without alias. 
*/ + private final Set tableAliasesToRemove = new HashSet<>(); - @Override - public boolean match(SQLQueryExpr root) { - if (hasSubQuery(root)) { - return false; - } - collectTableAliasesThatCanBeRemoved(root); - return !tableAliasesToRemove.isEmpty(); + @Override + public boolean match(SQLQueryExpr root) { + if (hasSubQuery(root)) { + return false; } + collectTableAliasesThatCanBeRemoved(root); + return !tableAliasesToRemove.isEmpty(); + } - @Override - public void rewrite(SQLQueryExpr root) { - removeTableAliasPrefixInColumnName(root); - } + @Override + public void rewrite(SQLQueryExpr root) { + removeTableAliasPrefixInColumnName(root); + } - private boolean hasSubQuery(SQLQueryExpr root) { - FindSubQuery visitor = new FindSubQuery(); - root.accept(visitor); - return visitor.hasSubQuery(); - } + private boolean hasSubQuery(SQLQueryExpr root) { + FindSubQuery visitor = new FindSubQuery(); + root.accept(visitor); + return visitor.hasSubQuery(); + } - private void collectTableAliasesThatCanBeRemoved(SQLQueryExpr root) { - visitNonJoinedTable(root, tableExpr -> { - Table table = new Table(tableExpr); - if (table.hasAlias()) { - tableAliasesToRemove.add(table.alias()); - table.removeAlias(); - } else { - tableAliasesToRemove.add(table.name()); - } + private void collectTableAliasesThatCanBeRemoved(SQLQueryExpr root) { + visitNonJoinedTable( + root, + tableExpr -> { + Table table = new Table(tableExpr); + if (table.hasAlias()) { + tableAliasesToRemove.add(table.alias()); + table.removeAlias(); + } else { + tableAliasesToRemove.add(table.name()); + } }); - } + } - private void removeTableAliasPrefixInColumnName(SQLQueryExpr root) { - visitColumnName(root, idExpr -> { - Identifier field = new Identifier(idExpr); - if (field.hasPrefix() && tableAliasesToRemove.contains(field.prefix())) { - field.removePrefix(); - } + private void removeTableAliasPrefixInColumnName(SQLQueryExpr root) { + visitColumnName( + root, + idExpr -> { + Identifier field = new Identifier(idExpr); + 
if (field.hasPrefix() && tableAliasesToRemove.contains(field.prefix())) { + field.removePrefix(); + } }); - } + } - private void visitNonJoinedTable(SQLQueryExpr root, - Consumer visit) { - root.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLJoinTableSource x) { - // Avoid visiting table name in any JOIN including comma/inner/left join - // between 2 indices or between index and nested field. - // For the latter case, alias is taken care of in {@link NestedFieldRewriter}. - return false; - } + private void visitNonJoinedTable(SQLQueryExpr root, Consumer visit) { + root.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLJoinTableSource x) { + // Avoid visiting table name in any JOIN including comma/inner/left join + // between 2 indices or between index and nested field. + // For the latter case, alias is taken care of in {@link NestedFieldRewriter}. + return false; + } - @Override - public void endVisit(SQLExprTableSource tableExpr) { - visit.accept(tableExpr); - } + @Override + public void endVisit(SQLExprTableSource tableExpr) { + visit.accept(tableExpr); + } }); - } + } - private void visitColumnName(SQLQueryExpr expr, - Consumer visit) { - expr.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLExprTableSource x) { - return false; // Avoid rewriting identifier in table name - } + private void visitColumnName(SQLQueryExpr expr, Consumer visit) { + expr.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLExprTableSource x) { + return false; // Avoid rewriting identifier in table name + } - @Override - public void endVisit(SQLIdentifierExpr idExpr) { - visit.accept(idExpr); - } + @Override + public void endVisit(SQLIdentifierExpr idExpr) { + visit.accept(idExpr); + } }); - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java index 31fc732879..b0258420eb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.identifier; import static org.opensearch.sql.legacy.utils.StringUtils.unquoteFullColumn; @@ -16,53 +15,52 @@ import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlASTVisitorAdapter; import org.opensearch.sql.legacy.rewriter.RewriteRule; -/** - * Quoted Identifiers Rewriter Rule - */ -public class UnquoteIdentifierRule extends MySqlASTVisitorAdapter implements RewriteRule { +/** Quoted Identifiers Rewriter Rule */ +public class UnquoteIdentifierRule extends MySqlASTVisitorAdapter + implements RewriteRule { - /** - * - * This method is to adjust the AST in the cases where the field is quoted, - * and the full name in the SELECT field is in the format of indexAlias.fieldName - * (e.g. SELECT b.`lastname` FROM bank AS b). - * - * In this case, the druid parser constructs a SQLSelectItem for the field "b.`lastname`", with SQLIdentifierExpr of - * "b." and alias of "`lastname`". - * - * This method corrects the SQLSelectItem object to have SQLIdentifier of "b.lastname" and alias of null. 
- */ - @Override - public boolean visit(SQLSelectItem selectItem) { - if (selectItem.getExpr() instanceof SQLIdentifierExpr) { - String identifier = ((SQLIdentifierExpr) selectItem.getExpr()).getName(); - if (identifier.endsWith(".")) { - String correctedIdentifier = identifier + unquoteSingleField(selectItem.getAlias(), "`"); - selectItem.setExpr(new SQLIdentifierExpr(correctedIdentifier)); - selectItem.setAlias(null); - } - } - selectItem.setAlias(unquoteSingleField(selectItem.getAlias(), "`")); - return true; + /** + * This method is to adjust the AST in the cases where the field is quoted, and the full name in + * the SELECT field is in the format of indexAlias.fieldName (e.g. SELECT b.`lastname` FROM bank + * AS b). + * + *

    In this case, the druid parser constructs a SQLSelectItem for the field "b.`lastname`", with + * SQLIdentifierExpr of "b." and alias of "`lastname`". + * + *

    This method corrects the SQLSelectItem object to have SQLIdentifier of "b.lastname" and + * alias of null. + */ + @Override + public boolean visit(SQLSelectItem selectItem) { + if (selectItem.getExpr() instanceof SQLIdentifierExpr) { + String identifier = ((SQLIdentifierExpr) selectItem.getExpr()).getName(); + if (identifier.endsWith(".")) { + String correctedIdentifier = identifier + unquoteSingleField(selectItem.getAlias(), "`"); + selectItem.setExpr(new SQLIdentifierExpr(correctedIdentifier)); + selectItem.setAlias(null); + } } + selectItem.setAlias(unquoteSingleField(selectItem.getAlias(), "`")); + return true; + } - @Override - public void endVisit(SQLIdentifierExpr identifierExpr) { - identifierExpr.setName(unquoteFullColumn(identifierExpr.getName())); - } + @Override + public void endVisit(SQLIdentifierExpr identifierExpr) { + identifierExpr.setName(unquoteFullColumn(identifierExpr.getName())); + } - @Override - public void endVisit(SQLExprTableSource tableSource) { - tableSource.setAlias(unquoteSingleField(tableSource.getAlias())); - } + @Override + public void endVisit(SQLExprTableSource tableSource) { + tableSource.setAlias(unquoteSingleField(tableSource.getAlias())); + } - @Override - public boolean match(SQLQueryExpr root) { - return true; - } + @Override + public boolean match(SQLQueryExpr root) { + return true; + } - @Override - public void rewrite(SQLQueryExpr root) { - root.accept(new UnquoteIdentifierRule()); - } + @Override + public void rewrite(SQLQueryExpr root) { + root.accept(new UnquoteIdentifierRule()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java index 69178b7e83..884784ed42 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java @@ -27,6 +27,8 @@ import 
org.opensearch.sql.legacy.utils.StringUtils; /** + * + * *

      *  Rewrite rule to add table alias to columnNames for JOIN queries without table alias.
      * 

    diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java index 5890befbca..2c837a7b2b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.matchtoterm; import com.alibaba.druid.sql.ast.SQLExpr; @@ -35,233 +34,233 @@ /** * Visitor to rewrite AST (abstract syntax tree) for supporting term_query in WHERE and IN condition - * Simple changing the matchQuery() to termQuery() will not work when mapping is both text and keyword - * The approach is to implement SQLIdentifier.visit() based on the correct field mapping. + * Simple changing the matchQuery() to termQuery() will not work when mapping is both text and + * keyword The approach is to implement SQLIdentifier.visit() based on the correct field mapping. */ - public class TermFieldRewriter extends MySqlASTVisitorAdapter { - private Deque environment = new ArrayDeque<>(); - private TermRewriterFilter filterType; + private Deque environment = new ArrayDeque<>(); + private TermRewriterFilter filterType; - public TermFieldRewriter() { - this.filterType = TermRewriterFilter.COMMA; - } - - public TermFieldRewriter(TermRewriterFilter filterType) { - this.filterType = filterType; - } - - @Override - public boolean visit(MySqlSelectQueryBlock query) { - environment.push(new TermFieldScope()); - if (query.getFrom() == null) { - return false; - } - - Map indexToType = new HashMap<>(); - collect(query.getFrom(), indexToType, curScope().getAliases()); - if (indexToType.isEmpty()) { - // no index found for current scope, continue. 
- return true; - } - curScope().setMapper(getMappings(indexToType)); + public TermFieldRewriter() { + this.filterType = TermRewriterFilter.COMMA; + } - if (this.filterType == TermRewriterFilter.COMMA || this.filterType == TermRewriterFilter.MULTI_QUERY) { - checkMappingCompatibility(curScope(), indexToType); - } + public TermFieldRewriter(TermRewriterFilter filterType) { + this.filterType = filterType; + } - return true; + @Override + public boolean visit(MySqlSelectQueryBlock query) { + environment.push(new TermFieldScope()); + if (query.getFrom() == null) { + return false; } - @Override - public void endVisit(MySqlSelectQueryBlock query) { - environment.pop(); + Map indexToType = new HashMap<>(); + collect(query.getFrom(), indexToType, curScope().getAliases()); + if (indexToType.isEmpty()) { + // no index found for current scope, continue. + return true; } + curScope().setMapper(getMappings(indexToType)); - @Override - public boolean visit(SQLSelectItem sqlSelectItem) { - return false; + if (this.filterType == TermRewriterFilter.COMMA + || this.filterType == TermRewriterFilter.MULTI_QUERY) { + checkMappingCompatibility(curScope(), indexToType); } - @Override - public boolean visit(SQLJoinTableSource tableSource) { - return false; - } + return true; + } + + @Override + public void endVisit(MySqlSelectQueryBlock query) { + environment.pop(); + } + + @Override + public boolean visit(SQLSelectItem sqlSelectItem) { + return false; + } + + @Override + public boolean visit(SQLJoinTableSource tableSource) { + return false; + } + + @Override + public boolean visit(SQLExprTableSource tableSource) { + return false; + } + + /** Fix null parent problem which is required when visiting SQLIdentifier */ + public boolean visit(SQLInListExpr inListExpr) { + inListExpr.getExpr().setParent(inListExpr); + return true; + } + + @SuppressWarnings("unchecked") + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (isValidIdentifierForTerm(expr)) { + Map source = null; + if 
(this.filterType == TermRewriterFilter.COMMA + || this.filterType == TermRewriterFilter.MULTI_QUERY) { + Optional> optionalMap = curScope().resolveFieldMapping(expr.getName()); + if (optionalMap.isPresent()) { + source = optionalMap.get(); + } else { + return true; + } - @Override - public boolean visit(SQLExprTableSource tableSource) { - return false; - } + } else if (this.filterType == TermRewriterFilter.JOIN) { + String[] arr = expr.getName().split("\\.", 2); + if (arr.length < 2) { + throw new VerificationException("table alias or field name missing"); + } + String alias = arr[0]; + String fullFieldName = arr[1]; + + String index = curScope().getAliases().get(alias); + FieldMappings fieldMappings = curScope().getMapper().mapping(index); + if (fieldMappings.has(fullFieldName)) { + source = fieldMappings.mapping(fullFieldName); + } else { + return true; + } + } - /** - * Fix null parent problem which is required when visiting SQLIdentifier - */ - public boolean visit(SQLInListExpr inListExpr) { - inListExpr.getExpr().setParent(inListExpr); - return true; + String keywordAlias = isBothTextAndKeyword(source); + if (keywordAlias != null) { + expr.setName(expr.getName() + "." 
+ keywordAlias); + } } - - - @SuppressWarnings("unchecked") - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (isValidIdentifierForTerm(expr)) { - Map source = null; - if (this.filterType == TermRewriterFilter.COMMA || this.filterType == TermRewriterFilter.MULTI_QUERY) { - Optional> optionalMap = curScope().resolveFieldMapping(expr.getName()); - if (optionalMap.isPresent()) { - source = optionalMap.get(); - } else { - return true; - } - - } else if (this.filterType == TermRewriterFilter.JOIN) { - String[] arr = expr.getName().split("\\.", 2); - if (arr.length < 2) { - throw new VerificationException("table alias or field name missing"); - } - String alias = arr[0]; - String fullFieldName = arr[1]; - - String index = curScope().getAliases().get(alias); - FieldMappings fieldMappings = curScope().getMapper().mapping(index); - if (fieldMappings.has(fullFieldName)) { - source = fieldMappings.mapping(fullFieldName); - } else { - return true; - } - } - - String keywordAlias = isBothTextAndKeyword(source); - if (keywordAlias != null) { - expr.setName(expr.getName() + "." 
+ keywordAlias); - } + return true; + } + + public void collect( + SQLTableSource tableSource, Map indexToType, Map aliases) { + if (tableSource instanceof SQLExprTableSource) { + + String tableName = null; + SQLExprTableSource sqlExprTableSource = (SQLExprTableSource) tableSource; + + if (sqlExprTableSource.getExpr() instanceof SQLIdentifierExpr) { + SQLIdentifierExpr sqlIdentifier = (SQLIdentifierExpr) sqlExprTableSource.getExpr(); + tableName = sqlIdentifier.getName(); + indexToType.put(tableName, null); + } else if (sqlExprTableSource.getExpr() instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr sqlBinaryOpExpr = (SQLBinaryOpExpr) sqlExprTableSource.getExpr(); + tableName = ((SQLIdentifierExpr) sqlBinaryOpExpr.getLeft()).getName(); + SQLExpr rightSideOfExpression = sqlBinaryOpExpr.getRight(); + + // This assumes that right side of the expression is different name in query + if (rightSideOfExpression instanceof SQLIdentifierExpr) { + SQLIdentifierExpr right = (SQLIdentifierExpr) rightSideOfExpression; + indexToType.put(tableName, right.getName()); + } else { + throw new ParserException( + "Right side of the expression [" + + rightSideOfExpression.toString() + + "] is expected to be an identifier"); } - return true; + } + if (tableSource.getAlias() != null) { + aliases.put(tableSource.getAlias(), tableName); + } else { + aliases.put(tableName, tableName); + } + + } else if (tableSource instanceof SQLJoinTableSource) { + collect(((SQLJoinTableSource) tableSource).getLeft(), indexToType, aliases); + collect(((SQLJoinTableSource) tableSource).getRight(), indexToType, aliases); } - - public void collect(SQLTableSource tableSource, Map indexToType, Map aliases) { - if (tableSource instanceof SQLExprTableSource) { - - String tableName = null; - SQLExprTableSource sqlExprTableSource = (SQLExprTableSource) tableSource; - - if (sqlExprTableSource.getExpr() instanceof SQLIdentifierExpr) { - SQLIdentifierExpr sqlIdentifier = (SQLIdentifierExpr) sqlExprTableSource.getExpr(); - 
tableName = sqlIdentifier.getName(); - indexToType.put(tableName, null); - } else if (sqlExprTableSource.getExpr() instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr sqlBinaryOpExpr = (SQLBinaryOpExpr) sqlExprTableSource.getExpr(); - tableName = ((SQLIdentifierExpr) sqlBinaryOpExpr.getLeft()).getName(); - SQLExpr rightSideOfExpression = sqlBinaryOpExpr.getRight(); - - // This assumes that right side of the expression is different name in query - if (rightSideOfExpression instanceof SQLIdentifierExpr) { - SQLIdentifierExpr right = (SQLIdentifierExpr) rightSideOfExpression; - indexToType.put(tableName, right.getName()); - } else { - throw new ParserException("Right side of the expression [" + rightSideOfExpression.toString() - + "] is expected to be an identifier"); - } - } - if (tableSource.getAlias() != null) { - aliases.put(tableSource.getAlias(), tableName); - } else { - aliases.put(tableName, tableName); - } - - } else if (tableSource instanceof SQLJoinTableSource) { - collect(((SQLJoinTableSource) tableSource).getLeft(), indexToType, aliases); - collect(((SQLJoinTableSource) tableSource).getRight(), indexToType, aliases); + } + + /** Current scope which is top of the stack */ + private TermFieldScope curScope() { + return environment.peek(); + } + + public String isBothTextAndKeyword(Map source) { + if (source.containsKey("fields")) { + for (Object key : ((Map) source.get("fields")).keySet()) { + if (key instanceof String + && ((Map) ((Map) source.get("fields")).get(key)).get("type").equals("keyword")) { + return (String) key; } + } } + return null; + } + public boolean isValidIdentifierForTerm(SQLIdentifierExpr expr) { /** - * Current scope which is top of the stack + * + * + *

    +     * Only for following conditions Identifier will be modified
    +     *  Where:  WHERE identifier = 'something'
    +     *  IN list: IN ('Tom', 'Dick', 'Harry')
    +     *  IN subquery: IN (SELECT firstname from accounts/account where firstname = 'John')
    +     *  Group by: GROUP BY state , employer , ...
    +     *  Order by: ORDER BY firstname, lastname , ...
    +     *
    +     * NOTE: Does not impact fields on ON condition clause in JOIN as we skip visiting SQLJoinTableSource
    +     * 
    */ - private TermFieldScope curScope() { - return environment.peek(); + return !expr.getName().startsWith("_") + && (isValidIdentifier(expr) || checkIfNestedIdentifier(expr)); + } + + private boolean checkIfNestedIdentifier(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLMethodInvokeExpr + && ((SQLMethodInvokeExpr) expr.getParent()).getMethodName().equals("nested") + && isValidIdentifier(expr.getParent()); + } + + private boolean isValidIdentifier(SQLObject expr) { + SQLObject parent = expr.getParent(); + return isBinaryExprWithValidOperators(parent) + || parent instanceof SQLInListExpr + || parent instanceof SQLInSubQueryExpr + || parent instanceof SQLSelectOrderByItem + || parent instanceof MySqlSelectGroupByExpr; + } + + private boolean isBinaryExprWithValidOperators(SQLObject expr) { + if (!(expr instanceof SQLBinaryOpExpr)) { + return false; } + return Stream.of(SQLBinaryOperator.Equality, SQLBinaryOperator.Is, SQLBinaryOperator.IsNot) + .anyMatch(operator -> operator == ((SQLBinaryOpExpr) expr).getOperator()); + } - public String isBothTextAndKeyword(Map source) { - if (source.containsKey("fields")) { - for (Object key : ((Map) source.get("fields")).keySet()) { - if (key instanceof String - && ((Map) ((Map) source.get("fields")).get(key)).get("type").equals("keyword")) { - return (String) key; - } - } - } - return null; + private void checkMappingCompatibility(TermFieldScope scope, Map indexToType) { + if (scope.getMapper().isEmpty()) { + throw new VerificationException("Unknown index " + indexToType.keySet()); } + } - public boolean isValidIdentifierForTerm(SQLIdentifierExpr expr) { - /** - * Only for following conditions Identifier will be modified - * Where: WHERE identifier = 'something' - * IN list: IN ('Tom', 'Dick', 'Harry') - * IN subquery: IN (SELECT firstname from accounts/account where firstname = 'John') - * Group by: GROUP BY state , employer , ... - * Order by: ORDER BY firstname, lastname , ... 
- * - * NOTE: Does not impact fields on ON condition clause in JOIN as we skip visiting SQLJoinTableSource - */ - return !expr.getName().startsWith("_") && (isValidIdentifier(expr) || checkIfNestedIdentifier(expr)); - } + public IndexMappings getMappings(Map indexToType) { + String[] allIndexes = indexToType.keySet().stream().toArray(String[]::new); + // GetFieldMappingsRequest takes care of wildcard index expansion + return LocalClusterState.state().getFieldMappings(allIndexes); + } - private boolean checkIfNestedIdentifier(SQLIdentifierExpr expr) { - return - expr.getParent() instanceof SQLMethodInvokeExpr - && ((SQLMethodInvokeExpr) expr.getParent()).getMethodName().equals("nested") - && isValidIdentifier(expr.getParent()); - } + public enum TermRewriterFilter { + COMMA(","), // No joins, multiple tables in SELECT + JOIN("JOIN"), // All JOINs + MULTI_QUERY("MULTI_QUERY"); // MINUS and UNION - private boolean isValidIdentifier(SQLObject expr) { - SQLObject parent = expr.getParent(); - return isBinaryExprWithValidOperators(parent) - || parent instanceof SQLInListExpr - || parent instanceof SQLInSubQueryExpr - || parent instanceof SQLSelectOrderByItem - || parent instanceof MySqlSelectGroupByExpr; - } + public final String name; - private boolean isBinaryExprWithValidOperators(SQLObject expr) { - if (!(expr instanceof SQLBinaryOpExpr)) { - return false; - } - return Stream.of( - SQLBinaryOperator.Equality, - SQLBinaryOperator.Is, - SQLBinaryOperator.IsNot - ).anyMatch(operator -> operator == ((SQLBinaryOpExpr) expr).getOperator()); + TermRewriterFilter(String name) { + this.name = name; } - private void checkMappingCompatibility(TermFieldScope scope, Map indexToType) { - if (scope.getMapper().isEmpty()) { - throw new VerificationException("Unknown index " + indexToType.keySet()); - } - } - - public IndexMappings getMappings(Map indexToType) { - String[] allIndexes = indexToType.keySet().stream().toArray(String[]::new); - // GetFieldMappingsRequest takes care of 
wildcard index expansion - return LocalClusterState.state().getFieldMappings(allIndexes); - } - - public enum TermRewriterFilter { - COMMA(","), // No joins, multiple tables in SELECT - JOIN("JOIN"), // All JOINs - MULTI_QUERY("MULTI_QUERY"); // MINUS and UNION - - public final String name; - - TermRewriterFilter(String name) { - this.name = name; - } - - public static String toString(TermRewriterFilter filter) { - return filter.name; - } + public static String toString(TermRewriterFilter filter) { + return filter.name; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java index f8b6e9b05e..29f8ed82b8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.matchtoterm; import java.util.*; @@ -11,57 +10,62 @@ import org.opensearch.sql.legacy.esdomain.mapping.FieldMappings; import org.opensearch.sql.legacy.esdomain.mapping.IndexMappings; -/** - * Index Mapping information in current query being visited. - */ +/** Index Mapping information in current query being visited. 
*/ public class TermFieldScope { - // mapper => index, type, field_name, FieldMappingMetaData - private IndexMappings mapper; - private FieldMappings finalMapping; - private Map aliases; + // mapper => index, type, field_name, FieldMappingMetaData + private IndexMappings mapper; + private FieldMappings finalMapping; + private Map aliases; - public TermFieldScope() { - this.mapper = IndexMappings.EMPTY; - this.aliases = new HashMap<>(); - } + public TermFieldScope() { + this.mapper = IndexMappings.EMPTY; + this.aliases = new HashMap<>(); + } - public Map getAliases() { - return aliases; - } + public Map getAliases() { + return aliases; + } - public void setAliases(Map aliases) { - this.aliases = aliases; - } + public void setAliases(Map aliases) { + this.aliases = aliases; + } - public IndexMappings getMapper() { - return this.mapper; - } + public IndexMappings getMapper() { + return this.mapper; + } - public void setMapper(IndexMappings mapper) { - this.mapper = mapper; - } + public void setMapper(IndexMappings mapper) { + this.mapper = mapper; + } - public Optional> resolveFieldMapping(String fieldName) { - Set indexMappings = new HashSet<>(mapper.allMappings()); - Optional> resolvedMapping = - indexMappings.stream() - .filter(mapping -> mapping.has(fieldName)) - .map(mapping -> mapping.mapping(fieldName)).reduce((map1, map2) -> { - if (!map1.equals(map2)) { - // TODO: Merge mappings if they are compatible, for text and text/keyword to text/keyword. 
- String exceptionReason = String.format(Locale.ROOT, "Different mappings are not allowed " - + "for the same field[%s]: found [%s] and [%s] ", - fieldName, pretty(map1), pretty(map2)); - throw new VerificationException(exceptionReason); - } - return map1; + public Optional> resolveFieldMapping(String fieldName) { + Set indexMappings = new HashSet<>(mapper.allMappings()); + Optional> resolvedMapping = + indexMappings.stream() + .filter(mapping -> mapping.has(fieldName)) + .map(mapping -> mapping.mapping(fieldName)) + .reduce( + (map1, map2) -> { + if (!map1.equals(map2)) { + // TODO: Merge mappings if they are compatible, for text and text/keyword to + // text/keyword. + String exceptionReason = + String.format( + Locale.ROOT, + "Different mappings are not allowed " + + "for the same field[%s]: found [%s] and [%s] ", + fieldName, + pretty(map1), + pretty(map2)); + throw new VerificationException(exceptionReason); + } + return map1; }); - return resolvedMapping; - } - - private static String pretty(Map mapping) { - return new JSONObject(mapping).toString().replaceAll("\"", ""); - } + return resolvedMapping; + } + private static String pretty(Map mapping) { + return new JSONObject(mapping).toString().replaceAll("\"", ""); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java index 51b936bdc3..f8ec8ad215 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.matchtoterm; import org.opensearch.OpenSearchException; @@ -11,12 +10,12 @@ public class VerificationException extends OpenSearchException { - public VerificationException(String message) { - super(message); 
- } + public VerificationException(String message) { + super(message); + } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } + @Override + public RestStatus status() { + return RestStatus.BAD_REQUEST; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java index b39907366e..2c7b074e0a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java @@ -70,29 +70,31 @@ private void keepParentTableOnly() { left().expr.setParent(query); } - /** - *
    -     * Collect path alias and full path mapping of nested field in FROM clause.
    -     * Sample:
    -     * FROM team t, t.employees e ...
    -     * 

    - * Join - * / \ - * team t Join - * / \ - * t.employees e ... - *

    - * t.employees is nested because path "t" == parentAlias "t" - * Save path alias to full path name mapping {"e": "employees"} to Scope - *

    - */ - private void collectNestedFields(Scope scope) { - From clause = this; - for (; clause.isCommaJoin(); clause = clause.right()) { - clause.left().addIfNestedField(scope); - } - clause.addIfNestedField(scope); + /** + * + * + *
    +   * Collect path alias and full path mapping of nested field in FROM clause.
    +   * Sample:
    +   * FROM team t, t.employees e ...
    +   * 

    + * Join + * / \ + * team t Join + * / \ + * t.employees e ... + *

    + * t.employees is nested because path "t" == parentAlias "t" + * Save path alias to full path name mapping {"e": "employees"} to Scope + *

    + */ + private void collectNestedFields(Scope scope) { + From clause = this; + for (; clause.isCommaJoin(); clause = clause.right()) { + clause.left().addIfNestedField(scope); } + clause.addIfNestedField(scope); + } private boolean isCommaJoin() { return expr instanceof SQLJoinTableSource && ((SQLJoinTableSource) expr).getJoinType() == COMMA; diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java index e3e1cfb7ce..6c5867b864 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java @@ -9,6 +9,8 @@ import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; /** + * + * *
      * Identifier expression in SELECT, FROM, WHERE, GROUP BY, ORDER BY etc.
      *
    diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java
    index 83a94b1e9b..590ed8fb4d 100644
    --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java
    +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java
    @@ -109,29 +109,30 @@ private Map> groupFieldNamesByPath(List fields) {
             .collect(groupingBy(Field::getNestedPath, mapping(Field::getName, toList())));
       }
     
    -    /**
    -     * Why search for NestedQueryBuilder recursively?
    -     * Because
    -     * 
      - *
    1. it was added and wrapped by BoolQuery when WHERE explained (far from here) - *
    2. InnerHit must be added to the NestedQueryBuilder related - *
    - *

    - * Either we store it to global data structure (which requires to be thread-safe or ThreadLocal) - * or we peel off BoolQuery to find it (the way we followed here because recursion tree should be very thin). - */ - private List extractNestedQueries(QueryBuilder query) { - List result = new ArrayList<>(); - if (query instanceof NestedQueryBuilder) { - result.add((NestedQueryBuilder) query); - } else if (query instanceof BoolQueryBuilder) { - BoolQueryBuilder boolQ = (BoolQueryBuilder) query; - Stream.of(boolQ.filter(), boolQ.must(), boolQ.should()). - flatMap(Collection::stream). - forEach(q -> result.addAll(extractNestedQueries(q))); - } - return result; + /** + * Why search for NestedQueryBuilder recursively? Because + * + *

      + *
    1. it was added and wrapped by BoolQuery when WHERE explained (far from here) + *
    2. InnerHit must be added to the NestedQueryBuilder related + *
    + * + *

    Either we store it to global data structure (which requires to be thread-safe or + * ThreadLocal) or we peel off BoolQuery to find it (the way we followed here because recursion + * tree should be very thin). + */ + private List extractNestedQueries(QueryBuilder query) { + List result = new ArrayList<>(); + if (query instanceof NestedQueryBuilder) { + result.add((NestedQueryBuilder) query); + } else if (query instanceof BoolQueryBuilder) { + BoolQueryBuilder boolQ = (BoolQueryBuilder) query; + Stream.of(boolQ.filter(), boolQ.must(), boolQ.should()) + .flatMap(Collection::stream) + .forEach(q -> result.addAll(extractNestedQueries(q))); } + return result; + } private void buildInnerHit(List fieldNames, NestedQueryBuilder query) { query.innerHit( diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java index 976075a72d..46afbb8ca1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java @@ -15,6 +15,8 @@ import java.util.Deque; /** + * + * *

      * Visitor to rewrite AST (abstract syntax tree) for nested type fields to support implicit nested() function call.
      * Intuitively, the approach is to implement SQLIdentifier.visit() and wrap nested() function for nested field.
    diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java
    index 160403ab11..fb4c1b9fe9 100644
    --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java
    +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java
    @@ -3,7 +3,6 @@
      * SPDX-License-Identifier: Apache-2.0
      */
     
    -
     package org.opensearch.sql.legacy.rewriter.nestedfield;
     
     import com.alibaba.druid.sql.ast.SQLExpr;
    @@ -28,71 +27,69 @@
      */
     abstract class SQLClause {
     
    -    protected final T expr;
    -
    -    SQLClause(T expr) {
    -        this.expr = expr;
    -    }
    +  protected final T expr;
     
    -    /**
    -     * Rewrite nested fields in query according to/fill into information in scope.
    -     *
    -     * @param scope Scope of current query
    -     */
    -    abstract void rewrite(Scope scope);
    +  SQLClause(T expr) {
    +    this.expr = expr;
    +  }
     
    -    SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr, String nestedPath) {
    -        final int nestedPathIndex = 1;
    -        SQLMethodInvokeExpr nestedFunc = replaceByNestedFunction(expr);
    -        nestedFunc.getParameters().add(nestedPathIndex, new SQLCharExpr(nestedPath));
    -        return nestedFunc;
    -    }
    +  /**
    +   * Rewrite nested fields in query according to/fill into information in scope.
    +   *
    +   * @param scope Scope of current query
    +   */
    +  abstract void rewrite(Scope scope);
     
    -    /**
    -     * Replace expr by nested(expr) and set pointer in parent properly
    -     */
    -    SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr) {
    -        SQLObject parent = expr.getParent();
    -        SQLMethodInvokeExpr nestedFunc = wrapNestedFunction(expr);
    -        if (parent instanceof SQLAggregateExpr) {
    -            List args = ((SQLAggregateExpr) parent).getArguments();
    -            args.set(args.indexOf(expr), nestedFunc);
    -        } else if (parent instanceof SQLSelectItem) {
    -            ((SQLSelectItem) parent).setExpr(nestedFunc);
    -        } else if (parent instanceof MySqlSelectGroupByExpr) {
    -            ((MySqlSelectGroupByExpr) parent).setExpr(nestedFunc);
    -        } else if (parent instanceof SQLSelectOrderByItem) {
    -            ((SQLSelectOrderByItem) parent).setExpr(nestedFunc);
    -        } else if (parent instanceof SQLInSubQueryExpr) {
    -            ((SQLInSubQueryExpr) parent).setExpr(nestedFunc);
    -        } else if (parent instanceof SQLBinaryOpExpr) {
    -            SQLBinaryOpExpr parentOp = (SQLBinaryOpExpr) parent;
    -            if (parentOp.getLeft() == expr) {
    -                parentOp.setLeft(nestedFunc);
    -            } else {
    -                parentOp.setRight(nestedFunc);
    -            }
    -        } else if (parent instanceof MySqlSelectQueryBlock) {
    -            ((MySqlSelectQueryBlock) parent).setWhere(nestedFunc);
    -        } else if (parent instanceof SQLNotExpr) {
    -              ((SQLNotExpr) parent).setExpr(nestedFunc);
    -        } else {
    -            throw new IllegalStateException("Unsupported place to use nested field under parent: " + parent);
    -        }
    -        return nestedFunc;
    -    }
    +  SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr, String nestedPath) {
    +    final int nestedPathIndex = 1;
    +    SQLMethodInvokeExpr nestedFunc = replaceByNestedFunction(expr);
    +    nestedFunc.getParameters().add(nestedPathIndex, new SQLCharExpr(nestedPath));
    +    return nestedFunc;
    +  }
     
    -    private SQLMethodInvokeExpr wrapNestedFunction(SQLExpr expr) {
    -        SQLMethodInvokeExpr nestedFunc = new SQLMethodInvokeExpr("nested");
    -        nestedFunc.setParent(expr.getParent());
    -        nestedFunc.addParameter(expr);  // this will auto set parent of expr
    -        return nestedFunc;
    +  /** Replace expr by nested(expr) and set pointer in parent properly */
    +  SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr) {
    +    SQLObject parent = expr.getParent();
    +    SQLMethodInvokeExpr nestedFunc = wrapNestedFunction(expr);
    +    if (parent instanceof SQLAggregateExpr) {
    +      List args = ((SQLAggregateExpr) parent).getArguments();
    +      args.set(args.indexOf(expr), nestedFunc);
    +    } else if (parent instanceof SQLSelectItem) {
    +      ((SQLSelectItem) parent).setExpr(nestedFunc);
    +    } else if (parent instanceof MySqlSelectGroupByExpr) {
    +      ((MySqlSelectGroupByExpr) parent).setExpr(nestedFunc);
    +    } else if (parent instanceof SQLSelectOrderByItem) {
    +      ((SQLSelectOrderByItem) parent).setExpr(nestedFunc);
    +    } else if (parent instanceof SQLInSubQueryExpr) {
    +      ((SQLInSubQueryExpr) parent).setExpr(nestedFunc);
    +    } else if (parent instanceof SQLBinaryOpExpr) {
    +      SQLBinaryOpExpr parentOp = (SQLBinaryOpExpr) parent;
    +      if (parentOp.getLeft() == expr) {
    +        parentOp.setLeft(nestedFunc);
    +      } else {
    +        parentOp.setRight(nestedFunc);
    +      }
    +    } else if (parent instanceof MySqlSelectQueryBlock) {
    +      ((MySqlSelectQueryBlock) parent).setWhere(nestedFunc);
    +    } else if (parent instanceof SQLNotExpr) {
    +      ((SQLNotExpr) parent).setExpr(nestedFunc);
    +    } else {
    +      throw new IllegalStateException(
    +          "Unsupported place to use nested field under parent: " + parent);
         }
    +    return nestedFunc;
    +  }
     
    -    String pathFromIdentifier(SQLExpr identifier) {
    -        String field = Util.extendedToString(identifier);
    -        int lastDot = field.lastIndexOf(".");
    -        return lastDot == -1 ? field :field.substring(0, lastDot);
    -    }
    +  private SQLMethodInvokeExpr wrapNestedFunction(SQLExpr expr) {
    +    SQLMethodInvokeExpr nestedFunc = new SQLMethodInvokeExpr("nested");
    +    nestedFunc.setParent(expr.getParent());
    +    nestedFunc.addParameter(expr); // this will auto set parent of expr
    +    return nestedFunc;
    +  }
     
    +  String pathFromIdentifier(SQLExpr identifier) {
    +    String field = Util.extendedToString(identifier);
    +    int lastDot = field.lastIndexOf(".");
    +    return lastDot == -1 ? field : field.substring(0, lastDot);
    +  }
     }
    diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java
    index c126bb264f..36dc3263b4 100644
    --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java
    +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java
    @@ -3,7 +3,6 @@
      * SPDX-License-Identifier: Apache-2.0
      */
     
    -
     package org.opensearch.sql.legacy.rewriter.nestedfield;
     
     import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr;
    @@ -11,116 +10,114 @@
     import com.alibaba.druid.sql.ast.expr.SQLNotExpr;
     import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock;
     
    -/**
    - * Condition expression in WHERE statement.
    - */
    +/** Condition expression in WHERE statement. */
     class Where extends SQLClause {
     
    -    Where(SQLBinaryOpExpr expr) {
    -        super(expr);
    -    }
    +  Where(SQLBinaryOpExpr expr) {
    +    super(expr);
    +  }
     
    -    /**
    -     * Rewrite if left and right tag is different (or reach root of WHERE).
    -     * Otherwise continue delaying the rewrite.
    -     * 

    - * Assumption: there are only 2 forms of condition - * 1) BinaryOp: Left=Identifier, right=value - * 2) BinaryOp: Left=BinaryOp, right=BinaryOp - */ - @Override - void rewrite(Scope scope) { - if (isLeftChildCondition()) { - if (isChildTagEquals(scope)) { - useAnyChildTag(scope); - } else { - left().mergeNestedField(scope); - right().mergeNestedField(scope); - } - } - mergeIfHaveTagAndIsRootOfWhereOrNot(scope); + /** + * Rewrite if left and right tag is different (or reach root of WHERE). Otherwise continue + * delaying the rewrite. + * + *

    Assumption: there are only 2 forms of condition 1) BinaryOp: Left=Identifier, right=value 2) + * BinaryOp: Left=BinaryOp, right=BinaryOp + */ + @Override + void rewrite(Scope scope) { + if (isLeftChildCondition()) { + if (isChildTagEquals(scope)) { + useAnyChildTag(scope); + } else { + left().mergeNestedField(scope); + right().mergeNestedField(scope); + } } + mergeIfHaveTagAndIsRootOfWhereOrNot(scope); + } - private boolean isLeftChildCondition() { - return expr.getLeft() instanceof SQLBinaryOpExpr; - } + private boolean isLeftChildCondition() { + return expr.getLeft() instanceof SQLBinaryOpExpr; + } - private boolean isChildTagEquals(Scope scope) { - String left = scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft()); - String right = scope.getConditionTag((SQLBinaryOpExpr) expr.getRight()); - return left.equals(right); - } + private boolean isChildTagEquals(Scope scope) { + String left = scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft()); + String right = scope.getConditionTag((SQLBinaryOpExpr) expr.getRight()); + return left.equals(right); + } - private void useAnyChildTag(Scope scope) { - scope.addConditionTag(expr, scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft())); - } + private void useAnyChildTag(Scope scope) { + scope.addConditionTag(expr, scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft())); + } - /** - * Merge anyway if the root of WHERE clause or {@link SQLNotExpr} be reached. - */ - private void mergeIfHaveTagAndIsRootOfWhereOrNot(Scope scope) { - if (scope.getConditionTag(expr).isEmpty()) { - return; - } - if (expr.getParent() instanceof MySqlSelectQueryBlock - || expr.getParent() instanceof SQLNotExpr) { - mergeNestedField(scope); - } + /** Merge anyway if the root of WHERE clause or {@link SQLNotExpr} be reached. 
*/ + private void mergeIfHaveTagAndIsRootOfWhereOrNot(Scope scope) { + if (scope.getConditionTag(expr).isEmpty()) { + return; } - - private Where left() { - return new Where((SQLBinaryOpExpr) expr.getLeft()); + if (expr.getParent() instanceof MySqlSelectQueryBlock + || expr.getParent() instanceof SQLNotExpr) { + mergeNestedField(scope); } + } - private Where right() { - return new Where((SQLBinaryOpExpr) expr.getRight()); - } + private Where left() { + return new Where((SQLBinaryOpExpr) expr.getLeft()); + } - /** - * There are 2 cases: - * 1) For a single condition, just wrap nested() function. That's it. - *

    - * BinaryOp - * / \ - * Identifier Value - * "employees.age" "30" - *

    - * to - *

    - * BinaryOp - * / \ - * Method Value - * "nested" "30" - * | - * Identifier - * "employees.age" - *

    - * 2) For multiple conditions, put entire BinaryOp to the parameter and add function name "nested()" first - *

    - * BinaryOp (a) - * / \ - * BinaryOp BinaryOp - * | | - * ... ... - *

    - * to - *

    - * Method - * "nested" - * | - * BinaryOp (a) - * / \ - * ... ... - */ - private void mergeNestedField(Scope scope) { - String tag = scope.getConditionTag(expr); - if (!tag.isEmpty()) { - if (isLeftChildCondition()) { - replaceByNestedFunction(expr).getParameters().add(0, new SQLCharExpr(tag)); - } else { - replaceByNestedFunction(expr.getLeft(), pathFromIdentifier(expr.getLeft())); - } - } - } + private Where right() { + return new Where((SQLBinaryOpExpr) expr.getRight()); + } + /** + * + * + *

    +   * There are 2 cases:
    +   * 1) For a single condition, just wrap nested() function. That's it.
    +   * 

    + * BinaryOp + * / \ + * Identifier Value + * "employees.age" "30" + *

    + * to + *

    + * BinaryOp + * / \ + * Method Value + * "nested" "30" + * | + * Identifier + * "employees.age" + *

    + * 2) For multiple conditions, put entire BinaryOp to the parameter and add function name "nested()" first + *

    + * BinaryOp (a) + * / \ + * BinaryOp BinaryOp + * | | + * ... ... + *

    + * to + *

    + * Method + * "nested" + * | + * BinaryOp (a) + * / \ + * ... ... + *

    + */ + private void mergeNestedField(Scope scope) { + String tag = scope.getConditionTag(expr); + if (!tag.isEmpty()) { + if (isLeftChildCondition()) { + replaceByNestedFunction(expr).getParameters().add(0, new SQLCharExpr(tag)); + } else { + replaceByNestedFunction(expr.getLeft(), pathFromIdentifier(expr.getLeft())); + } + } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java index 03ff07b1b8..ed853823ce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java @@ -145,25 +145,26 @@ private boolean hasOrderByWithOrdinals(MySqlSelectQueryBlock query) { return false; } - /** - *
    -         * The second condition checks valid AST that meets ORDER BY IS NULL/NOT NULL condition
    -         *
    -         *            SQLSelectOrderByItem
    -         *                      |
    -         *             SQLBinaryOpExpr (Is || IsNot)
    -         *                    /  \
    -         *    SQLIdentifierExpr  SQLNullExpr
    -         *  
    - */ - return query.getOrderBy().getItems().stream().anyMatch(x -> - x.getExpr() instanceof SQLIntegerExpr - || ( - x.getExpr() instanceof SQLBinaryOpExpr - && ((SQLBinaryOpExpr) x.getExpr()).getLeft() instanceof SQLIntegerExpr - ) - ); - } + /** + * + * + *
    +     * The second condition checks valid AST that meets ORDER BY IS NULL/NOT NULL condition
    +     *
    +     *            SQLSelectOrderByItem
    +     *                      |
    +     *             SQLBinaryOpExpr (Is || IsNot)
    +     *                    /  \
    +     *    SQLIdentifierExpr  SQLNullExpr
    +     *  
    + */ + return query.getOrderBy().getItems().stream() + .anyMatch( + x -> + x.getExpr() instanceof SQLIntegerExpr + || (x.getExpr() instanceof SQLBinaryOpExpr + && ((SQLBinaryOpExpr) x.getExpr()).getLeft() instanceof SQLIntegerExpr)); + } private SQLQueryExpr toSqlExpr() { SQLExprParser parser = new ElasticSqlExprParser(sql); diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java index 9de81f2ab1..3ad2955798 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.parent; import com.alibaba.druid.sql.ast.expr.SQLInListExpr; @@ -12,35 +11,27 @@ import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlASTVisitorAdapter; -/** - * Add the parent for the node in {@link SQLQueryExpr} if it is missing. - */ +/** Add the parent for the node in {@link SQLQueryExpr} if it is missing. 
*/ public class SQLExprParentSetter extends MySqlASTVisitorAdapter { - /** - * Fix null parent problem which is required by SQLIdentifier.visit() - */ - @Override - public boolean visit(SQLInSubQueryExpr subQuery) { - subQuery.getExpr().setParent(subQuery); - return true; - } + /** Fix null parent problem which is required by SQLIdentifier.visit() */ + @Override + public boolean visit(SQLInSubQueryExpr subQuery) { + subQuery.getExpr().setParent(subQuery); + return true; + } - /** - * Fix null parent problem which is required by SQLIdentifier.visit() - */ - @Override - public boolean visit(SQLInListExpr expr) { - expr.getExpr().setParent(expr); - return true; - } + /** Fix null parent problem which is required by SQLIdentifier.visit() */ + @Override + public boolean visit(SQLInListExpr expr) { + expr.getExpr().setParent(expr); + return true; + } - /** - * Fix the expr in {@link SQLNotExpr} without parent. - */ - @Override - public boolean visit(SQLNotExpr notExpr) { - notExpr.getExpr().setParent(notExpr); - return true; - } + /** Fix the expr in {@link SQLNotExpr} without parent. 
*/ + @Override + public boolean visit(SQLNotExpr notExpr) { + notExpr.getExpr().setParent(notExpr); + return true; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java index 62ad0765d8..b623998b6e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java @@ -3,24 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.parent; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; import org.opensearch.sql.legacy.rewriter.RewriteRule; -/** - * The {@link RewriteRule} which will apply {@link SQLExprParentSetter} for {@link SQLQueryExpr} - */ +/** The {@link RewriteRule} which will apply {@link SQLExprParentSetter} for {@link SQLQueryExpr} */ public class SQLExprParentSetterRule implements RewriteRule { - @Override - public boolean match(SQLQueryExpr expr) { - return true; - } + @Override + public boolean match(SQLQueryExpr expr) { + return true; + } - @Override - public void rewrite(SQLQueryExpr expr) { - expr.accept(new SQLExprParentSetter()); - } + @Override + public void rewrite(SQLQueryExpr expr) { + expr.accept(new SQLExprParentSetter()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java index b300015d49..17eaf72865 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java @@ -14,9 +14,9 @@ /** * {@link NestedQueryContext} build the context with Query to detected the specified table is nested - * or not. - *
    Todo current implementation doesn't rely on the index mapping which should be added after - * the semantics is built. + * or not.
    + * Todo current implementation doesn't rely on the index mapping which should be added after the + * semantics is built. */ public class NestedQueryContext { private static final String SEPARATOR = "."; @@ -54,17 +54,17 @@ private void process(SQLExprTableSource table) { } } - /** - * Extract the parent alias from the tableName. For example
    - * SELECT * FROM employee e, e.project as p,
    - * For expr: employee, the parent alias is "".
    - * For expr: e.project, the parent alias is e. - */ - private String parent(SQLExprTableSource table) { - String tableName = table.getExpr().toString(); - int index = tableName.indexOf(SEPARATOR); - return index == -1 ? EMPTY : tableName.substring(0, index); - } + /** + * Extract the parent alias from the tableName. For example
    + * SELECT * FROM employee e, e.project as p,
    + * For expr: employee, the parent alias is "".
    + * For expr: e.project, the parent alias is e. + */ + private String parent(SQLExprTableSource table) { + String tableName = table.getExpr().toString(); + int index = tableName.indexOf(SEPARATOR); + return index == -1 ? EMPTY : tableName.substring(0, index); + } private String alias(SQLExprTableSource table) { if (Strings.isNullOrEmpty(table.getAlias())) { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java index 44a68b1bbb..5177b2d6d3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; @@ -12,42 +11,44 @@ import org.opensearch.sql.legacy.rewriter.subquery.rewriter.SubqueryAliasRewriter; import org.opensearch.sql.legacy.rewriter.subquery.utils.FindSubQuery; -/** - * Subquery Rewriter Rule. - */ +/** Subquery Rewriter Rule. */ public class SubQueryRewriteRule implements RewriteRule { - private FindSubQuery findAllSubQuery = new FindSubQuery(); - - @Override - public boolean match(SQLQueryExpr expr) throws SQLFeatureNotSupportedException { - expr.accept(findAllSubQuery); - - if (isContainSubQuery(findAllSubQuery)) { - if (isSupportedSubQuery(findAllSubQuery)) { - return true; - } else { - throw new SQLFeatureNotSupportedException("Unsupported subquery. 
Only one EXISTS or IN is supported"); - } - } else { - return false; - } - } - - @Override - public void rewrite(SQLQueryExpr expr) { - expr.accept(new SubqueryAliasRewriter()); - new SubQueryRewriter().convert(expr.getSubQuery()); + private FindSubQuery findAllSubQuery = new FindSubQuery(); + + @Override + public boolean match(SQLQueryExpr expr) throws SQLFeatureNotSupportedException { + expr.accept(findAllSubQuery); + + if (isContainSubQuery(findAllSubQuery)) { + if (isSupportedSubQuery(findAllSubQuery)) { + return true; + } else { + throw new SQLFeatureNotSupportedException( + "Unsupported subquery. Only one EXISTS or IN is supported"); + } + } else { + return false; } - - private boolean isContainSubQuery(FindSubQuery allSubQuery) { - return !allSubQuery.getSqlExistsExprs().isEmpty() || !allSubQuery.getSqlInSubQueryExprs().isEmpty(); - } - - private boolean isSupportedSubQuery(FindSubQuery allSubQuery) { - if ((allSubQuery.getSqlInSubQueryExprs().size() == 1 && allSubQuery.getSqlExistsExprs().size() == 0) - || (allSubQuery.getSqlInSubQueryExprs().size() == 0 && allSubQuery.getSqlExistsExprs().size() == 1)) { - return true; - } - return false; + } + + @Override + public void rewrite(SQLQueryExpr expr) { + expr.accept(new SubqueryAliasRewriter()); + new SubQueryRewriter().convert(expr.getSubQuery()); + } + + private boolean isContainSubQuery(FindSubQuery allSubQuery) { + return !allSubQuery.getSqlExistsExprs().isEmpty() + || !allSubQuery.getSqlInSubQueryExprs().isEmpty(); + } + + private boolean isSupportedSubQuery(FindSubQuery allSubQuery) { + if ((allSubQuery.getSqlInSubQueryExprs().size() == 1 + && allSubQuery.getSqlExistsExprs().size() == 0) + || (allSubQuery.getSqlInSubQueryExprs().size() == 0 + && allSubQuery.getSqlExistsExprs().size() == 1)) { + return true; } + return false; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java index fd503a0e9b..c788e8f559 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.SQLExpr; @@ -19,73 +18,73 @@ import org.opensearch.sql.legacy.rewriter.subquery.utils.FindSubQuery; public class SubQueryRewriter { - private final RewriterContext ctx = new RewriterContext(); + private final RewriterContext ctx = new RewriterContext(); - public void convert(SQLSelect query) { - SQLSelectQuery queryExpr = query.getQuery(); - if (queryExpr instanceof MySqlSelectQueryBlock) { - MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) queryExpr; - ctx.addTable(queryBlock.getFrom()); + public void convert(SQLSelect query) { + SQLSelectQuery queryExpr = query.getQuery(); + if (queryExpr instanceof MySqlSelectQueryBlock) { + MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) queryExpr; + ctx.addTable(queryBlock.getFrom()); - queryBlock.setWhere(convertWhere(queryBlock.getWhere())); - queryBlock.setFrom(convertFrom(queryBlock.getFrom())); - } + queryBlock.setWhere(convertWhere(queryBlock.getWhere())); + queryBlock.setFrom(convertFrom(queryBlock.getFrom())); } + } - private SQLTableSource convertFrom(SQLTableSource expr) { - SQLTableSource join = ctx.popJoin(); - if (join != null) { - return join; - } - return expr; + private SQLTableSource convertFrom(SQLTableSource expr) { + SQLTableSource join = ctx.popJoin(); + if (join != null) { + return join; } + return expr; + } - private SQLExpr convertWhere(SQLExpr expr) { - if (expr instanceof SQLExistsExpr) { - ctx.setExistsSubQuery((SQLExistsExpr) expr); - rewriteSubQuery(expr, ((SQLExistsExpr) expr).getSubQuery()); - return ctx.popWhere(); - } else if 
(expr instanceof SQLInSubQueryExpr) { - ctx.setInSubQuery((SQLInSubQueryExpr) expr); - rewriteSubQuery(expr, ((SQLInSubQueryExpr) expr).getSubQuery()); - return ctx.popWhere(); - } else if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; - SQLExpr left = convertWhere(binaryOpExpr.getLeft()); - left.setParent(binaryOpExpr); - binaryOpExpr.setLeft(left); - SQLExpr right = convertWhere(binaryOpExpr.getRight()); - right.setParent(binaryOpExpr); - binaryOpExpr.setRight(right); - } - return expr; + private SQLExpr convertWhere(SQLExpr expr) { + if (expr instanceof SQLExistsExpr) { + ctx.setExistsSubQuery((SQLExistsExpr) expr); + rewriteSubQuery(expr, ((SQLExistsExpr) expr).getSubQuery()); + return ctx.popWhere(); + } else if (expr instanceof SQLInSubQueryExpr) { + ctx.setInSubQuery((SQLInSubQueryExpr) expr); + rewriteSubQuery(expr, ((SQLInSubQueryExpr) expr).getSubQuery()); + return ctx.popWhere(); + } else if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; + SQLExpr left = convertWhere(binaryOpExpr.getLeft()); + left.setParent(binaryOpExpr); + binaryOpExpr.setLeft(left); + SQLExpr right = convertWhere(binaryOpExpr.getRight()); + right.setParent(binaryOpExpr); + binaryOpExpr.setRight(right); } + return expr; + } - private void rewriteSubQuery(SQLExpr subQueryExpr, SQLSelect subQuerySelect) { - if (containSubQuery(subQuerySelect)) { - convert(subQuerySelect); - } else if (isSupportedSubQuery(ctx)){ - for (Rewriter rewriter : RewriterFactory.createRewriterList(subQueryExpr, ctx)) { - if (rewriter.canRewrite()) { - rewriter.rewrite(); - return; - } - } + private void rewriteSubQuery(SQLExpr subQueryExpr, SQLSelect subQuerySelect) { + if (containSubQuery(subQuerySelect)) { + convert(subQuerySelect); + } else if (isSupportedSubQuery(ctx)) { + for (Rewriter rewriter : RewriterFactory.createRewriterList(subQueryExpr, ctx)) { + if (rewriter.canRewrite()) { + rewriter.rewrite(); + return; } - 
throw new IllegalStateException("Unsupported subquery"); + } } + throw new IllegalStateException("Unsupported subquery"); + } - private boolean containSubQuery(SQLSelect query) { - FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); - query.accept(findSubQuery); - return findSubQuery.hasSubQuery(); - } + private boolean containSubQuery(SQLSelect query) { + FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); + query.accept(findSubQuery); + return findSubQuery.hasSubQuery(); + } - private boolean isSupportedSubQuery(RewriterContext ctx) { - if ((ctx.getSqlInSubQueryExprs().size() == 1 && ctx.getSqlExistsExprs().size() == 0) - || (ctx.getSqlInSubQueryExprs().size() == 0 && ctx.getSqlExistsExprs().size() == 1)) { - return true; - } - return false; + private boolean isSupportedSubQuery(RewriterContext ctx) { + if ((ctx.getSqlInSubQueryExprs().size() == 1 && ctx.getSqlExistsExprs().size() == 0) + || (ctx.getSqlInSubQueryExprs().size() == 0 && ctx.getSqlExistsExprs().size() == 1)) { + return true; } + return false; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java index 281918d52c..f598ab9a97 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java @@ -16,6 +16,8 @@ import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; /** + * + * *
      * IN Subquery Rewriter.
      * For example,
    @@ -39,29 +41,31 @@ public boolean canRewrite() {
         return !inExpr.isNot();
       }
     
    -    /**
    -     * 
    -     * Build Where clause from input query.
    -     * 

    - * With the input query. - * Query - * / | \ - * SELECT FROM WHERE - * | | / | \ - * * A c>10 AND INSubquery - * / \ - * a Query - * / \ - * SELECT FROM - * | | - * b B - *

    - *

    - */ - @Override - public void rewrite() { - SQLTableSource from = queryBlock.getFrom(); - addJoinTable(from); + /** + * + * + *
    +   * Build Where clause from input query.
    +   * 

    + * With the input query. + * Query + * / | \ + * SELECT FROM WHERE + * | | / | \ + * * A c>10 AND INSubquery + * / \ + * a Query + * / \ + * SELECT FROM + * | | + * b B + *

    + *

    + */ + @Override + public void rewrite() { + SQLTableSource from = queryBlock.getFrom(); + addJoinTable(from); SQLExpr where = queryBlock.getWhere(); if (null == where) { @@ -73,16 +77,16 @@ public void rewrite() { } } - /** - * Build the Null check expression. For example,
    - * SELECT * FROM A WHERE a IN (SELECT b FROM B)
    - * should return B.b IS NOT NULL - */ - private SQLBinaryOpExpr generateNullOp() { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); - binaryOpExpr.setLeft(fetchJoinExpr()); - binaryOpExpr.setRight(new SQLNullExpr()); - binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); + /** + * Build the Null check expression. For example,
    + * SELECT * FROM A WHERE a IN (SELECT b FROM B)
    + * should return B.b IS NOT NULL + */ + private SQLBinaryOpExpr generateNullOp() { + SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); + binaryOpExpr.setLeft(fetchJoinExpr()); + binaryOpExpr.setRight(new SQLNullExpr()); + binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); return binaryOpExpr; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java index 26684f4f61..f504b4760d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java @@ -19,6 +19,7 @@ /** * Nested EXISTS SQL Rewriter. The EXISTS clause will be remove from the SQL. The translated SQL * will use ElasticSearch's nested query logic. + * *
      * For example,
      * 

    diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java index e47027f024..7176bd030c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.expr.SQLAllColumnExpr; @@ -20,6 +19,9 @@ import java.util.Deque; /** + * + * + *

      * Add the alias for identifier the subquery query.
      * Use the table alias if it already has one, Auto generate if it doesn't has one.
      * 

    @@ -41,126 +43,123 @@ * | Identifier in Select | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE TbA_0.a IN (SELECT TbB_0.b FROM TbB as TbB_0) and TbA_0.c > 10 | * | | (TbB,TbB_0) | | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ + *

    */ public class SubqueryAliasRewriter extends MySqlASTVisitorAdapter { - private final Deque
  • tableScope = new ArrayDeque<>(); - private int aliasSuffix = 0; - private static final String DOT = "."; - - @Override - public boolean visit(MySqlSelectQueryBlock query) { - SQLTableSource from = query.getFrom(); - if (from instanceof SQLExprTableSource) { - SQLExprTableSource expr = (SQLExprTableSource) from; - String tableName = expr.getExpr().toString().replaceAll(" ", ""); - - if (expr.getAlias() != null) { - tableScope.push(new Table(tableName, expr.getAlias())); - } else { - expr.setAlias(createAlias(tableName)); - tableScope.push(new Table(tableName, expr.getAlias())); - } - } - return true; + private final Deque
    tableScope = new ArrayDeque<>(); + private int aliasSuffix = 0; + private static final String DOT = "."; + + @Override + public boolean visit(MySqlSelectQueryBlock query) { + SQLTableSource from = query.getFrom(); + if (from instanceof SQLExprTableSource) { + SQLExprTableSource expr = (SQLExprTableSource) from; + String tableName = expr.getExpr().toString().replaceAll(" ", ""); + + if (expr.getAlias() != null) { + tableScope.push(new Table(tableName, expr.getAlias())); + } else { + expr.setAlias(createAlias(tableName)); + tableScope.push(new Table(tableName, expr.getAlias())); + } } + return true; + } - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (!tableScope.isEmpty() && (inSelect(expr) || inWhere(expr) || inSubquery(expr))) { - rewrite(tableScope.peek(), expr); - } - return true; + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (!tableScope.isEmpty() && (inSelect(expr) || inWhere(expr) || inSubquery(expr))) { + rewrite(tableScope.peek(), expr); } + return true; + } - @Override - public boolean visit(SQLAllColumnExpr expr) { - if (!tableScope.isEmpty() && inSelect(expr)) { - ((SQLSelectItem) expr.getParent()).setExpr(createIdentifierExpr(tableScope.peek())); - } - return true; + @Override + public boolean visit(SQLAllColumnExpr expr) { + if (!tableScope.isEmpty() && inSelect(expr)) { + ((SQLSelectItem) expr.getParent()).setExpr(createIdentifierExpr(tableScope.peek())); } - - private boolean inSelect(SQLIdentifierExpr expr) { - return expr.getParent() instanceof SQLSelectItem; + return true; + } + + private boolean inSelect(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLSelectItem; + } + + private boolean inSelect(SQLAllColumnExpr expr) { + return expr.getParent() instanceof SQLSelectItem; + } + + private boolean inWhere(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLBinaryOpExpr + && !isESTable((SQLBinaryOpExpr) expr.getParent()); + } + + /** + * The table name in OpenSearch could be 
"index/type". Which represent as SQLBinaryOpExpr in AST. + */ + private boolean isESTable(SQLBinaryOpExpr expr) { + return expr.getOperator() == SQLBinaryOperator.Divide + && expr.getParent() instanceof SQLExprTableSource; + } + + private boolean inSubquery(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLInSubQueryExpr; + } + + @Override + public void endVisit(MySqlSelectQueryBlock query) { + if (!tableScope.isEmpty()) { + tableScope.pop(); } + } - private boolean inSelect(SQLAllColumnExpr expr) { - return expr.getParent() instanceof SQLSelectItem; - } + private void rewrite(Table table, SQLIdentifierExpr expr) { + String tableAlias = table.getAlias(); + String tableName = table.getName(); - private boolean inWhere(SQLIdentifierExpr expr) { - return expr.getParent() instanceof SQLBinaryOpExpr && !isESTable((SQLBinaryOpExpr) expr.getParent()); + String exprName = expr.getName(); + if (exprName.startsWith(tableName + DOT) || exprName.startsWith(tableAlias + DOT)) { + expr.setName(exprName.replace(tableName + DOT, tableAlias + DOT)); + } else { + expr.setName(String.join(DOT, tableAlias, exprName)); } + } - /** - * The table name in OpenSearch could be "index/type". Which represent as SQLBinaryOpExpr in AST. 
- */ - private boolean isESTable(SQLBinaryOpExpr expr) { - return expr.getOperator() == SQLBinaryOperator.Divide && expr.getParent() instanceof SQLExprTableSource; - } + private SQLIdentifierExpr createIdentifierExpr(Table table) { + String newIdentifierName = String.join(DOT, table.getAlias(), "*"); + return new SQLIdentifierExpr(newIdentifierName); + } - private boolean inSubquery(SQLIdentifierExpr expr) { - return expr.getParent() instanceof SQLInSubQueryExpr; - } + private String createAlias(String alias) { + return String.format("%s_%d", alias, next()); + } - @Override - public void endVisit(MySqlSelectQueryBlock query) { - if (!tableScope.isEmpty()) { - tableScope.pop(); - } - } + private Integer next() { + return aliasSuffix++; + } - private void rewrite(Table table, SQLIdentifierExpr expr) { - String tableAlias = table.getAlias(); - String tableName = table.getName(); + /** Table Bean. */ + private static class Table { - String exprName = expr.getName(); - if (exprName.startsWith(tableName + DOT) || exprName.startsWith(tableAlias + DOT)) { - expr.setName(exprName.replace(tableName + DOT, tableAlias + DOT)); - } else { - expr.setName(String.join(DOT, tableAlias, exprName)); - } + public String getName() { + return name; } - private SQLIdentifierExpr createIdentifierExpr(Table table) { - String newIdentifierName = String.join(DOT, table.getAlias(), "*"); - return new SQLIdentifierExpr(newIdentifierName); + public String getAlias() { + return alias; } - private String createAlias(String alias) { - return String.format("%s_%d", alias, next()); - } + /** Table Name. */ + private String name; - private Integer next() { - return aliasSuffix++; - } + /** Table Alias. */ + private String alias; - /** - * Table Bean. - */ - private static class Table { - - public String getName() { - return name; - } - - public String getAlias() { - return alias; - } - - /** - * Table Name. - */ - private String name; - - /** - * Table Alias. 
- */ - private String alias; - - Table(String name, String alias) { - this.name = name; - this.alias = alias; - } + Table(String name, String alias) { + this.name = name; + this.alias = alias; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java index 7b99d52e68..5e1102994e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java @@ -3,103 +3,105 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; - import com.alibaba.druid.sql.ast.SQLExpr; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. */ public class SpatialParamsFactory { - public static Set allowedMethods; + public static Set allowedMethods; - static { - allowedMethods = new HashSet<>(); - allowedMethods.add("GEO_INTERSECTS"); - allowedMethods.add("GEO_BOUNDING_BOX"); - allowedMethods.add("GEO_DISTANCE"); - allowedMethods.add("GEO_DISTANCE_RANGE"); - allowedMethods.add("GEO_POLYGON"); - allowedMethods.add("GEO_CELL"); - } + static { + allowedMethods = new HashSet<>(); + allowedMethods.add("GEO_INTERSECTS"); + allowedMethods.add("GEO_BOUNDING_BOX"); + allowedMethods.add("GEO_DISTANCE"); + allowedMethods.add("GEO_DISTANCE_RANGE"); + allowedMethods.add("GEO_POLYGON"); + allowedMethods.add("GEO_CELL"); + } - public static boolean isAllowedMethod(String name) { - return allowedMethods.contains(name); - } + public static boolean isAllowedMethod(String name) { + return allowedMethods.contains(name); + } - public static Object generateSpatialParamsObject(String methodName, List params) { - switch (methodName) { - case "GEO_INTERSECTS": - if (params.size() != 2) { - throw new 
RuntimeException("GEO_INTERSECTS should have exactly 2 parameters : (fieldName,'WKT') "); - } - return params.get(1).toString(); - case "GEO_BOUNDING_BOX": - if (params.size() != 5) { - throw new RuntimeException("GEO_BOUNDING_BOX should have exactly 5 parameters : " - + "(fieldName,topLeftLon,topLeftLan,bottomRightLon,bottomRightLan) "); - } - double topLeftLon = Double.parseDouble(params.get(1).toString()); - double topLeftLat = Double.parseDouble(params.get(2).toString()); - double bottomRightLon = Double.parseDouble(params.get(3).toString()); - double bottomRightLat = Double.parseDouble(params.get(4).toString()); - return new BoundingBoxFilterParams(new Point(topLeftLon, topLeftLat), - new Point(bottomRightLon, bottomRightLat)); - case "GEO_DISTANCE": - if (params.size() != 4) { - throw new RuntimeException("GEO_DISTANCE should have exactly 4 parameters : " - + "(fieldName,distance,fromLon,fromLat) "); - } - String distance = params.get(1).toString(); - double lon = Double.parseDouble(params.get(2).toString()); - double lat = Double.parseDouble(params.get(3).toString()); - return new DistanceFilterParams(distance, new Point(lon, lat)); - case "GEO_DISTANCE_RANGE": - if (params.size() != 5) { - throw new RuntimeException("GEO_DISTANCE should have exactly 5 parameters : " - + "(fieldName,distanceFrom,distanceTo,fromLon,fromLat) "); - } - String distanceFrom = params.get(1).toString(); - String distanceTo = params.get(2).toString(); - lon = Double.parseDouble(params.get(3).toString()); - lat = Double.parseDouble(params.get(4).toString()); - return new RangeDistanceFilterParams(distanceFrom, distanceTo, new Point(lon, lat)); - case "GEO_POLYGON": - if (params.size() % 2 == 0 || params.size() <= 5) { - throw new RuntimeException("GEO_POLYGON should have odd num of parameters and > 5 : " - + "(fieldName,lon1,lat1,lon2,lat2,lon3,lat3,...) 
"); - } - int numberOfPoints = (params.size() - 1) / 2; - List points = new LinkedList<>(); - for (int i = 0; i < numberOfPoints; i++) { - int currentPointLocation = 1 + i * 2; - lon = Double.parseDouble(params.get(currentPointLocation).toString()); - lat = Double.parseDouble(params.get(currentPointLocation + 1).toString()); - points.add(new Point(lon, lat)); - } - return new PolygonFilterParams(points); - case "GEO_CELL": - if (params.size() < 4 || params.size() > 5) { - throw new RuntimeException("GEO_CELL should have 4 or 5 params " - + "(fieldName,lon,lat,precision,neighbors(optional)) "); - } - lon = Double.parseDouble(params.get(1).toString()); - lat = Double.parseDouble(params.get(2).toString()); - Point geoHashPoint = new Point(lon, lat); - int precision = Integer.parseInt(params.get(3).toString()); - if (params.size() == 4) { - return new CellFilterParams(geoHashPoint, precision); - } - boolean neighbors = Boolean.parseBoolean(params.get(4).toString()); - return new CellFilterParams(geoHashPoint, precision, neighbors); - default: - throw new RuntimeException(String.format("Unknown method name: %s", methodName)); + public static Object generateSpatialParamsObject(String methodName, List params) { + switch (methodName) { + case "GEO_INTERSECTS": + if (params.size() != 2) { + throw new RuntimeException( + "GEO_INTERSECTS should have exactly 2 parameters : (fieldName,'WKT') "); + } + return params.get(1).toString(); + case "GEO_BOUNDING_BOX": + if (params.size() != 5) { + throw new RuntimeException( + "GEO_BOUNDING_BOX should have exactly 5 parameters : " + + "(fieldName,topLeftLon,topLeftLan,bottomRightLon,bottomRightLan) "); + } + double topLeftLon = Double.parseDouble(params.get(1).toString()); + double topLeftLat = Double.parseDouble(params.get(2).toString()); + double bottomRightLon = Double.parseDouble(params.get(3).toString()); + double bottomRightLat = Double.parseDouble(params.get(4).toString()); + return new BoundingBoxFilterParams( + new 
Point(topLeftLon, topLeftLat), new Point(bottomRightLon, bottomRightLat)); + case "GEO_DISTANCE": + if (params.size() != 4) { + throw new RuntimeException( + "GEO_DISTANCE should have exactly 4 parameters : " + + "(fieldName,distance,fromLon,fromLat) "); + } + String distance = params.get(1).toString(); + double lon = Double.parseDouble(params.get(2).toString()); + double lat = Double.parseDouble(params.get(3).toString()); + return new DistanceFilterParams(distance, new Point(lon, lat)); + case "GEO_DISTANCE_RANGE": + if (params.size() != 5) { + throw new RuntimeException( + "GEO_DISTANCE should have exactly 5 parameters : " + + "(fieldName,distanceFrom,distanceTo,fromLon,fromLat) "); + } + String distanceFrom = params.get(1).toString(); + String distanceTo = params.get(2).toString(); + lon = Double.parseDouble(params.get(3).toString()); + lat = Double.parseDouble(params.get(4).toString()); + return new RangeDistanceFilterParams(distanceFrom, distanceTo, new Point(lon, lat)); + case "GEO_POLYGON": + if (params.size() % 2 == 0 || params.size() <= 5) { + throw new RuntimeException( + "GEO_POLYGON should have odd num of parameters and > 5 : " + + "(fieldName,lon1,lat1,lon2,lat2,lon3,lat3,...) 
"); + } + int numberOfPoints = (params.size() - 1) / 2; + List points = new LinkedList<>(); + for (int i = 0; i < numberOfPoints; i++) { + int currentPointLocation = 1 + i * 2; + lon = Double.parseDouble(params.get(currentPointLocation).toString()); + lat = Double.parseDouble(params.get(currentPointLocation + 1).toString()); + points.add(new Point(lon, lat)); + } + return new PolygonFilterParams(points); + case "GEO_CELL": + if (params.size() < 4 || params.size() > 5) { + throw new RuntimeException( + "GEO_CELL should have 4 or 5 params " + + "(fieldName,lon,lat,precision,neighbors(optional)) "); + } + lon = Double.parseDouble(params.get(1).toString()); + lat = Double.parseDouble(params.get(2).toString()); + Point geoHashPoint = new Point(lon, lat); + int precision = Integer.parseInt(params.get(3).toString()); + if (params.size() == 4) { + return new CellFilterParams(geoHashPoint, precision); } + boolean neighbors = Boolean.parseBoolean(params.get(4).toString()); + return new CellFilterParams(geoHashPoint, precision, neighbors); + default: + throw new RuntimeException(String.format("Unknown method name: %s", methodName)); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java index 99bc8f0742..13f51ed777 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; import com.google.common.base.Joiner; @@ -11,160 +10,156 @@ import java.util.List; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Created by Eliran on 4/8/2015. - */ +/** Created by Eliran on 4/8/2015. 
*/ public class WktToGeoJsonConverter { - public static String toGeoJson(String wkt) { - wkt = wkt.toLowerCase(); - int startOfCoordinates = wkt.indexOf("("); - if (startOfCoordinates == -1) { - throw new IllegalArgumentException( - StringUtils.format("Failed to convert well-known-text [%s] to geometry type", wkt)); - } - - String wktType = wkt.substring(0, startOfCoordinates).trim(); - wkt = wkt.substring(startOfCoordinates); - - String type = ""; - String coordinates = ""; - switch (wktType) { - case ("point"): - type = "Point"; - coordinates = pointCoordinatesFromWkt(wkt); - break; - case ("polygon"): - type = "Polygon"; - coordinates = polygonCoordinatesFromWkt(wkt); - break; - case ("linestring"): - type = "LineString"; - coordinates = lineStringCoordinatesFromWkt(wkt); - break; - case ("multipolygon"): - type = "MultiPolygon"; - coordinates = multiPolygonCoordinatesFromWkt(wkt); - break; - case ("multipoint"): - type = "MultiPoint"; - coordinates = multiPointCoordinatesFromWkt(wkt); - break; - case ("multilinestring"): - type = "MultiLineString"; - coordinates = multiLineStringCoordinatesFromWkt(wkt); - break; - default: - throw new IllegalArgumentException("Unsupported well-known-text type: " + wktType); - - } - - return buildGeoJson(type, coordinates); + public static String toGeoJson(String wkt) { + wkt = wkt.toLowerCase(); + int startOfCoordinates = wkt.indexOf("("); + if (startOfCoordinates == -1) { + throw new IllegalArgumentException( + StringUtils.format("Failed to convert well-known-text [%s] to geometry type", wkt)); } - //input: ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10)) - private static String multiLineStringCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - String lineStringsWithPipeSeparator = wkt.replaceAll("\\s*\\)\\s*,\\s*\\(", ")|("); - String[] lineStrings = lineStringsWithPipeSeparator.split("\\|"); - String[] coordinates = new String[lineStrings.length]; - for (int i = 0; i < lineStrings.length; i++) { - 
coordinates[i] = lineStringCoordinatesFromWkt(lineStrings[i]); - } - String multiLineStringCoordinates = Joiner.on(",").join(coordinates); - return String.format("[%s]", multiLineStringCoordinates); - + String wktType = wkt.substring(0, startOfCoordinates).trim(); + wkt = wkt.substring(startOfCoordinates); + + String type = ""; + String coordinates = ""; + switch (wktType) { + case ("point"): + type = "Point"; + coordinates = pointCoordinatesFromWkt(wkt); + break; + case ("polygon"): + type = "Polygon"; + coordinates = polygonCoordinatesFromWkt(wkt); + break; + case ("linestring"): + type = "LineString"; + coordinates = lineStringCoordinatesFromWkt(wkt); + break; + case ("multipolygon"): + type = "MultiPolygon"; + coordinates = multiPolygonCoordinatesFromWkt(wkt); + break; + case ("multipoint"): + type = "MultiPoint"; + coordinates = multiPointCoordinatesFromWkt(wkt); + break; + case ("multilinestring"): + type = "MultiLineString"; + coordinates = multiLineStringCoordinatesFromWkt(wkt); + break; + default: + throw new IllegalArgumentException("Unsupported well-known-text type: " + wktType); } - //input v1:MULTIPOINT (10 40, 40 30, 20 20, 30 10) - //v2:MULTIPOINT ((10 40), (40 30), (20 20), (30 10)) - private static String multiPointCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - boolean isSecondVersionMultiPoint = wkt.contains("("); - String coordinates = ""; - if (isSecondVersionMultiPoint) { - //(10 40), (40 30), (20 20)-> 10 40, 40 30, 20 20 - wkt = wkt.replaceAll("\\(|\\)", ""); - } - coordinates = getJsonArrayFromListOfPoints(wkt); - return coordinates; + return buildGeoJson(type, coordinates); + } + + // input: ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10)) + private static String multiLineStringCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + String lineStringsWithPipeSeparator = wkt.replaceAll("\\s*\\)\\s*,\\s*\\(", ")|("); + String[] lineStrings = lineStringsWithPipeSeparator.split("\\|"); + String[] coordinates = 
new String[lineStrings.length]; + for (int i = 0; i < lineStrings.length; i++) { + coordinates[i] = lineStringCoordinatesFromWkt(lineStrings[i]); } - - //input (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5))) - private static String multiPolygonCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - String polygonsWithPipeSeparator = wkt.replaceAll("\\s*\\)\\s*\\)\\s*,\\s*\\(\\s*\\(\\s*", "))|(("); - String[] polygons = polygonsWithPipeSeparator.split("\\|"); - String[] polygonsCoordinates = new String[polygons.length]; - for (int i = 0; i < polygons.length; i++) { - polygonsCoordinates[i] = polygonCoordinatesFromWkt(polygons[i]); - } - String coordinates = Joiner.on(",").join(polygonsCoordinates); - return String.format("[%s]", coordinates); + String multiLineStringCoordinates = Joiner.on(",").join(coordinates); + return String.format("[%s]", multiLineStringCoordinates); + } + + // input v1:MULTIPOINT (10 40, 40 30, 20 20, 30 10) + // v2:MULTIPOINT ((10 40), (40 30), (20 20), (30 10)) + private static String multiPointCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + boolean isSecondVersionMultiPoint = wkt.contains("("); + String coordinates = ""; + if (isSecondVersionMultiPoint) { + // (10 40), (40 30), (20 20)-> 10 40, 40 30, 20 20 + wkt = wkt.replaceAll("\\(|\\)", ""); } - - //input : (30 10, 10 30, 40 40) - private static String lineStringCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - return getJsonArrayFromListOfPoints(wkt); + coordinates = getJsonArrayFromListOfPoints(wkt); + return coordinates; + } + + // input (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5))) + private static String multiPolygonCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + String polygonsWithPipeSeparator = + wkt.replaceAll("\\s*\\)\\s*\\)\\s*,\\s*\\(\\s*\\(\\s*", "))|(("); + String[] polygons = polygonsWithPipeSeparator.split("\\|"); + String[] polygonsCoordinates = new 
String[polygons.length]; + for (int i = 0; i < polygons.length; i++) { + polygonsCoordinates[i] = polygonCoordinatesFromWkt(polygons[i]); } - - //input: v1:((35 10, 45 45, 15 40, 10 20, 35 10)) - //v2:((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30)) - private static String polygonCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 2); - String coordinates; - boolean polygonContainsInnerHoles = wkt.contains("("); - if (polygonContainsInnerHoles) { - String[] polygons = wkt.split("\\s*\\)\\s*,\\s*\\(\\s*"); - String[] coordinatesOfPolygons = new String[polygons.length]; - for (int i = 0; i < polygons.length; i++) { - String polygonCoordinates = getJsonArrayFromListOfPoints(polygons[i]); - coordinatesOfPolygons[i] = polygonCoordinates; - } - coordinates = Joiner.on(",").join(coordinatesOfPolygons); - } else { - coordinates = getJsonArrayFromListOfPoints(wkt); - } - return String.format("[%s]", coordinates); + String coordinates = Joiner.on(",").join(polygonsCoordinates); + return String.format("[%s]", coordinates); + } + + // input : (30 10, 10 30, 40 40) + private static String lineStringCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + return getJsonArrayFromListOfPoints(wkt); + } + + // input: v1:((35 10, 45 45, 15 40, 10 20, 35 10)) + // v2:((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30)) + private static String polygonCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 2); + String coordinates; + boolean polygonContainsInnerHoles = wkt.contains("("); + if (polygonContainsInnerHoles) { + String[] polygons = wkt.split("\\s*\\)\\s*,\\s*\\(\\s*"); + String[] coordinatesOfPolygons = new String[polygons.length]; + for (int i = 0; i < polygons.length; i++) { + String polygonCoordinates = getJsonArrayFromListOfPoints(polygons[i]); + coordinatesOfPolygons[i] = polygonCoordinates; + } + coordinates = Joiner.on(",").join(coordinatesOfPolygons); + } else { + coordinates = 
getJsonArrayFromListOfPoints(wkt); } - - private static String getJsonArrayFromListOfPoints(String pointsInWkt) { - String[] points = pointsInWkt.split(","); - List coordinates = new ArrayList<>(); - for (String point : points) { - coordinates.add(extractCoordinateFromPoint(point)); - } - - String joinedCoordinates = Joiner.on(",").join(coordinates); - return String.format("[%s]", joinedCoordinates); + return String.format("[%s]", coordinates); + } + + private static String getJsonArrayFromListOfPoints(String pointsInWkt) { + String[] points = pointsInWkt.split(","); + List coordinates = new ArrayList<>(); + for (String point : points) { + coordinates.add(extractCoordinateFromPoint(point)); } - private static String buildGeoJson(String type, String coordinates) { - return String.format("{\"type\":\"%s\", \"coordinates\": %s}", type, coordinates); + String joinedCoordinates = Joiner.on(",").join(coordinates); + return String.format("[%s]", joinedCoordinates); + } + + private static String buildGeoJson(String type, String coordinates) { + return String.format("{\"type\":\"%s\", \"coordinates\": %s}", type, coordinates); + } + + // input : (30 10) + public static String pointCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + return extractCoordinateFromPoint(wkt); + } + + private static String extractCoordinateFromPoint(String point) { + String pointPattern = "(\\s*)([0-9\\.-]+)(\\s*)([0-9\\.-]+)(\\s*)"; + return point.replaceAll(pointPattern, "[$2,$4]"); + } + + private static String removeBrackets(String wkt, int num) { + String result = wkt; + for (int i = 0; i < num; i++) { + int lastClosingBrackets = result.lastIndexOf(")"); + int firstOpenBrackets = result.indexOf("("); + if (lastClosingBrackets == -1 || firstOpenBrackets == -1) { + throw new IllegalArgumentException("Illegal syntax: " + wkt); + } + result = result.substring(firstOpenBrackets + 1, lastClosingBrackets); } - - //input : (30 10) - public static String pointCoordinatesFromWkt(String 
wkt) { - wkt = removeBrackets(wkt, 1); - return extractCoordinateFromPoint(wkt); - } - - private static String extractCoordinateFromPoint(String point) { - String pointPattern = "(\\s*)([0-9\\.-]+)(\\s*)([0-9\\.-]+)(\\s*)"; - return point.replaceAll(pointPattern, "[$2,$4]"); - } - - private static String removeBrackets(String wkt, int num) { - String result = wkt; - for (int i = 0; i < num; i++) { - int lastClosingBrackets = result.lastIndexOf(")"); - int firstOpenBrackets = result.indexOf("("); - if (lastClosingBrackets == -1 || firstOpenBrackets == -1) { - throw new IllegalArgumentException("Illegal syntax: " + wkt); - } - result = result.substring(firstOpenBrackets + 1, lastClosingBrackets); - } - return result; - } - + return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java index de8e6eb0fa..d46a80f6d3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import static org.opensearch.sql.legacy.utils.StringUtils.format; @@ -39,1019 +38,1171 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.executor.format.Schema; -/** - * Created by allwefantasy on 8/19/16. - */ +/** Created by allwefantasy on 8/19/16. 
*/ public class SQLFunctions { - private static final Set numberOperators = Sets.newHashSet( - "exp", "expm1", "log", "log2", "log10", "ln", "sqrt", "cbrt", "ceil", "floor", "rint", "pow", "power", - "round", "rand", "abs", "sign", "signum" - ); - - private static final Set mathConstants = Sets.newHashSet("e", "pi"); - - private static final Set trigFunctions = Sets.newHashSet( - "degrees", "radians", "sin", "cos", "tan", "asin", "acos", "atan", "atan2", "sinh", "cosh", "cot" - ); - - private static final Set stringOperators = Sets.newHashSet( - "split", "concat_ws", "substring", "trim", "lower", "upper", "rtrim", "ltrim", "replace", - "left", "right" - ); - - private static final Set stringFunctions = Sets.newHashSet( - "length", "locate", "ascii" - ); - - private static final Set binaryOperators = Sets.newHashSet( - "add", "multiply", "divide", "subtract", "modulus" - ); - - private static final Set dateFunctions = Sets.newHashSet( - "date_format", "year", "month_of_year", "week_of_year", "day_of_year", "day_of_month", - "day_of_week", "hour_of_day", "minute_of_day", "minute_of_hour", "second_of_minute", "month", "dayofmonth", - "date", "monthname", "timestamp", "maketime", "now", "curdate" - ); - - private static final Set conditionalFunctions = Sets.newHashSet( - "if", "ifnull", "isnull" - ); - - private static final Set utilityFunctions = Sets.newHashSet("field", "assign", "cast"); - - public static final Set builtInFunctions = Stream.of( - numberOperators, - mathConstants, - trigFunctions, - stringOperators, - stringFunctions, - binaryOperators, - dateFunctions, - conditionalFunctions, - utilityFunctions) - .flatMap(Set::stream).collect(Collectors.toSet()); - - private Map generatedIds = new HashMap<>(); - - /** - * Generates next id for given method name. 
The id's are increasing for each method name, so - * nextId("a"), nextId("a"), nextId("b") will return a_1, a_2, b_1 - */ - public String nextId(String methodName) { - return methodName + "_" + generatedIds.merge(methodName, 1, Integer::sum); - } - - - /** - * Is the function actually translated into Elastic DSL script during execution? - */ - public static boolean isFunctionTranslatedToScript(String function) { - return builtInFunctions.contains(function.toLowerCase()); - } - - public Tuple function(String methodName, List paramers, String name, - boolean returnValue) throws SqlParseException { - Tuple functionStr = null; - switch (methodName.toLowerCase()) { - case "cast": { - SQLCastExpr castExpr = (SQLCastExpr) ((SQLIdentifierExpr) paramers.get(0).value).getParent(); - String typeName = castExpr.getDataType().getName(); - functionStr = cast(typeName, paramers); - break; - } - case "lower": { - functionStr = lower( - (SQLExpr) paramers.get(0).value, - getLocaleForCaseChangingFunction(paramers), - name - ); - break; - } - case "upper": { - functionStr = upper( - (SQLExpr) paramers.get(0).value, - getLocaleForCaseChangingFunction(paramers), - name); - break; - } - - // Split is currently not supported since its using .split() in painless which is not allow-listed - case "split": - if (paramers.size() == 3) { - functionStr = split((SQLExpr) paramers.get(0).value, - Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), - Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString()), name); - } else { - functionStr = split((SQLExpr) paramers.get(0).value, - paramers.get(1).value.toString(), - name); - } - - break; - - case "concat_ws": - List result = Lists.newArrayList(); - for (int i = 1; i < paramers.size(); i++) { - result.add((SQLExpr) paramers.get(i).value); - } - functionStr = concat_ws(paramers.get(0).value.toString(), result); - - break; - - - case "date_format": - functionStr = date_format( - (SQLExpr) paramers.get(0).value, - 
Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), - paramers.size() > 2 ? Util.expr2Object((SQLExpr) paramers.get(2).value).toString() : null, - name); - break; - - case "year": - functionStr = dateFunctionTemplate("year", (SQLExpr) paramers.get(0).value); - break; - case "month_of_year": - case "month": - functionStr = dateFunctionTemplate("monthValue", (SQLExpr) paramers.get(0).value); - break; - case "monthname": - functionStr = dateFunctionTemplate("month", (SQLExpr) paramers.get(0).value); - break; - case "week_of_year": - functionStr = dateFunctionTemplate("weekOfWeekyear", - "get(WeekFields.ISO.weekOfWeekBasedYear())", - (SQLExpr) paramers.get(0).value); - break; - case "day_of_year": - functionStr = dateFunctionTemplate("dayOfYear", (SQLExpr) paramers.get(0).value); - break; - case "day_of_month": - case "dayofmonth": - functionStr = dateFunctionTemplate("dayOfMonth", (SQLExpr) paramers.get(0).value); - break; - case "day_of_week": - functionStr = dateFunctionTemplate("dayOfWeek", - "getDayOfWeekEnum().getValue()", - (SQLExpr) paramers.get(0).value); - break; - case "date": - functionStr = date((SQLExpr) paramers.get(0).value); - break; - case "hour_of_day": - functionStr = dateFunctionTemplate("hour", (SQLExpr) paramers.get(0).value); - break; - case "minute_of_day": - functionStr = dateFunctionTemplate("minuteOfDay", - "get(ChronoField.MINUTE_OF_DAY)", - (SQLExpr) paramers.get(0).value); - break; - case "minute_of_hour": - functionStr = dateFunctionTemplate("minute", (SQLExpr) paramers.get(0).value); - break; - case "second_of_minute": - functionStr = dateFunctionTemplate("second", (SQLExpr) paramers.get(0).value); - break; - case "timestamp": - functionStr = timestamp((SQLExpr) paramers.get(0).value); - break; - case "maketime": - functionStr = maketime((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value, - (SQLExpr) paramers.get(2).value); - break; - case "now": - functionStr = now(); - break; - case "curdate": - functionStr = 
curdate(); - break; - - case "e": - case "pi": - methodName = methodName.toUpperCase(); - functionStr = mathConstantTemplate("Math." + methodName, methodName); - break; - - case "abs": - case "round": - case "floor": - case "ceil": - case "cbrt": - case "rint": - case "exp": - case "expm1": - case "sqrt": - case "sin": - case "cos": - case "tan": - case "asin": - case "acos": - case "atan": - case "sinh": - case "cosh": - functionStr = mathSingleValueTemplate("Math." + methodName, methodName, - (SQLExpr) paramers.get(0).value, name); - break; - - case "rand": - if (paramers.isEmpty()) { - functionStr = rand(); - } else { - functionStr = rand((SQLExpr) paramers.get(0).value); - } - break; - - case "cot": - // OpenSearch does not support the function name cot - functionStr = mathSingleValueTemplate("1 / Math.tan", methodName, - (SQLExpr) paramers.get(0).value, name); - break; - - case "sign": - case "signum": - methodName = "signum"; - functionStr = mathSingleValueTemplate("Math." + methodName, methodName, - (SQLExpr) paramers.get(0).value, name); - break; - - case "pow": - case "power": - methodName = "pow"; - functionStr = mathDoubleValueTemplate("Math." + methodName, methodName, - (SQLExpr) paramers.get(0).value, Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), - name); - break; - - case "atan2": - functionStr = mathDoubleValueTemplate("Math." 
+ methodName, methodName, - (SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "substring": - functionStr = substring((SQLExpr) paramers.get(0).value, - Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(1).value).toString()), - Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString())); - break; - - case "degrees": - functionStr = degrees((SQLExpr) paramers.get(0).value, name); - break; - case "radians": - functionStr = radians((SQLExpr) paramers.get(0).value, name); - break; - - case "trim": - functionStr = trim((SQLExpr) paramers.get(0).value, name); - break; - - case "add": - functionStr = add((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "subtract": - functionStr = subtract((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "divide": - functionStr = divide((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "multiply": - functionStr = multiply((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "modulus": - functionStr = modulus((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "field": - functionStr = field(Util.expr2Object((SQLExpr) paramers.get(0).value).toString()); - break; - - case "log2": - functionStr = log(SQLUtils.toSQLExpr("2"), (SQLExpr) paramers.get(0).value, name); - break; - case "log10": - functionStr = log10((SQLExpr) paramers.get(0).value); - break; - case "log": - if (paramers.size() > 1) { - functionStr = log((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value, name); - } else { - functionStr = ln((SQLExpr) paramers.get(0).value); - } - break; - case "ln": - functionStr = ln((SQLExpr) paramers.get(0).value); - break; - case "assign": - functionStr = assign((SQLExpr) paramers.get(0).value); - break; - case "length": - functionStr = length((SQLExpr) paramers.get(0).value); - break; - case "replace": - 
functionStr = replace((SQLExpr) paramers.get(0).value, paramers.get(1).value.toString(), - paramers.get(2).value.toString()); - break; - case "locate": - int start = 0; - if (paramers.size() > 2) { - start = Integer.parseInt(paramers.get(2).value.toString()); - } - functionStr = locate(paramers.get(0).value.toString(), (SQLExpr) paramers.get(1).value, start); - break; - case "rtrim": - functionStr = rtrim((SQLExpr) paramers.get(0).value); - break; - case "ltrim": - functionStr = ltrim((SQLExpr) paramers.get(0).value); - break; - case "ascii": - functionStr = ascii((SQLExpr) paramers.get(0).value); - break; - case "left": - functionStr = left((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "right": - functionStr = right((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "if": - functionStr = ifFunc(paramers); - break; - case "ifnull": - functionStr = ifnull((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "isnull": - functionStr = isnull((SQLExpr) paramers.get(0).value); - break; - - default: - - } - if (returnValue) { - String generatedFieldName = functionStr.v1(); - String returnCommand = ";return " + generatedFieldName + ";"; - String newScript = functionStr.v2() + returnCommand; - functionStr = new Tuple<>(generatedFieldName, newScript); - } - return functionStr; - } - - public String getLocaleForCaseChangingFunction(List paramers) { - String locale; - if (paramers.size() == 1) { - locale = Locale.getDefault().getLanguage(); - } else { - locale = Util.expr2Object((SQLExpr) paramers.get(1).value).toString(); + private static final Set numberOperators = + Sets.newHashSet( + "exp", "expm1", "log", "log2", "log10", "ln", "sqrt", "cbrt", "ceil", "floor", "rint", + "pow", "power", "round", "rand", "abs", "sign", "signum"); + + private static final Set mathConstants = Sets.newHashSet("e", "pi"); + + private static final Set trigFunctions = + Sets.newHashSet( + 
"degrees", "radians", "sin", "cos", "tan", "asin", "acos", "atan", "atan2", "sinh", + "cosh", "cot"); + + private static final Set stringOperators = + Sets.newHashSet( + "split", + "concat_ws", + "substring", + "trim", + "lower", + "upper", + "rtrim", + "ltrim", + "replace", + "left", + "right"); + + private static final Set stringFunctions = Sets.newHashSet("length", "locate", "ascii"); + + private static final Set binaryOperators = + Sets.newHashSet("add", "multiply", "divide", "subtract", "modulus"); + + private static final Set dateFunctions = + Sets.newHashSet( + "date_format", + "year", + "month_of_year", + "week_of_year", + "day_of_year", + "day_of_month", + "day_of_week", + "hour_of_day", + "minute_of_day", + "minute_of_hour", + "second_of_minute", + "month", + "dayofmonth", + "date", + "monthname", + "timestamp", + "maketime", + "now", + "curdate"); + + private static final Set conditionalFunctions = Sets.newHashSet("if", "ifnull", "isnull"); + + private static final Set utilityFunctions = Sets.newHashSet("field", "assign", "cast"); + + public static final Set builtInFunctions = + Stream.of( + numberOperators, + mathConstants, + trigFunctions, + stringOperators, + stringFunctions, + binaryOperators, + dateFunctions, + conditionalFunctions, + utilityFunctions) + .flatMap(Set::stream) + .collect(Collectors.toSet()); + + private Map generatedIds = new HashMap<>(); + + /** + * Generates next id for given method name. The id's are increasing for each method name, so + * nextId("a"), nextId("a"), nextId("b") will return a_1, a_2, b_1 + */ + public String nextId(String methodName) { + return methodName + "_" + generatedIds.merge(methodName, 1, Integer::sum); + } + + /** Is the function actually translated into Elastic DSL script during execution? 
*/ + public static boolean isFunctionTranslatedToScript(String function) { + return builtInFunctions.contains(function.toLowerCase()); + } + + public Tuple function( + String methodName, List paramers, String name, boolean returnValue) + throws SqlParseException { + Tuple functionStr = null; + switch (methodName.toLowerCase()) { + case "cast": + { + SQLCastExpr castExpr = + (SQLCastExpr) ((SQLIdentifierExpr) paramers.get(0).value).getParent(); + String typeName = castExpr.getDataType().getName(); + functionStr = cast(typeName, paramers); + break; } - return locale; - } - - public Tuple cast(String castType, List paramers) throws SqlParseException { - String name = nextId("cast"); - return new Tuple<>(name, getCastScriptStatement(name, castType, paramers)); - } - - - public Tuple upper(SQLExpr field, String locale, String valueName) { - String name = nextId("upper"); - - if (valueName == null) { - return new Tuple<>(name, def(name, upper(getPropertyOrStringValue(field), locale))); - } else { - return new Tuple<>(name, getPropertyOrStringValue(field) + "; " - + def(name, valueName + "." + upper(getPropertyOrStringValue(field), locale))); + case "lower": + { + functionStr = + lower( + (SQLExpr) paramers.get(0).value, + getLocaleForCaseChangingFunction(paramers), + name); + break; } - } - - public Tuple lower(SQLExpr field, String locale, String valueName) { - String name = nextId("lower"); - - if (valueName == null) { - return new Tuple<>(name, def(name, lower(getPropertyOrStringValue(field), locale))); - } else { - return new Tuple<>(name, getPropertyOrStringValue(field) + "; " - + def(name, valueName + "." 
+ lower(getPropertyOrStringValue(field), locale))); + case "upper": + { + functionStr = + upper( + (SQLExpr) paramers.get(0).value, + getLocaleForCaseChangingFunction(paramers), + name); + break; } - } - - private static String def(String name, String value) { - return "def " + name + " = " + value; - } - - private static String doc(SQLExpr field) { - return "doc['" + exprString(field) + "']"; - } - - private static String doc(String field) { - return "doc['" + field + "']"; - } - - private static String exprString(SQLExpr expr) { - return Util.expr2Object(expr).toString(); - } - - private static String func(String methodName, boolean quotes, String... params) { - if (quotes) { - return methodName + "(" + quoteParams(params) + ")"; - } - - return methodName + "(" + String.join(", ", params) + ")"; - } - - /** - * Helper method to surround each param with '' (single quotes) for painless script - */ - private static String quoteParams(String... params) { - return Stream.of(params).collect(Collectors.joining("', '", "'", "'")); - } - - private Tuple concat_ws(String split, List columns) { - String name = nextId("concat_ws"); - List result = Lists.newArrayList(); - - for (SQLExpr column : columns) { - String strColumn = exprString(column); - if (strColumn.startsWith("def ")) { - result.add(strColumn); - } else if (isProperty(column)) { - result.add("doc['" + strColumn + "'].value"); - } else { - result.add("'" + strColumn + "'"); - } - - } - return new Tuple<>(name, def(name, Joiner.on("+ " + split + " +").join(result))); - } - - - //split(Column expr, java.lang.String pattern) - public Tuple split(SQLExpr field, String pattern, int index, String valueName) { - String name = nextId("split"); - final String script; - if (valueName == null) { - script = def(name, - getPropertyOrValue(field) + "." - + func("split", true, pattern) + "[" + index + "]"); - } else { - script = "; " + def(name, valueName + "." 
- + func("split", true, pattern) + "[" + index + "]"); - } - return new Tuple<>(name, script); - } - - //split(Column expr, java.lang.String pattern) - public Tuple split(SQLExpr field, String pattern, String valueName) { - String name = nextId("split"); - if (valueName == null) { - return new Tuple<>(name, - def(name, getPropertyOrValue(field) + "." - + func("split", true, pattern))); - } else { - return new Tuple<>(name, getPropertyOrValue(field) + "; " - + def(name, valueName + "." + func("split", true, pattern))); - } - } - - private Tuple date_format(SQLExpr field, String pattern, String zoneId, String valueName) { - String name = nextId("date_format"); - if (valueName == null) { - return new Tuple<>(name, "def " + name + " = DateTimeFormatter.ofPattern('" + pattern + "').withZone(" - + (zoneId != null ? "ZoneId.of('" + zoneId + "')" : "ZoneId.of(\"UTC\")") - + ").format(Instant.ofEpochMilli(" + getPropertyOrValue(field) + ".toInstant().toEpochMilli()))"); - } else { - return new Tuple<>(name, exprString(field) + "; " - + "def " + name + " = new SimpleDateFormat('" + pattern + "').format(" - + "new Date(" + valueName + " - 8*1000*60*60))"); - } - } - - /** - * Explicitly pass in name used to generate variable ID because methodName is not always valid - * - * For example, - * - * functionStr = dateFunctionTemplate("weekOfWeekyear", - * "get(WeekFields.ISO.weekOfWeekBasedYear())", - * (SQLExpr) paramers.get(0).value); - * - * - * The old dateFunctionTemplate(methodName, field) passes string "get(WeekFields.ISO.weekOfWeekBasedYear())" - * to nextId() which generates an invalid variable name in painless script. - */ - private Tuple dateFunctionTemplate(String name, String methodName, SQLExpr field) { - String id = nextId(name); - return new Tuple<>(id, def(id, doc(field) + ".value." 
+ methodName)); - } - - private Tuple dateFunctionTemplate(String methodName, SQLExpr field) { - return dateFunctionTemplate(methodName, methodName, field); - } - - public Tuple add(SQLExpr a, SQLExpr b) { - return binaryOpertator("add", "+", a, b); - } - - public Tuple assign(SQLExpr a) { - String name = nextId("assign"); - return new Tuple<>(name, - def(name, extractName(a))); - } - - private Tuple modulus(SQLExpr a, SQLExpr b) { - return binaryOpertator("modulus", "%", a, b); - } - - public Tuple field(String a) { - String name = nextId("field"); - return new Tuple<>(name, def(name, doc(a) + ".value")); - } - - private Tuple subtract(SQLExpr a, SQLExpr b) { - return binaryOpertator("subtract", "-", a, b); - } - - private Tuple multiply(SQLExpr a, SQLExpr b) { - return binaryOpertator("multiply", "*", a, b); - } - - private Tuple divide(SQLExpr a, SQLExpr b) { - return binaryOpertator("divide", "/", a, b); - } - - private Tuple binaryOpertator(String methodName, String operator, SQLExpr a, SQLExpr b) { - String name = nextId(methodName); - return new Tuple<>(name, - scriptDeclare(a) + scriptDeclare(b) + convertType(a) + convertType(b) - + def(name, extractName(a) + " " + operator + " " + extractName(b))); - } - - private static boolean isProperty(SQLExpr expr) { - return (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr - || expr instanceof SQLVariantRefExpr); - } - - private static String getPropertyOrValue(SQLExpr expr) { - if (isProperty(expr)) { - return doc(expr) + ".value"; - } else { - return exprString(expr); - } - } - - private static String getPropertyOrValue(String expr) { - if (isQuoted(expr, "'")) { - return expr; - } else if (StringUtils.isNumeric(expr)) { - return expr; - } else { - return doc(expr) + ".value"; - } - } - - private static String getPropertyOrStringValue(SQLExpr expr) { - if (isProperty(expr)) { - return doc(expr) + ".value"; - } else { - return "'" + exprString(expr) + "'"; - } - } - - private static String 
scriptDeclare(SQLExpr a) { - - if (isProperty(a) || a instanceof SQLNumericLiteralExpr) { - return ""; - } else { - return exprString(a) + ";"; - } - } - - private static String extractName(SQLExpr script) { - if (isProperty(script)) { - return doc(script) + ".value"; - } - String scriptStr = exprString(script); - String[] variance = scriptStr.split(";"); - String newScript = variance[variance.length - 1]; - if (newScript.trim().startsWith("def ")) { - //for now ,if variant is string,then change to double. - return newScript.trim().substring(4).split("=")[0].trim(); - } else { - return scriptStr; - } - } - //cast(year as int) - - private static String convertType(SQLExpr script) { - String[] variance = exprString(script).split(";"); - String newScript = variance[variance.length - 1]; - if (newScript.trim().startsWith("def ")) { - //for now ,if variant is string,then change to double. - String temp = newScript.trim().substring(4).split("=")[0].trim(); - - return " if( " + temp + " instanceof String) " + temp + "= Double.parseDouble(" + temp.trim() + "); "; + // Split is currently not supported since its using .split() in painless which is not + // allow-listed + case "split": + if (paramers.size() == 3) { + functionStr = + split( + (SQLExpr) paramers.get(0).value, + Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), + Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString()), + name); } else { - return ""; + functionStr = + split((SQLExpr) paramers.get(0).value, paramers.get(1).value.toString(), name); } + break; - } - - private String getScriptText(MethodField field) { - String content = ((SQLTextLiteralExpr) field.getParams().get(1).value).getText(); - return content; - } - - /** - * Using exprString() rather than getPropertyOrValue() for "base" since something like "Math.E" gets evaluated - * incorrectly in getPropertyOrValue(), returning it as a doc value instead of the literal string - */ - public Tuple log(SQLExpr base, SQLExpr 
field, String valueName) { - String name = nextId("log"); - String result; - if (valueName == null) { - result = def(name, func("Math.log", false, getPropertyOrValue(field)) - + "/" + func("Math.log", false, exprString(base))); - } else { - result = getPropertyOrValue(field) + "; " - + def(name, func("Math.log", false, valueName) + "/" - + func("Math.log", false, exprString(base))); + case "concat_ws": + List result = Lists.newArrayList(); + for (int i = 1; i < paramers.size(); i++) { + result.add((SQLExpr) paramers.get(i).value); } - return new Tuple<>(name, result); - } - - public Tuple log10(SQLExpr field) { - String name = nextId("log10"); - return new Tuple<>(name, def(name, StringUtils.format("Math.log10(%s)", getPropertyOrValue(field)))); - } - - public Tuple ln(SQLExpr field) { - String name = nextId("ln"); - return new Tuple<>(name, def(name, StringUtils.format("Math.log(%s)", getPropertyOrValue(field)))); - } - - public Tuple trim(SQLExpr field, String valueName) { - return strSingleValueTemplate("trim", field, valueName); - } - - private Tuple degrees(SQLExpr field, String valueName) { - return mathSingleValueTemplate("Math.toDegrees", "degrees", field, valueName); - } - - private Tuple radians(SQLExpr field, String valueName) { - return mathSingleValueTemplate("Math.toRadians", "radians", field, valueName); - } - - private Tuple rand(SQLExpr expr) { - String name = nextId("rand"); - return new Tuple<>(name, def(name, format("new Random(%s).nextDouble()", getPropertyOrValue(expr)))); - } - - private Tuple rand() { - String name = nextId("rand"); - return new Tuple<>(name, def(name, "new Random().nextDouble()")); - } - - private Tuple mathDoubleValueTemplate(String methodName, String fieldName, SQLExpr val1, - String val2, String valueName) { - String name = nextId(fieldName); - if (valueName == null) { - return new Tuple<>(name, def(name, func(methodName, false, getPropertyOrValue(val1), - getPropertyOrValue(val2)))); + functionStr = 
concat_ws(paramers.get(0).value.toString(), result); + + break; + + case "date_format": + functionStr = + date_format( + (SQLExpr) paramers.get(0).value, + Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), + paramers.size() > 2 + ? Util.expr2Object((SQLExpr) paramers.get(2).value).toString() + : null, + name); + break; + + case "year": + functionStr = dateFunctionTemplate("year", (SQLExpr) paramers.get(0).value); + break; + case "month_of_year": + case "month": + functionStr = dateFunctionTemplate("monthValue", (SQLExpr) paramers.get(0).value); + break; + case "monthname": + functionStr = dateFunctionTemplate("month", (SQLExpr) paramers.get(0).value); + break; + case "week_of_year": + functionStr = + dateFunctionTemplate( + "weekOfWeekyear", + "get(WeekFields.ISO.weekOfWeekBasedYear())", + (SQLExpr) paramers.get(0).value); + break; + case "day_of_year": + functionStr = dateFunctionTemplate("dayOfYear", (SQLExpr) paramers.get(0).value); + break; + case "day_of_month": + case "dayofmonth": + functionStr = dateFunctionTemplate("dayOfMonth", (SQLExpr) paramers.get(0).value); + break; + case "day_of_week": + functionStr = + dateFunctionTemplate( + "dayOfWeek", "getDayOfWeekEnum().getValue()", (SQLExpr) paramers.get(0).value); + break; + case "date": + functionStr = date((SQLExpr) paramers.get(0).value); + break; + case "hour_of_day": + functionStr = dateFunctionTemplate("hour", (SQLExpr) paramers.get(0).value); + break; + case "minute_of_day": + functionStr = + dateFunctionTemplate( + "minuteOfDay", "get(ChronoField.MINUTE_OF_DAY)", (SQLExpr) paramers.get(0).value); + break; + case "minute_of_hour": + functionStr = dateFunctionTemplate("minute", (SQLExpr) paramers.get(0).value); + break; + case "second_of_minute": + functionStr = dateFunctionTemplate("second", (SQLExpr) paramers.get(0).value); + break; + case "timestamp": + functionStr = timestamp((SQLExpr) paramers.get(0).value); + break; + case "maketime": + functionStr = + maketime( + (SQLExpr) 
paramers.get(0).value, + (SQLExpr) paramers.get(1).value, + (SQLExpr) paramers.get(2).value); + break; + case "now": + functionStr = now(); + break; + case "curdate": + functionStr = curdate(); + break; + + case "e": + case "pi": + methodName = methodName.toUpperCase(); + functionStr = mathConstantTemplate("Math." + methodName, methodName); + break; + + case "abs": + case "round": + case "floor": + case "ceil": + case "cbrt": + case "rint": + case "exp": + case "expm1": + case "sqrt": + case "sin": + case "cos": + case "tan": + case "asin": + case "acos": + case "atan": + case "sinh": + case "cosh": + functionStr = + mathSingleValueTemplate( + "Math." + methodName, methodName, (SQLExpr) paramers.get(0).value, name); + break; + + case "rand": + if (paramers.isEmpty()) { + functionStr = rand(); } else { - return new Tuple<>(name, getPropertyOrValue(val1) + "; " - + def(name, func(methodName, false, valueName, getPropertyOrValue(val2)))); + functionStr = rand((SQLExpr) paramers.get(0).value); } - } - - private Tuple mathDoubleValueTemplate(String methodName, String fieldName, SQLExpr val1, - SQLExpr val2) { - String name = nextId(fieldName); - return new Tuple<>(name, def(name, func(methodName, false, - getPropertyOrValue(val1), getPropertyOrValue(val2)))); - } - - private Tuple mathSingleValueTemplate(String methodName, String fieldName, SQLExpr field, - String valueName) { - String name = nextId(fieldName); - if (valueName == null) { - return new Tuple<>(name, def(name, func(methodName, false, getPropertyOrValue(field)))); + break; + + case "cot": + // OpenSearch does not support the function name cot + functionStr = + mathSingleValueTemplate( + "1 / Math.tan", methodName, (SQLExpr) paramers.get(0).value, name); + break; + + case "sign": + case "signum": + methodName = "signum"; + functionStr = + mathSingleValueTemplate( + "Math." 
+ methodName, methodName, (SQLExpr) paramers.get(0).value, name); + break; + + case "pow": + case "power": + methodName = "pow"; + functionStr = + mathDoubleValueTemplate( + "Math." + methodName, + methodName, + (SQLExpr) paramers.get(0).value, + Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), + name); + break; + + case "atan2": + functionStr = + mathDoubleValueTemplate( + "Math." + methodName, + methodName, + (SQLExpr) paramers.get(0).value, + (SQLExpr) paramers.get(1).value); + break; + + case "substring": + functionStr = + substring( + (SQLExpr) paramers.get(0).value, + Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(1).value).toString()), + Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString())); + break; + + case "degrees": + functionStr = degrees((SQLExpr) paramers.get(0).value, name); + break; + case "radians": + functionStr = radians((SQLExpr) paramers.get(0).value, name); + break; + + case "trim": + functionStr = trim((SQLExpr) paramers.get(0).value, name); + break; + + case "add": + functionStr = add((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "subtract": + functionStr = subtract((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "divide": + functionStr = divide((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "multiply": + functionStr = multiply((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "modulus": + functionStr = modulus((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "field": + functionStr = field(Util.expr2Object((SQLExpr) paramers.get(0).value).toString()); + break; + + case "log2": + functionStr = log(SQLUtils.toSQLExpr("2"), (SQLExpr) paramers.get(0).value, name); + break; + case "log10": + functionStr = log10((SQLExpr) paramers.get(0).value); + break; + case "log": + if (paramers.size() > 1) { + functionStr = 
log((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value, name); } else { - return new Tuple<>(name, getPropertyOrValue(field) + "; " - + def(name, func(methodName, false, valueName))); + functionStr = ln((SQLExpr) paramers.get(0).value); } - - } - - private Tuple mathConstantTemplate(String methodName, String fieldName) { - String name = nextId(fieldName); - return new Tuple<>(name, def(name, methodName)); - } - - private Tuple strSingleValueTemplate(String methodName, SQLExpr field, String valueName) { - String name = nextId(methodName); - if (valueName == null) { - return new Tuple<>(name, def(name, getPropertyOrStringValue(field) + "." + func(methodName, false))); - } else { - return new Tuple<>(name, getPropertyOrStringValue(field) + "; " - + def(name, valueName + "." + func(methodName, false))); + break; + case "ln": + functionStr = ln((SQLExpr) paramers.get(0).value); + break; + case "assign": + functionStr = assign((SQLExpr) paramers.get(0).value); + break; + case "length": + functionStr = length((SQLExpr) paramers.get(0).value); + break; + case "replace": + functionStr = + replace( + (SQLExpr) paramers.get(0).value, + paramers.get(1).value.toString(), + paramers.get(2).value.toString()); + break; + case "locate": + int start = 0; + if (paramers.size() > 2) { + start = Integer.parseInt(paramers.get(2).value.toString()); } - - } - - // query: substring(Column expr, int pos, int len) - // painless script: substring(int begin, int end) - // OpenSearch behavior: 1-index, supports out-of-bound index - public Tuple substring(SQLExpr field, int pos, int len) { - String name = nextId("substring"); - // start and end are 0-indexes - int start = pos < 1 ? 0 : pos - 1; - return new Tuple<>(name, StringUtils.format( - "def end = (int) Math.min(%s + %s, %s.length()); " - + def(name, getPropertyOrStringValue(field) + "." 
- + func("substring", false, Integer.toString(start), "end")), - Integer.toString(start), Integer.toString(len), getPropertyOrStringValue(field) - )); - } - - private String lower(String property, String culture) { - return property + ".toLowerCase(Locale.forLanguageTag(\"" + culture + "\"))"; - } - - private String upper(String property, String culture) { - return property + ".toUpperCase(Locale.forLanguageTag(\"" + culture + "\"))"; - } - - private Tuple length(SQLExpr field) { - String name = nextId("length"); - return new Tuple<>(name, def(name, getPropertyOrStringValue(field) + ".length()")); - } - - private Tuple replace(SQLExpr field, String target, String replacement) { - String name = nextId("replace"); - return new Tuple<>(name, def(name, getPropertyOrStringValue(field) - + ".replace(" + target + "," + replacement + ")")); - } - - // OpenSearch behavior: both 'start' and return value are 1-index; return 0 if pattern does not exist; - // support out-of-bound index - private Tuple locate(String pattern, SQLExpr source, int start) { - String name = nextId("locate"); - String docSource = getPropertyOrStringValue(source); - start = start < 1 ? 
0 : start - 1; - return new Tuple<>(name, def(name, StringUtils.format("%s.indexOf(%s,%d)+1", docSource, pattern, start))); - } - - private Tuple rtrim(SQLExpr field) { - String name = nextId("rtrim"); - String fieldString = getPropertyOrStringValue(field); - return new Tuple<>(name, StringUtils.format( - "int pos=%s.length()-1;" + functionStr = + locate(paramers.get(0).value.toString(), (SQLExpr) paramers.get(1).value, start); + break; + case "rtrim": + functionStr = rtrim((SQLExpr) paramers.get(0).value); + break; + case "ltrim": + functionStr = ltrim((SQLExpr) paramers.get(0).value); + break; + case "ascii": + functionStr = ascii((SQLExpr) paramers.get(0).value); + break; + case "left": + functionStr = left((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "right": + functionStr = right((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "if": + functionStr = ifFunc(paramers); + break; + case "ifnull": + functionStr = ifnull((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "isnull": + functionStr = isnull((SQLExpr) paramers.get(0).value); + break; + + default: + } + if (returnValue) { + String generatedFieldName = functionStr.v1(); + String returnCommand = ";return " + generatedFieldName + ";"; + String newScript = functionStr.v2() + returnCommand; + functionStr = new Tuple<>(generatedFieldName, newScript); + } + return functionStr; + } + + public String getLocaleForCaseChangingFunction(List paramers) { + String locale; + if (paramers.size() == 1) { + locale = Locale.getDefault().getLanguage(); + } else { + locale = Util.expr2Object((SQLExpr) paramers.get(1).value).toString(); + } + return locale; + } + + public Tuple cast(String castType, List paramers) + throws SqlParseException { + String name = nextId("cast"); + return new Tuple<>(name, getCastScriptStatement(name, castType, paramers)); + } + + public Tuple upper(SQLExpr field, String locale, String valueName) 
{ + String name = nextId("upper"); + + if (valueName == null) { + return new Tuple<>(name, def(name, upper(getPropertyOrStringValue(field), locale))); + } else { + return new Tuple<>( + name, + getPropertyOrStringValue(field) + + "; " + + def(name, valueName + "." + upper(getPropertyOrStringValue(field), locale))); + } + } + + public Tuple lower(SQLExpr field, String locale, String valueName) { + String name = nextId("lower"); + + if (valueName == null) { + return new Tuple<>(name, def(name, lower(getPropertyOrStringValue(field), locale))); + } else { + return new Tuple<>( + name, + getPropertyOrStringValue(field) + + "; " + + def(name, valueName + "." + lower(getPropertyOrStringValue(field), locale))); + } + } + + private static String def(String name, String value) { + return "def " + name + " = " + value; + } + + private static String doc(SQLExpr field) { + return "doc['" + exprString(field) + "']"; + } + + private static String doc(String field) { + return "doc['" + field + "']"; + } + + private static String exprString(SQLExpr expr) { + return Util.expr2Object(expr).toString(); + } + + private static String func(String methodName, boolean quotes, String... params) { + if (quotes) { + return methodName + "(" + quoteParams(params) + ")"; + } + + return methodName + "(" + String.join(", ", params) + ")"; + } + + /** Helper method to surround each param with '' (single quotes) for painless script */ + private static String quoteParams(String... 
params) { + return Stream.of(params).collect(Collectors.joining("', '", "'", "'")); + } + + private Tuple concat_ws(String split, List columns) { + String name = nextId("concat_ws"); + List result = Lists.newArrayList(); + + for (SQLExpr column : columns) { + String strColumn = exprString(column); + if (strColumn.startsWith("def ")) { + result.add(strColumn); + } else if (isProperty(column)) { + result.add("doc['" + strColumn + "'].value"); + } else { + result.add("'" + strColumn + "'"); + } + } + return new Tuple<>(name, def(name, Joiner.on("+ " + split + " +").join(result))); + } + + // split(Column expr, java.lang.String pattern) + public Tuple split(SQLExpr field, String pattern, int index, String valueName) { + String name = nextId("split"); + final String script; + if (valueName == null) { + script = + def( + name, + getPropertyOrValue(field) + "." + func("split", true, pattern) + "[" + index + "]"); + } else { + script = "; " + def(name, valueName + "." + func("split", true, pattern) + "[" + index + "]"); + } + return new Tuple<>(name, script); + } + + // split(Column expr, java.lang.String pattern) + public Tuple split(SQLExpr field, String pattern, String valueName) { + String name = nextId("split"); + if (valueName == null) { + return new Tuple<>( + name, def(name, getPropertyOrValue(field) + "." + func("split", true, pattern))); + } else { + return new Tuple<>( + name, + getPropertyOrValue(field) + + "; " + + def(name, valueName + "." + func("split", true, pattern))); + } + } + + private Tuple date_format( + SQLExpr field, String pattern, String zoneId, String valueName) { + String name = nextId("date_format"); + if (valueName == null) { + return new Tuple<>( + name, + "def " + + name + + " = DateTimeFormatter.ofPattern('" + + pattern + + "').withZone(" + + (zoneId != null ? 
"ZoneId.of('" + zoneId + "')" : "ZoneId.of(\"UTC\")") + + ").format(Instant.ofEpochMilli(" + + getPropertyOrValue(field) + + ".toInstant().toEpochMilli()))"); + } else { + return new Tuple<>( + name, + exprString(field) + + "; " + + "def " + + name + + " = new SimpleDateFormat('" + + pattern + + "').format(" + + "new Date(" + + valueName + + " - 8*1000*60*60))"); + } + } + + /** + * Explicitly pass in name used to generate variable ID because methodName is not always valid + * + *

    For example, + * functionStr = dateFunctionTemplate("weekOfWeekyear", + * "get(WeekFields.ISO.weekOfWeekBasedYear())", + * (SQLExpr) paramers.get(0).value); + * The old dateFunctionTemplate(methodName, field) passes string + * "get(WeekFields.ISO.weekOfWeekBasedYear())" to nextId() which generates an invalid variable + * name in painless script. + */ + private Tuple dateFunctionTemplate( + String name, String methodName, SQLExpr field) { + String id = nextId(name); + return new Tuple<>(id, def(id, doc(field) + ".value." + methodName)); + } + + private Tuple dateFunctionTemplate(String methodName, SQLExpr field) { + return dateFunctionTemplate(methodName, methodName, field); + } + + public Tuple add(SQLExpr a, SQLExpr b) { + return binaryOpertator("add", "+", a, b); + } + + public Tuple assign(SQLExpr a) { + String name = nextId("assign"); + return new Tuple<>(name, def(name, extractName(a))); + } + + private Tuple modulus(SQLExpr a, SQLExpr b) { + return binaryOpertator("modulus", "%", a, b); + } + + public Tuple field(String a) { + String name = nextId("field"); + return new Tuple<>(name, def(name, doc(a) + ".value")); + } + + private Tuple subtract(SQLExpr a, SQLExpr b) { + return binaryOpertator("subtract", "-", a, b); + } + + private Tuple multiply(SQLExpr a, SQLExpr b) { + return binaryOpertator("multiply", "*", a, b); + } + + private Tuple divide(SQLExpr a, SQLExpr b) { + return binaryOpertator("divide", "/", a, b); + } + + private Tuple binaryOpertator( + String methodName, String operator, SQLExpr a, SQLExpr b) { + String name = nextId(methodName); + return new Tuple<>( + name, + scriptDeclare(a) + + scriptDeclare(b) + + convertType(a) + + convertType(b) + + def(name, extractName(a) + " " + operator + " " + extractName(b))); + } + + private static boolean isProperty(SQLExpr expr) { + return (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr); + } + + private static String 
getPropertyOrValue(SQLExpr expr) { + if (isProperty(expr)) { + return doc(expr) + ".value"; + } else { + return exprString(expr); + } + } + + private static String getPropertyOrValue(String expr) { + if (isQuoted(expr, "'")) { + return expr; + } else if (StringUtils.isNumeric(expr)) { + return expr; + } else { + return doc(expr) + ".value"; + } + } + + private static String getPropertyOrStringValue(SQLExpr expr) { + if (isProperty(expr)) { + return doc(expr) + ".value"; + } else { + return "'" + exprString(expr) + "'"; + } + } + + private static String scriptDeclare(SQLExpr a) { + + if (isProperty(a) || a instanceof SQLNumericLiteralExpr) { + return ""; + } else { + return exprString(a) + ";"; + } + } + + private static String extractName(SQLExpr script) { + if (isProperty(script)) { + return doc(script) + ".value"; + } + String scriptStr = exprString(script); + String[] variance = scriptStr.split(";"); + String newScript = variance[variance.length - 1]; + if (newScript.trim().startsWith("def ")) { + // for now ,if variant is string,then change to double. + return newScript.trim().substring(4).split("=")[0].trim(); + } else { + return scriptStr; + } + } + + // cast(year as int) + + private static String convertType(SQLExpr script) { + String[] variance = exprString(script).split(";"); + String newScript = variance[variance.length - 1]; + if (newScript.trim().startsWith("def ")) { + // for now ,if variant is string,then change to double. 
+ String temp = newScript.trim().substring(4).split("=")[0].trim(); + + return " if( " + + temp + + " instanceof String) " + + temp + + "= Double.parseDouble(" + + temp.trim() + + "); "; + } else { + return ""; + } + } + + private String getScriptText(MethodField field) { + String content = ((SQLTextLiteralExpr) field.getParams().get(1).value).getText(); + return content; + } + + /** + * Using exprString() rather than getPropertyOrValue() for "base" since something like "Math.E" + * gets evaluated incorrectly in getPropertyOrValue(), returning it as a doc value instead of the + * literal string + */ + public Tuple log(SQLExpr base, SQLExpr field, String valueName) { + String name = nextId("log"); + String result; + if (valueName == null) { + result = + def( + name, + func("Math.log", false, getPropertyOrValue(field)) + + "/" + + func("Math.log", false, exprString(base))); + } else { + result = + getPropertyOrValue(field) + + "; " + + def( + name, + func("Math.log", false, valueName) + + "/" + + func("Math.log", false, exprString(base))); + } + return new Tuple<>(name, result); + } + + public Tuple log10(SQLExpr field) { + String name = nextId("log10"); + return new Tuple<>( + name, def(name, StringUtils.format("Math.log10(%s)", getPropertyOrValue(field)))); + } + + public Tuple ln(SQLExpr field) { + String name = nextId("ln"); + return new Tuple<>( + name, def(name, StringUtils.format("Math.log(%s)", getPropertyOrValue(field)))); + } + + public Tuple trim(SQLExpr field, String valueName) { + return strSingleValueTemplate("trim", field, valueName); + } + + private Tuple degrees(SQLExpr field, String valueName) { + return mathSingleValueTemplate("Math.toDegrees", "degrees", field, valueName); + } + + private Tuple radians(SQLExpr field, String valueName) { + return mathSingleValueTemplate("Math.toRadians", "radians", field, valueName); + } + + private Tuple rand(SQLExpr expr) { + String name = nextId("rand"); + return new Tuple<>( + name, def(name, format("new 
Random(%s).nextDouble()", getPropertyOrValue(expr)))); + } + + private Tuple rand() { + String name = nextId("rand"); + return new Tuple<>(name, def(name, "new Random().nextDouble()")); + } + + private Tuple mathDoubleValueTemplate( + String methodName, String fieldName, SQLExpr val1, String val2, String valueName) { + String name = nextId(fieldName); + if (valueName == null) { + return new Tuple<>( + name, + def(name, func(methodName, false, getPropertyOrValue(val1), getPropertyOrValue(val2)))); + } else { + return new Tuple<>( + name, + getPropertyOrValue(val1) + + "; " + + def(name, func(methodName, false, valueName, getPropertyOrValue(val2)))); + } + } + + private Tuple mathDoubleValueTemplate( + String methodName, String fieldName, SQLExpr val1, SQLExpr val2) { + String name = nextId(fieldName); + return new Tuple<>( + name, + def(name, func(methodName, false, getPropertyOrValue(val1), getPropertyOrValue(val2)))); + } + + private Tuple mathSingleValueTemplate( + String methodName, String fieldName, SQLExpr field, String valueName) { + String name = nextId(fieldName); + if (valueName == null) { + return new Tuple<>(name, def(name, func(methodName, false, getPropertyOrValue(field)))); + } else { + return new Tuple<>( + name, getPropertyOrValue(field) + "; " + def(name, func(methodName, false, valueName))); + } + } + + private Tuple mathConstantTemplate(String methodName, String fieldName) { + String name = nextId(fieldName); + return new Tuple<>(name, def(name, methodName)); + } + + private Tuple strSingleValueTemplate( + String methodName, SQLExpr field, String valueName) { + String name = nextId(methodName); + if (valueName == null) { + return new Tuple<>( + name, def(name, getPropertyOrStringValue(field) + "." + func(methodName, false))); + } else { + return new Tuple<>( + name, + getPropertyOrStringValue(field) + + "; " + + def(name, valueName + "." 
+ func(methodName, false))); + } + } + + // query: substring(Column expr, int pos, int len) + // painless script: substring(int begin, int end) + // OpenSearch behavior: 1-index, supports out-of-bound index + public Tuple substring(SQLExpr field, int pos, int len) { + String name = nextId("substring"); + // start and end are 0-indexes + int start = pos < 1 ? 0 : pos - 1; + return new Tuple<>( + name, + StringUtils.format( + "def end = (int) Math.min(%s + %s, %s.length()); " + + def( + name, + getPropertyOrStringValue(field) + + "." + + func("substring", false, Integer.toString(start), "end")), + Integer.toString(start), + Integer.toString(len), + getPropertyOrStringValue(field))); + } + + private String lower(String property, String culture) { + return property + ".toLowerCase(Locale.forLanguageTag(\"" + culture + "\"))"; + } + + private String upper(String property, String culture) { + return property + ".toUpperCase(Locale.forLanguageTag(\"" + culture + "\"))"; + } + + private Tuple length(SQLExpr field) { + String name = nextId("length"); + return new Tuple<>(name, def(name, getPropertyOrStringValue(field) + ".length()")); + } + + private Tuple replace(SQLExpr field, String target, String replacement) { + String name = nextId("replace"); + return new Tuple<>( + name, + def( + name, + getPropertyOrStringValue(field) + ".replace(" + target + "," + replacement + ")")); + } + + // OpenSearch behavior: both 'start' and return value are 1-index; return 0 if pattern does not + // exist; + // support out-of-bound index + private Tuple locate(String pattern, SQLExpr source, int start) { + String name = nextId("locate"); + String docSource = getPropertyOrStringValue(source); + start = start < 1 ? 
0 : start - 1; + return new Tuple<>( + name, def(name, StringUtils.format("%s.indexOf(%s,%d)+1", docSource, pattern, start))); + } + + private Tuple rtrim(SQLExpr field) { + String name = nextId("rtrim"); + String fieldString = getPropertyOrStringValue(field); + return new Tuple<>( + name, + StringUtils.format( + "int pos=%s.length()-1;" + "while(pos >= 0 && Character.isWhitespace(%s.charAt(pos))) {pos --;} " + def(name, "%s.substring(0, pos+1)"), - fieldString, fieldString, fieldString - )); - } - - private Tuple ltrim(SQLExpr field) { - String name = nextId("ltrim"); - String fieldString = getPropertyOrStringValue(field); - return new Tuple<>(name, StringUtils.format( - "int pos=0;" + fieldString, + fieldString, + fieldString)); + } + + private Tuple ltrim(SQLExpr field) { + String name = nextId("ltrim"); + String fieldString = getPropertyOrStringValue(field); + return new Tuple<>( + name, + StringUtils.format( + "int pos=0;" + "while(pos < %s.length() && Character.isWhitespace(%s.charAt(pos))) {pos ++;} " + def(name, "%s.substring(pos, %s.length())"), - fieldString, fieldString, fieldString, fieldString - )); - } - - private Tuple ascii(SQLExpr field) { - String name = nextId("ascii"); - return new Tuple<>(name, def(name, "(int) " + getPropertyOrStringValue(field) + ".charAt(0)")); - } - - private Tuple left(SQLExpr expr, SQLExpr length) { - String name = nextId("left"); - return new Tuple<>(name, StringUtils.format( - "def len = (int) Math.min(%s, %s.length()); def %s = %s.substring(0, len)", - exprString(length), getPropertyOrStringValue(expr), name, getPropertyOrStringValue(expr))); - } - - private Tuple right(SQLExpr expr, SQLExpr length) { - String name = nextId("right"); - return new Tuple<>(name, StringUtils.format( - "def start = (int) Math.max(0, %s.length()-%s); def %s = %s.substring(start)", - getPropertyOrStringValue(expr), exprString(length), name, getPropertyOrStringValue(expr))); - } - - private Tuple date(SQLExpr field) { - String name = 
nextId("date"); - return new Tuple<>(name, def(name, - "LocalDate.parse(" + getPropertyOrStringValue(field) + ".toString()," - + "DateTimeFormatter.ISO_DATE_TIME)")); - } - - private Tuple timestamp(SQLExpr field) { - String name = nextId("timestamp"); - return new Tuple<>(name, def(name, - "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss').format(" - + "DateTimeFormatter.ISO_DATE_TIME.parse(" - + getPropertyOrStringValue(field) + ".toString()))")); - } - - private Tuple maketime(SQLExpr hr, SQLExpr min, SQLExpr sec) { - String name = nextId("maketime"); - return new Tuple<>(name, def(name, StringUtils.format( + fieldString, + fieldString, + fieldString, + fieldString)); + } + + private Tuple ascii(SQLExpr field) { + String name = nextId("ascii"); + return new Tuple<>(name, def(name, "(int) " + getPropertyOrStringValue(field) + ".charAt(0)")); + } + + private Tuple left(SQLExpr expr, SQLExpr length) { + String name = nextId("left"); + return new Tuple<>( + name, + StringUtils.format( + "def len = (int) Math.min(%s, %s.length()); def %s = %s.substring(0, len)", + exprString(length), + getPropertyOrStringValue(expr), + name, + getPropertyOrStringValue(expr))); + } + + private Tuple right(SQLExpr expr, SQLExpr length) { + String name = nextId("right"); + return new Tuple<>( + name, + StringUtils.format( + "def start = (int) Math.max(0, %s.length()-%s); def %s = %s.substring(start)", + getPropertyOrStringValue(expr), + exprString(length), + name, + getPropertyOrStringValue(expr))); + } + + private Tuple date(SQLExpr field) { + String name = nextId("date"); + return new Tuple<>( + name, + def( + name, + "LocalDate.parse(" + + getPropertyOrStringValue(field) + + ".toString()," + + "DateTimeFormatter.ISO_DATE_TIME)")); + } + + private Tuple timestamp(SQLExpr field) { + String name = nextId("timestamp"); + return new Tuple<>( + name, + def( + name, + "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss').format(" + + "DateTimeFormatter.ISO_DATE_TIME.parse(" + + 
getPropertyOrStringValue(field) + + ".toString()))")); + } + + private Tuple maketime(SQLExpr hr, SQLExpr min, SQLExpr sec) { + String name = nextId("maketime"); + return new Tuple<>( + name, + def( + name, + StringUtils.format( "LocalTime.of(%s, %s, %s).format(DateTimeFormatter.ofPattern('HH:mm:ss'))", hr.toString(), min.toString(), sec.toString()))); - } - - private Tuple now() { - String name = nextId("now"); - return new Tuple<>(name, def(name, "new SimpleDateFormat('HH:mm:ss').format(System.currentTimeMillis())")); - } - - private Tuple curdate() { - String name = nextId("curdate"); - return new Tuple<>(name, def(name, "new SimpleDateFormat('yyyy-MM-dd').format(System.currentTimeMillis())")); - } - - private Tuple ifFunc(List paramers) { - String expr1 = paramers.get(1).value.toString(); - String expr2 = paramers.get(2).value.toString(); - String name = nextId("if"); - - /** Input with null is regarded as false */ - if (paramers.get(0).value instanceof SQLNullExpr) { - return new Tuple<>(name, def(name, expr2)); - } - if (paramers.get(0).value instanceof MethodField) { - String condition = getScriptText((MethodField) paramers.get(0).value); - return new Tuple<>(name, "boolean cond = " + condition + ";" - + def(name, "cond ? " + expr1 + " : " + expr2)); - } else if (paramers.get(0).value instanceof SQLBooleanExpr) { - Boolean condition = ((SQLBooleanExpr) paramers.get(0).value).getValue(); - if (condition) { - return new Tuple<>(name, def(name, expr1)); - } else { - return new Tuple<>(name, def(name, expr2)); - } - } else { - /** - * Detailed explanation of cases that come here: - * the condition expression would be in the format of a=b: - * a is parsed as the key (String) of a KVValue (get from paramers.get(0)) - * and b is parsed as the value (Object) of this KVValue. 
- * - * Either a or b could be a column name, literal, or a number: - * - if isNumeric is true --> number - * - else if this string is single quoted --> literal - * - else --> column name - */ - String key = getPropertyOrValue(paramers.get(0).key); - String value = getPropertyOrValue(paramers.get(0).value.toString()); - String condition = key + " == " + value; - return new Tuple<>(name, "boolean cond = " + condition + ";" - + def(name, "cond ? " + expr1 + " : " + expr2)); - } - } - - private Tuple ifnull(SQLExpr condition, SQLExpr expr) { - String name = nextId("ifnull"); - if (condition instanceof SQLNullExpr) { - return new Tuple<>(name, def(name, expr.toString())); - } - if (isProperty(condition)) { - return new Tuple<>(name, def(name, doc(condition) + ".size()==0 ? " + expr.toString() + " : " - + getPropertyOrValue(condition))); - } else { - String condStr = Strings.isNullOrEmpty(condition.toString()) ? null : getPropertyOrStringValue(condition); - return new Tuple<>(name, def(name, condStr)); - } - } - - private Tuple isnull(SQLExpr expr) { - String name = nextId("isnull"); - if (expr instanceof SQLNullExpr) { - return new Tuple<>(name, def(name, "1")); - } - if (isProperty(expr)) { - return new Tuple<>(name, def(name, doc(expr) + ".size()==0 ? 
1 : 0")); - } - // cases that return 1: - // expr is null || expr is math func but tends to throw "divided by zero" arithmetic exception - String resultStr = "0"; - if (Strings.isNullOrEmpty(expr.toString())) { - resultStr = "1"; - } - if (expr instanceof SQLCharExpr && this.generatedIds.size() > 1) { - // the expr is a math expression - String mathExpr = ((SQLCharExpr) expr).getText(); - return new Tuple<>(name, StringUtils.format( - "try {%s;} " - + "catch(ArithmeticException e) " - + "{return 1;} " - + "def %s=0", - mathExpr, name, name) - ); - } - return new Tuple<>(name, def(name, resultStr)); - } - - public String getCastScriptStatement(String name, String castType, List paramers) - throws SqlParseException { - String castFieldName = String.format("doc['%s'].value", paramers.get(0).toString()); - switch (StringUtils.toUpper(castType)) { - case "INT": - case "LONG": - case "FLOAT": - case "DOUBLE": - return getCastToNumericValueScript(name, castFieldName, StringUtils.toLower(castType)); - case "STRING": - return String.format("def %s = %s.toString()", name, castFieldName); - case "DATETIME": - return String.format("def %s = DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" - + "DateTimeFormatter.ISO_DATE_TIME.parse(%s.toString()))", name, castFieldName); - default: - throw new SqlParseException("Unsupported cast type " + castType); - } - } - - private String getCastToNumericValueScript(String varName, String docValue, String targetType) { - String script = - "def %1$s = (%2$s instanceof boolean) " - + "? (%2$s ? 1 : 0) " - + ": Double.parseDouble(%2$s.toString()).%3$sValue()"; - return StringUtils.format(script, varName, docValue, targetType); - } - - /** - * Returns return type of script function. This is simple approach, that might be not the best solution in the long - * term. 
For example - for JDBC, if the column type in index is INTEGER, and the query is "select column+5", current - * approach will return type of result column as DOUBLE, although there is enough information to understand that - * it might be safely treated as INTEGER. - */ - public static Schema.Type getScriptFunctionReturnType(MethodField field, Schema.Type resolvedType) { - String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); - if (functionName.equals("cast")) { - String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); - return getCastFunctionReturnType(castType); - } - return resolvedType; - } - - public static Schema.Type getCastFunctionReturnType(String castType) { - switch (StringUtils.toUpper(castType)) { - case "FLOAT": - return Schema.Type.FLOAT; - case "DOUBLE": - return Schema.Type.DOUBLE; - case "INT": - return Schema.Type.INTEGER; - case "STRING": - return Schema.Type.TEXT; - case "DATETIME": - return Schema.Type.DATE; - case "LONG": - return Schema.Type.LONG; - default: - throw new UnsupportedOperationException( - StringUtils.format("The following type is not supported by cast(): %s", castType) - ); - } - } - - /** - * - * @param field - * @return Schema.Type.TEXT or DOUBLE - * There are only two ORDER BY types (TEXT, NUMBER) in OpenSearch, so the Type that is returned here essentially - * indicates the category of the function as opposed to the actual return type. 
- */ - public static Schema.Type getOrderByFieldType(Field field) { - String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); - if (functionName.equals("cast")) { - String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); - return getCastFunctionReturnType(castType); - } - - if (numberOperators.contains(functionName) || mathConstants.contains(functionName) - || trigFunctions.contains(functionName) || binaryOperators.contains(functionName)) { - return Schema.Type.DOUBLE; - } else if (dateFunctions.contains(functionName)) { - if (functionName.equals("date_format") || functionName.equals("now") - || functionName.equals("curdate") || functionName.equals("date") - || functionName.equals("timestamp") || functionName.equals("monthname")) { - return Schema.Type.TEXT; - } - return Schema.Type.DOUBLE; - } else if (stringFunctions.contains(functionName) || stringOperators.contains(functionName)) { - return Schema.Type.TEXT; - } - + } + + private Tuple now() { + String name = nextId("now"); + return new Tuple<>( + name, def(name, "new SimpleDateFormat('HH:mm:ss').format(System.currentTimeMillis())")); + } + + private Tuple curdate() { + String name = nextId("curdate"); + return new Tuple<>( + name, def(name, "new SimpleDateFormat('yyyy-MM-dd').format(System.currentTimeMillis())")); + } + + private Tuple ifFunc(List paramers) { + String expr1 = paramers.get(1).value.toString(); + String expr2 = paramers.get(2).value.toString(); + String name = nextId("if"); + + /** Input with null is regarded as false */ + if (paramers.get(0).value instanceof SQLNullExpr) { + return new Tuple<>(name, def(name, expr2)); + } + if (paramers.get(0).value instanceof MethodField) { + String condition = getScriptText((MethodField) paramers.get(0).value); + return new Tuple<>( + name, "boolean cond = " + condition + ";" + def(name, "cond ? 
" + expr1 + " : " + expr2)); + } else if (paramers.get(0).value instanceof SQLBooleanExpr) { + Boolean condition = ((SQLBooleanExpr) paramers.get(0).value).getValue(); + if (condition) { + return new Tuple<>(name, def(name, expr1)); + } else { + return new Tuple<>(name, def(name, expr2)); + } + } else { + /** + * Detailed explanation of cases that come here: the condition expression would be in the + * format of a=b: a is parsed as the key (String) of a KVValue (get from paramers.get(0)) and + * b is parsed as the value (Object) of this KVValue. + * + *

    Either a or b could be a column name, literal, or a number: - if isNumeric is true --> + * number - else if this string is single quoted --> literal - else --> column name + */ + String key = getPropertyOrValue(paramers.get(0).key); + String value = getPropertyOrValue(paramers.get(0).value.toString()); + String condition = key + " == " + value; + return new Tuple<>( + name, "boolean cond = " + condition + ";" + def(name, "cond ? " + expr1 + " : " + expr2)); + } + } + + private Tuple ifnull(SQLExpr condition, SQLExpr expr) { + String name = nextId("ifnull"); + if (condition instanceof SQLNullExpr) { + return new Tuple<>(name, def(name, expr.toString())); + } + if (isProperty(condition)) { + return new Tuple<>( + name, + def( + name, + doc(condition) + + ".size()==0 ? " + + expr.toString() + + " : " + + getPropertyOrValue(condition))); + } else { + String condStr = + Strings.isNullOrEmpty(condition.toString()) ? null : getPropertyOrStringValue(condition); + return new Tuple<>(name, def(name, condStr)); + } + } + + private Tuple isnull(SQLExpr expr) { + String name = nextId("isnull"); + if (expr instanceof SQLNullExpr) { + return new Tuple<>(name, def(name, "1")); + } + if (isProperty(expr)) { + return new Tuple<>(name, def(name, doc(expr) + ".size()==0 ? 
1 : 0")); + } + // cases that return 1: + // expr is null || expr is math func but tends to throw "divided by zero" arithmetic exception + String resultStr = "0"; + if (Strings.isNullOrEmpty(expr.toString())) { + resultStr = "1"; + } + if (expr instanceof SQLCharExpr && this.generatedIds.size() > 1) { + // the expr is a math expression + String mathExpr = ((SQLCharExpr) expr).getText(); + return new Tuple<>( + name, + StringUtils.format( + "try {%s;} " + "catch(ArithmeticException e) " + "{return 1;} " + "def %s=0", + mathExpr, name, name)); + } + return new Tuple<>(name, def(name, resultStr)); + } + + public String getCastScriptStatement(String name, String castType, List paramers) + throws SqlParseException { + String castFieldName = String.format("doc['%s'].value", paramers.get(0).toString()); + switch (StringUtils.toUpper(castType)) { + case "INT": + case "LONG": + case "FLOAT": + case "DOUBLE": + return getCastToNumericValueScript(name, castFieldName, StringUtils.toLower(castType)); + case "STRING": + return String.format("def %s = %s.toString()", name, castFieldName); + case "DATETIME": + return String.format( + "def %s = DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" + + "DateTimeFormatter.ISO_DATE_TIME.parse(%s.toString()))", + name, castFieldName); + default: + throw new SqlParseException("Unsupported cast type " + castType); + } + } + + private String getCastToNumericValueScript(String varName, String docValue, String targetType) { + String script = + "def %1$s = (%2$s instanceof boolean) " + + "? (%2$s ? 1 : 0) " + + ": Double.parseDouble(%2$s.toString()).%3$sValue()"; + return StringUtils.format(script, varName, docValue, targetType); + } + + /** + * Returns return type of script function. This is simple approach, that might be not the best + * solution in the long term. 
For example - for JDBC, if the column type in index is INTEGER, and + * the query is "select column+5", current approach will return type of result column as DOUBLE, + * although there is enough information to understand that it might be safely treated as INTEGER. + */ + public static Schema.Type getScriptFunctionReturnType( + MethodField field, Schema.Type resolvedType) { + String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); + if (functionName.equals("cast")) { + String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); + return getCastFunctionReturnType(castType); + } + return resolvedType; + } + + public static Schema.Type getCastFunctionReturnType(String castType) { + switch (StringUtils.toUpper(castType)) { + case "FLOAT": + return Schema.Type.FLOAT; + case "DOUBLE": + return Schema.Type.DOUBLE; + case "INT": + return Schema.Type.INTEGER; + case "STRING": + return Schema.Type.TEXT; + case "DATETIME": + return Schema.Type.DATE; + case "LONG": + return Schema.Type.LONG; + default: throw new UnsupportedOperationException( - String.format( - "The following method is not supported in Schema for Order By: %s", - functionName)); - } + StringUtils.format("The following type is not supported by cast(): %s", castType)); + } + } + + /** + * @param field + * @return Schema.Type.TEXT or DOUBLE There are only two ORDER BY types (TEXT, NUMBER) in + * OpenSearch, so the Type that is returned here essentially indicates the category of the + * function as opposed to the actual return type. 
+ */ + public static Schema.Type getOrderByFieldType(Field field) { + String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); + if (functionName.equals("cast")) { + String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); + return getCastFunctionReturnType(castType); + } + + if (numberOperators.contains(functionName) + || mathConstants.contains(functionName) + || trigFunctions.contains(functionName) + || binaryOperators.contains(functionName)) { + return Schema.Type.DOUBLE; + } else if (dateFunctions.contains(functionName)) { + if (functionName.equals("date_format") + || functionName.equals("now") + || functionName.equals("curdate") + || functionName.equals("date") + || functionName.equals("timestamp") + || functionName.equals("monthname")) { + return Schema.Type.TEXT; + } + return Schema.Type.DOUBLE; + } else if (stringFunctions.contains(functionName) || stringOperators.contains(functionName)) { + return Schema.Type.TEXT; + } + + throw new UnsupportedOperationException( + String.format( + "The following method is not supported in Schema for Order By: %s", functionName)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java index 515d980db9..8a3975713b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import com.google.common.base.Strings; @@ -17,106 +16,102 @@ */ public class StringUtils { - /** - * Returns a formatted string using the specified format string and - * arguments, as well as the {@link Locale#ROOT} locale. 
- * - * @param format format string - * @param args arguments referenced by the format specifiers in the format string - * @return A formatted string - * @throws java.util.IllegalFormatException If a format string contains an illegal syntax, a format - * specifier that is incompatible with the given arguments, - * insufficient arguments given the format string, or other - * illegal conditions. - * @see java.lang.String#format(Locale, String, Object...) - */ - public static String format(final String format, Object... args) { - return String.format(Locale.ROOT, format, args); - } + /** + * Returns a formatted string using the specified format string and arguments, as well as the + * {@link Locale#ROOT} locale. + * + * @param format format string + * @param args arguments referenced by the format specifiers in the format string + * @return A formatted string + * @throws java.util.IllegalFormatException If a format string contains an illegal syntax, a + * format specifier that is incompatible with the given arguments, insufficient arguments + * given the format string, or other illegal conditions. + * @see java.lang.String#format(Locale, String, Object...) + */ + public static String format(final String format, Object... args) { + return String.format(Locale.ROOT, format, args); + } - /** - * Converts all of the characters in this {@code String} to lower - * case using the rules of the {@link Locale#ROOT} locale. This is equivalent to calling - * {@link String#toLowerCase(Locale)} with {@link Locale#ROOT}. - * - * @param input the input String - * @return the {@code String}, converted to lowercase - * @see java.lang.String#toLowerCase(Locale) - */ - public static String toLower(final String input) { - return input.toLowerCase(Locale.ROOT); - } + /** + * Converts all of the characters in this {@code String} to lower case using the rules of the + * {@link Locale#ROOT} locale. This is equivalent to calling {@link String#toLowerCase(Locale)} + * with {@link Locale#ROOT}. 
+ * + * @param input the input String + * @return the {@code String}, converted to lowercase + * @see java.lang.String#toLowerCase(Locale) + */ + public static String toLower(final String input) { + return input.toLowerCase(Locale.ROOT); + } - /** - * Converts all of the characters in this {@code String} to upper - * case using the rules of the {@link Locale#ROOT} locale. This is equivalent to calling - * {@link String#toUpperCase(Locale)} with {@link Locale#ROOT}. - * - * @param input the input String - * @return the {@code String}, converted to uppercase - * @see java.lang.String#toUpperCase(Locale) - */ - public static String toUpper(final String input) { - return input.toUpperCase(Locale.ROOT); - } + /** + * Converts all the characters in this {@code String} to upper case using the rules of the {@link + * Locale#ROOT} locale. This is equivalent to calling {@link String#toUpperCase(Locale)} with + * {@link Locale#ROOT}. + * + * @param input the input String + * @return the {@code String}, converted to uppercase + * @see java.lang.String#toUpperCase(Locale) + */ + public static String toUpper(final String input) { + return input.toUpperCase(Locale.ROOT); + } - /** - * Count how many occurrences of character in this input {@code Sequence}. - * - * @param input the input string - * @param match char to be matched - * @return number of occurrences - */ - public static int countMatches(CharSequence input, char match) { - return Math.toIntExact(input.chars(). - filter(c -> c == match). - count()); - } + /** + * Count how many occurrences of character in this input {@code Sequence}. 
+ * + * @param input the input string + * @param match char to be matched + * @return number of occurrences + */ + public static int countMatches(CharSequence input, char match) { + return Math.toIntExact(input.chars().filter(c -> c == match).count()); + } - /** - * - * @param text string - * @param quote - * @return An unquoted string whose outer pair of back-ticks (if any) has been removed - */ - public static String unquoteSingleField(String text, String quote) { - if (isQuoted(text, quote)) { - return text.substring(quote.length(), text.length() - quote.length()); - } - return text; + /** + * @param text string + * @param quote + * @return An unquoted string whose outer pair of back-ticks (if any) has been removed + */ + public static String unquoteSingleField(String text, String quote) { + if (isQuoted(text, quote)) { + return text.substring(quote.length(), text.length() - quote.length()); } + return text; + } - public static String unquoteSingleField(String text) { - return unquoteSingleField(text, "`"); - } + public static String unquoteSingleField(String text) { + return unquoteSingleField(text, "`"); + } - /** - * - * @param text - * @return A string whose each dot-seperated field has been unquoted from back-ticks (if any) - */ - public static String unquoteFullColumn(String text, String quote) { - String[] strs = text.split("\\."); - for (int i = 0; i < strs.length; i++) { - String unquotedSubstr = unquoteSingleField(strs[i], quote); - strs[i] = unquotedSubstr; - } - return String.join(".", strs); + /** + * @param text + * @return A string whose each dot-separated field has been unquoted from back-ticks (if any) + */ + public static String unquoteFullColumn(String text, String quote) { + String[] strs = text.split("\\."); + for (int i = 0; i < strs.length; i++) { + String unquotedSubstr = unquoteSingleField(strs[i], quote); + strs[i] = unquotedSubstr; } + return String.join(".", strs); + } - public static String unquoteFullColumn(String text) { - return 
unquoteFullColumn(text, "`"); - } + public static String unquoteFullColumn(String text) { + return unquoteFullColumn(text, "`"); + } - public static boolean isQuoted(String text, String quote) { - return !Strings.isNullOrEmpty(text) && text.startsWith(quote) && text.endsWith(quote); - } + public static boolean isQuoted(String text, String quote) { + return !Strings.isNullOrEmpty(text) && text.startsWith(quote) && text.endsWith(quote); + } - public static boolean isNumeric(String text) { - return Doubles.tryParse(text) != null; - } + public static boolean isNumeric(String text) { + return Doubles.tryParse(text) != null; + } - private StringUtils() { - throw new AssertionError(getClass().getCanonicalName() + " is a utility class and must not be initialized"); - } + private StringUtils() { + throw new AssertionError( + getClass().getCanonicalName() + " is a utility class and must not be initialized"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java index bd1b7f3865..632074bbbe 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import com.alibaba.druid.sql.ast.SQLExpr; @@ -38,231 +37,247 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.parser.ElasticSqlExprParser; - public class Util { - public static final String NESTED_JOIN_TYPE = "NestedJoinType"; - - public static String joiner(List lists, String oper) { - - if (lists.size() == 0) { - return null; - } + public static final String NESTED_JOIN_TYPE = "NestedJoinType"; - StringBuilder sb = new StringBuilder(lists.get(0).toString()); - for (int i = 1; i < lists.size(); i++) { - sb.append(oper); - sb.append(lists.get(i).toString()); - } + public static String joiner(List lists, String 
oper) { - return sb.toString(); + if (lists.size() == 0) { + return null; } - public static Object removeTableAilasFromField(Object expr, String tableAlias) { - - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - String name = expr.toString().replace("`", ""); - if (tableAlias != null) { - String aliasPrefix = tableAlias + "."; - if (name.startsWith(aliasPrefix)) { - String newFieldName = name.replaceFirst(aliasPrefix, ""); - return new SQLIdentifierExpr(newFieldName); - } - } - } - return expr; + StringBuilder sb = new StringBuilder(lists.get(0).toString()); + for (int i = 1; i < lists.size(); i++) { + sb.append(oper); + sb.append(lists.get(i).toString()); } + return sb.toString(); + } - public static Object expr2Object(SQLExpr expr) { - return expr2Object(expr, ""); - } + public static Object removeTableAilasFromField(Object expr, String tableAlias) { - public static Object expr2Object(SQLExpr expr, String charWithQuote) { - Object value = null; - if (expr instanceof SQLNumericLiteralExpr) { - value = ((SQLNumericLiteralExpr) expr).getNumber(); - } else if (expr instanceof SQLCharExpr) { - value = charWithQuote + ((SQLCharExpr) expr).getText() + charWithQuote; - } else if (expr instanceof SQLIdentifierExpr) { - value = expr.toString(); - } else if (expr instanceof SQLPropertyExpr) { - value = expr.toString(); - } else if (expr instanceof SQLVariantRefExpr) { - value = expr.toString(); - } else if (expr instanceof SQLAllColumnExpr) { - value = "*"; - } else if (expr instanceof SQLValuableExpr) { - value = ((SQLValuableExpr) expr).getValue(); - } else { - //throw new SqlParseException("can not support this type " + expr.getClass()); + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + String name = expr.toString().replace("`", ""); + if (tableAlias != null) { + String aliasPrefix = tableAlias + "."; + if (name.startsWith(aliasPrefix)) 
{ + String newFieldName = name.replaceFirst(aliasPrefix, ""); + return new SQLIdentifierExpr(newFieldName); } - return value; + } } - - public static Object getScriptValue(SQLExpr expr) throws SqlParseException { - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - return "doc['" + expr.toString() + "'].value"; - } else if (expr instanceof SQLValuableExpr) { - return ((SQLValuableExpr) expr).getValue(); - } - throw new SqlParseException("could not parse sqlBinaryOpExpr need to be identifier/valuable got" - + expr.getClass().toString() + " with value:" + expr.toString()); + return expr; + } + + public static Object expr2Object(SQLExpr expr) { + return expr2Object(expr, ""); + } + + public static Object expr2Object(SQLExpr expr, String charWithQuote) { + Object value = null; + if (expr instanceof SQLNumericLiteralExpr) { + value = ((SQLNumericLiteralExpr) expr).getNumber(); + } else if (expr instanceof SQLCharExpr) { + value = charWithQuote + ((SQLCharExpr) expr).getText() + charWithQuote; + } else if (expr instanceof SQLIdentifierExpr) { + value = expr.toString(); + } else if (expr instanceof SQLPropertyExpr) { + value = expr.toString(); + } else if (expr instanceof SQLVariantRefExpr) { + value = expr.toString(); + } else if (expr instanceof SQLAllColumnExpr) { + value = "*"; + } else if (expr instanceof SQLValuableExpr) { + value = ((SQLValuableExpr) expr).getValue(); + } else { + // throw new SqlParseException("can not support this type " + expr.getClass()); } - - public static Object getScriptValueWithQuote(SQLExpr expr, String quote) throws SqlParseException { - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - return "doc['" + expr.toString() + "'].value"; - } else if (expr instanceof SQLCharExpr) { - return quote + ((SQLCharExpr) expr).getValue() + quote; - } else if (expr instanceof SQLIntegerExpr) { - return ((SQLIntegerExpr) 
expr).getValue(); - } else if (expr instanceof SQLNumericLiteralExpr) { - return ((SQLNumericLiteralExpr) expr).getNumber(); - } else if (expr instanceof SQLNullExpr) { - return ((SQLNullExpr) expr).toString().toLowerCase(); - } - throw new SqlParseException("could not parse sqlBinaryOpExpr need to be identifier/valuable got" - + expr.getClass().toString() + " with value:" + expr.toString()); + return value; + } + + public static Object getScriptValue(SQLExpr expr) throws SqlParseException { + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + return "doc['" + expr.toString() + "'].value"; + } else if (expr instanceof SQLValuableExpr) { + return ((SQLValuableExpr) expr).getValue(); } - - public static boolean isFromJoinOrUnionTable(SQLExpr expr) { - SQLObject temp = expr; - AtomicInteger counter = new AtomicInteger(10); - while (temp != null && !(expr instanceof SQLSelectQueryBlock) - && !(expr instanceof SQLJoinTableSource) && !(expr instanceof SQLUnionQuery) && counter.get() > 0) { - counter.decrementAndGet(); - temp = temp.getParent(); - if (temp instanceof SQLSelectQueryBlock) { - SQLTableSource from = ((SQLSelectQueryBlock) temp).getFrom(); - if (from instanceof SQLJoinTableSource || from instanceof SQLUnionQuery) { - return true; - } - } - if (temp instanceof SQLJoinTableSource || temp instanceof SQLUnionQuery) { - return true; - } - } - return false; + throw new SqlParseException( + "could not parse sqlBinaryOpExpr need to be identifier/valuable got" + + expr.getClass().toString() + + " with value:" + + expr.toString()); + } + + public static Object getScriptValueWithQuote(SQLExpr expr, String quote) + throws SqlParseException { + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + return "doc['" + expr.toString() + "'].value"; + } else if (expr instanceof SQLCharExpr) { + return quote + ((SQLCharExpr) expr).getValue() + 
quote; + } else if (expr instanceof SQLIntegerExpr) { + return ((SQLIntegerExpr) expr).getValue(); + } else if (expr instanceof SQLNumericLiteralExpr) { + return ((SQLNumericLiteralExpr) expr).getNumber(); + } else if (expr instanceof SQLNullExpr) { + return ((SQLNullExpr) expr).toString().toLowerCase(); } - - public static double[] KV2DoubleArr(List params) { - double[] ds = new double[params.size()]; - int i = 0; - for (KVValue v : params) { - ds[i] = Double.parseDouble(v.value.toString()); - i++; + throw new SqlParseException( + "could not parse sqlBinaryOpExpr need to be identifier/valuable got" + + expr.getClass().toString() + + " with value:" + + expr.toString()); + } + + public static boolean isFromJoinOrUnionTable(SQLExpr expr) { + SQLObject temp = expr; + AtomicInteger counter = new AtomicInteger(10); + while (temp != null + && !(expr instanceof SQLSelectQueryBlock) + && !(expr instanceof SQLJoinTableSource) + && !(expr instanceof SQLUnionQuery) + && counter.get() > 0) { + counter.decrementAndGet(); + temp = temp.getParent(); + if (temp instanceof SQLSelectQueryBlock) { + SQLTableSource from = ((SQLSelectQueryBlock) temp).getFrom(); + if (from instanceof SQLJoinTableSource || from instanceof SQLUnionQuery) { + return true; } - return ds; + } + if (temp instanceof SQLJoinTableSource || temp instanceof SQLUnionQuery) { + return true; + } } - - - public static String extendedToString(SQLExpr sqlExpr) { - if (sqlExpr instanceof SQLTextLiteralExpr) { - return ((SQLTextLiteralExpr) sqlExpr).getText(); - } - return sqlExpr.toString(); + return false; + } + + public static double[] KV2DoubleArr(List params) { + double[] ds = new double[params.size()]; + int i = 0; + for (KVValue v : params) { + ds[i] = Double.parseDouble(v.value.toString()); + i++; } + return ds; + } - public static String[] concatStringsArrays(String[] a1, String[] a2) { - String[] strings = new String[a1.length + a2.length]; - for (int i = 0; i < a1.length; i++) { - strings[i] = a1[i]; - } - for 
(int i = 0; i < a2.length; i++) { - strings[a1.length + i] = a2[i]; - } - return strings; + public static String extendedToString(SQLExpr sqlExpr) { + if (sqlExpr instanceof SQLTextLiteralExpr) { + return ((SQLTextLiteralExpr) sqlExpr).getText(); } + return sqlExpr.toString(); + } - public static Object searchPathInMap(Map fieldsMap, String[] path) { - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + public static String[] concatStringsArrays(String[] a1, String[] a2) { + String[] strings = new String[a1.length + a2.length]; + for (int i = 0; i < a1.length; i++) { + strings[i] = a1[i]; + } + for (int i = 0; i < a2.length; i++) { + strings[a1.length + i] = a2[i]; } + return strings; + } + + public static Object searchPathInMap(Map fieldsMap, String[] path) { + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; + } + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; + } + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); + } - public static Object deepSearchInMap(Map fieldsMap, String field) { - if (field.contains(".")) { - String[] split = field.split("\\."); - return searchPathInMap(fieldsMap, split); - } - return fieldsMap.get(field); + public static Object deepSearchInMap(Map fieldsMap, String field) { + if (field.contains(".")) { + String[] split = field.split("\\."); + return searchPathInMap(fieldsMap, split); } + return fieldsMap.get(field); + } - public static boolean clearEmptyPaths(Map map) { - if (map.size() == 0) { 
- return true; - } - Set keysToDelete = new HashSet<>(); - for (Map.Entry entry : map.entrySet()) { - Object value = entry.getValue(); - if (Map.class.isAssignableFrom(value.getClass())) { - if (clearEmptyPaths((Map) value)) { - keysToDelete.add(entry.getKey()); - } - } - } - if (keysToDelete.size() != 0) { - if (map.size() == keysToDelete.size()) { - map.clear(); - return true; - } - for (String key : keysToDelete) { - // TODO: seems like a bug, either fix, or just get rid of for loop and remove the first key - map.remove(key); - return false; - } + public static boolean clearEmptyPaths(Map map) { + if (map.size() == 0) { + return true; + } + Set keysToDelete = new HashSet<>(); + for (Map.Entry entry : map.entrySet()) { + Object value = entry.getValue(); + if (Map.class.isAssignableFrom(value.getClass())) { + if (clearEmptyPaths((Map) value)) { + keysToDelete.add(entry.getKey()); } + } + } + if (keysToDelete.size() != 0) { + if (map.size() == keysToDelete.size()) { + map.clear(); + return true; + } + for (String key : keysToDelete) { + // TODO: seems like a bug, either fix, or just get rid of for loop and remove the first key + map.remove(key); return false; + } } - - public static GetIndexRequestBuilder prepareIndexRequestBuilder(Client client, IndexStatement statement) { - /* - * indexPattern represents wildcard as '.*' which is the regex syntax for matching anything but - * indexRequestBuilder uses the file-match syntax like UNIX which is just '*', so the pattern is converted - * in case its added to the request below - */ - String indexPattern = statement.getIndexPattern().replace(".*", "*"); - - /* - * Ideally all features should be removed from the indexRequest used in SHOW to prevent wasted data - * since only the index name is required in the JDBC format response. However, the type is obtained from the - * mappings response so this feature will need to be set if retrieving type is necessary in other formats. 
- * (For the time being it is included since the GUI returns types for SHOW queries) - */ - GetIndexRequestBuilder indexRequestBuilder = client.admin().indices() - .prepareGetIndex() - .setFeatures(GetIndexRequest.Feature.MAPPINGS) - .setLocal(true); - - /* - * Since the index request supports index names with wildcard (*) but not (.) it is checked for here so that the - * results returned can be reduced if possible (the regex checks in the ResultSet classes handle the rest). - */ - if (!indexPattern.contains(".")) { - indexRequestBuilder.addIndices(indexPattern); - } - - return indexRequestBuilder; + return false; + } + + public static GetIndexRequestBuilder prepareIndexRequestBuilder( + Client client, IndexStatement statement) { + /* + * indexPattern represents wildcard as '.*' which is the regex syntax for matching anything but + * indexRequestBuilder uses the file-match syntax like UNIX which is just '*', so the pattern is converted + * in case its added to the request below + */ + String indexPattern = statement.getIndexPattern().replace(".*", "*"); + + /* + * Ideally all features should be removed from the indexRequest used in SHOW to prevent wasted data + * since only the index name is required in the JDBC format response. However, the type is obtained from the + * mappings response so this feature will need to be set if retrieving type is necessary in other formats. + * (For the time being it is included since the GUI returns types for SHOW queries) + */ + GetIndexRequestBuilder indexRequestBuilder = + client + .admin() + .indices() + .prepareGetIndex() + .setFeatures(GetIndexRequest.Feature.MAPPINGS) + .setLocal(true); + + /* + * Since the index request supports index names with wildcard (*) but not (.) it is checked for here so that the + * results returned can be reduced if possible (the regex checks in the ResultSet classes handle the rest). 
+ */ + if (!indexPattern.contains(".")) { + indexRequestBuilder.addIndices(indexPattern); } - public static SQLExpr toSqlExpr(String sql) { - SQLExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); + return indexRequestBuilder; + } - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal SQL expression : " + sql); - } - return expr; - } + public static SQLExpr toSqlExpr(String sql) { + SQLExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal SQL expression : " + sql); + } + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java index a894f4311a..fbdcca2bb0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; import static java.util.Collections.emptyList; @@ -14,42 +13,39 @@ import org.junit.Assert; import org.junit.Test; -/** - * Test cases for symbol similarity - */ +/** Test cases for symbol similarity */ public class SymbolSimilarityTest { - @Test - public void noneCandidateShouldReturnTargetStringItself() { - String target = "test"; - String mostSimilarSymbol = new SimilarSymbols(emptyList()).mostSimilarTo(target); - Assert.assertEquals(target, mostSimilarSymbol); - } - - @Test - public void singleCandidateShouldReturnTheOnlyCandidate() { - String target = "test"; - String candidate = "hello"; - String mostSimilarSymbol = new SimilarSymbols(singletonList(candidate)).mostSimilarTo(target); - Assert.assertEquals(candidate, mostSimilarSymbol); - } - - @Test - public void twoCandidatesShouldReturnMostSimilarCandidate() { - String target = 
"test"; - String mostSimilar = "tests"; - List candidates = Arrays.asList("hello", mostSimilar); - String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); - Assert.assertEquals(mostSimilar, mostSimilarSymbol); - } - - @Test - public void manyCandidatesShouldReturnMostSimilarCandidate() { - String target = "test"; - String mostSimilar = "tests"; - List candidates = Arrays.asList("hello", mostSimilar, "world"); - String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); - Assert.assertEquals(mostSimilar, mostSimilarSymbol); - } - + @Test + public void noneCandidateShouldReturnTargetStringItself() { + String target = "test"; + String mostSimilarSymbol = new SimilarSymbols(emptyList()).mostSimilarTo(target); + Assert.assertEquals(target, mostSimilarSymbol); + } + + @Test + public void singleCandidateShouldReturnTheOnlyCandidate() { + String target = "test"; + String candidate = "hello"; + String mostSimilarSymbol = new SimilarSymbols(singletonList(candidate)).mostSimilarTo(target); + Assert.assertEquals(candidate, mostSimilarSymbol); + } + + @Test + public void twoCandidatesShouldReturnMostSimilarCandidate() { + String target = "test"; + String mostSimilar = "tests"; + List candidates = Arrays.asList("hello", mostSimilar); + String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); + Assert.assertEquals(mostSimilar, mostSimilarSymbol); + } + + @Test + public void manyCandidatesShouldReturnMostSimilarCandidate() { + String target = "test"; + String mostSimilar = "tests"; + List candidates = Arrays.asList("hello", mostSimilar, "world"); + String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); + Assert.assertEquals(mostSimilar, mostSimilarSymbol); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java index bd71fd2500..765bb0616e 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; import static java.util.stream.Collectors.toList; @@ -17,124 +16,115 @@ import org.opensearch.sql.legacy.antlr.syntax.SyntaxAnalysisException; /** - * Test cases focused on illegal syntax testing (denylist) along with a few normal cases not covered previously. - * All other normal cases should be covered in existing unit test and IT. + * Test cases focused on illegal syntax testing (denylist) along with a few normal cases not covered + * previously. All other normal cases should be covered in existing unit test and IT. */ public class SyntaxAnalysisTest { - /** public accessor is required by @Rule annotation */ - @Rule - public ExpectedException exception = ExpectedException.none(); - - private OpenSearchLegacySqlAnalyzer - analyzer = new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); - - /** In reality exception occurs before reaching new parser for now */ - @Test - public void unsupportedKeywordShouldThrowException() { - expectValidationFailWithErrorMessage( - "INSERT INTO accounts VALUES ('a')", - "offending symbol [INSERT]" - ); - } - - /** - * Why we need to let it go and verify in semantic analyzer? - * Parser treats LOG123 a valid column and stops at '(' which gives wrong location and expected token - * In this case it's hard for parser to figure out if this is a wrong function name indeed or not. - * So we let it pass as an UDF and fail in semantic analyzer with meaningful message. 
- */ - @Test //(expected = SyntaxAnalysisException.class) - public void unsupportedFunctionShouldThrowException() { - validate("SELECT * FROM accounts WHERE LOG123(balance) = 1"); - } - - @Test - public void unsupportedOperatorShouldPassSyntaxCheck() { - expectValidationFailWithErrorMessage( - "SELECT * FROM accounts WHERE age <=> 1", - "offending symbol [>]" - ); - } - - @Test - public void missingFromClauseShouldThrowException() { - expectValidationFailWithErrorMessage( - "SELECT 1", - "offending symbol []" // parsing was unable to terminate normally - ); - } - - @Test - public void missingWhereKeywordShouldThrowException() { - expectValidationFailWithErrorMessage( - "SELECT * FROM accounts age = 1", - "offending symbol [=]", // parser thought 'age' is alias of 'accounts' and failed at '=' - "Expecting", ";" // "Expecting tokens in {, ';'}" + /** public accessor is required by @Rule annotation */ + @Rule public ExpectedException exception = ExpectedException.none(); + + private OpenSearchLegacySqlAnalyzer analyzer = + new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); + + /** In reality exception occurs before reaching new parser for now */ + @Test + public void unsupportedKeywordShouldThrowException() { + expectValidationFailWithErrorMessage( + "INSERT INTO accounts VALUES ('a')", "offending symbol [INSERT]"); + } + + /** + * Why we need to let it go and verify in semantic analyzer? Parser treats LOG123 a valid column + * and stops at '(' which gives wrong location and expected token In this case it's hard for + * parser to figure out if this is a wrong function name indeed or not. So we let it pass as an + * UDF and fail in semantic analyzer with meaningful message. 
+ */ + @Test // (expected = SyntaxAnalysisException.class) + public void unsupportedFunctionShouldThrowException() { + validate("SELECT * FROM accounts WHERE LOG123(balance) = 1"); + } + + @Test + public void unsupportedOperatorShouldPassSyntaxCheck() { + expectValidationFailWithErrorMessage( + "SELECT * FROM accounts WHERE age <=> 1", "offending symbol [>]"); + } + + @Test + public void missingFromClauseShouldThrowException() { + expectValidationFailWithErrorMessage( + "SELECT 1", "offending symbol []" // parsing was unable to terminate normally ); - } - - @Test - public void someKeywordsShouldBeAbleToUseAsIdentifier() { - validate("SELECT AVG(balance) AS avg FROM accounts"); - } - - @Test - public void specialIndexNameShouldPass() { - validate("SELECT * FROM accounts/temp"); - validate("SELECT * FROM account*"); - validate("SELECT * FROM opensearch-accounts"); - validate("SELECT * FROM opensearch-account*"); - } - - @Test - public void typeNamePatternShouldThrowException() { - expectValidationFailWithErrorMessage( - "SELECT * FROM accounts/tem*", - "offending symbol [*]" + } + + @Test + public void missingWhereKeywordShouldThrowException() { + expectValidationFailWithErrorMessage( + "SELECT * FROM accounts age = 1", + "offending symbol [=]", // parser thought 'age' is alias of 'accounts' and failed at '=' + "Expecting", + ";" // "Expecting tokens in {, ';'}" ); - } - - @Test - public void systemIndexNameShouldPass() { - validate("SELECT * FROM .opensearch_dashboards"); - } - - @Test - public void useMetadataFieldShouldPass() { - validate("SELECT @timestamp FROM accounts"); - } - - @Test - public void leftJoinOnNestedFieldWithoutOnClauseShouldPass() { - validate("SELECT * FROM accounts a LEFT JOIN a.projects p"); - } - - @Test - public void useDeepNestedFieldShouldPass() { - validate("SELECT a.projects.name FROM accounts a"); - } - - /** As the translation is not supported for now, check this in semantic analyzer */ - @Test - public void 
arithmeticExpressionInWhereClauseShouldPass() { - validate("SELECT * FROM accounts WHERE age + 1 = 10"); - } - - @Test - public void queryEndWithSemiColonShouldPass() { - validate("SELECT * FROM accounts;"); - } - - private void expectValidationFailWithErrorMessage(String query, String... messages) { - exception.expect(SyntaxAnalysisException.class); - exception.expectMessage(allOf(Arrays.stream(messages). - map(Matchers::containsString). - collect(toList()))); - validate(query); - } - - private void validate(String sql) { - analyzer.analyzeSyntax(sql); - } + } + + @Test + public void someKeywordsShouldBeAbleToUseAsIdentifier() { + validate("SELECT AVG(balance) AS avg FROM accounts"); + } + + @Test + public void specialIndexNameShouldPass() { + validate("SELECT * FROM accounts/temp"); + validate("SELECT * FROM account*"); + validate("SELECT * FROM opensearch-accounts"); + validate("SELECT * FROM opensearch-account*"); + } + + @Test + public void typeNamePatternShouldThrowException() { + expectValidationFailWithErrorMessage("SELECT * FROM accounts/tem*", "offending symbol [*]"); + } + + @Test + public void systemIndexNameShouldPass() { + validate("SELECT * FROM .opensearch_dashboards"); + } + + @Test + public void useMetadataFieldShouldPass() { + validate("SELECT @timestamp FROM accounts"); + } + + @Test + public void leftJoinOnNestedFieldWithoutOnClauseShouldPass() { + validate("SELECT * FROM accounts a LEFT JOIN a.projects p"); + } + + @Test + public void useDeepNestedFieldShouldPass() { + validate("SELECT a.projects.name FROM accounts a"); + } + + /** As the translation is not supported for now, check this in semantic analyzer */ + @Test + public void arithmeticExpressionInWhereClauseShouldPass() { + validate("SELECT * FROM accounts WHERE age + 1 = 10"); + } + + @Test + public void queryEndWithSemiColonShouldPass() { + validate("SELECT * FROM accounts;"); + } + + private void expectValidationFailWithErrorMessage(String query, String... 
messages) { + exception.expect(SyntaxAnalysisException.class); + exception.expectMessage( + allOf(Arrays.stream(messages).map(Matchers::containsString).collect(toList()))); + validate(query); + } + + private void validate(String sql) { + analyzer.analyzeSyntax(sql); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java index 6671542298..df258270b9 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java @@ -3,154 +3,147 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; -/** - * Semantic analysis test for aggregate functions. - */ +/** Semantic analysis test for aggregate functions. 
*/ public class SemanticAnalyzerAggregateFunctionTest extends SemanticAnalyzerTestBase { - @Ignore("To be implemented") - @Test(expected = SemanticAnalysisException.class) - public void useAggregateFunctionInWhereClauseShouldFail() { - validate("SELECT * FROM semantics WHERE AVG(balance) > 10000"); - } - - @Test - public void useAggregateFunctionInSelectClauseShouldPass() { - validate( - "SELECT" + - " city," + - " COUNT(*)," + - " MAX(age)," + - " MIN(balance)," + - " AVG(manager.salary)," + - " SUM(balance)" + - "FROM semantics " + - "GROUP BY city"); - } - - @Test - public void useAggregateFunctionInSelectClauseWithoutGroupByShouldPass() { - validate( - "SELECT" + - " COUNT(*)," + - " MAX(age)," + - " MIN(balance)," + - " AVG(manager.salary)," + - " SUM(balance)" + - "FROM semantics"); - } - - @Test - public void countFunctionCallOnAnyFieldShouldPass() { - validate( - "SELECT" + - " COUNT(address)," + - " COUNT(age)," + - " COUNT(birthday)," + - " COUNT(location)," + - " COUNT(manager.address)," + - " COUNT(employer)" + - "FROM semantics"); - } - - @Test - public void maxFunctionCallOnTextFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT MAX(address) FROM semantics", - "Function [MAX] cannot work with [TEXT].", - "Usage: MAX(NUMBER T) -> T" - ); - } - - @Test - public void minFunctionCallOnDateFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT MIN(birthday) FROM semantics", - "Function [MIN] cannot work with [DATE].", - "Usage: MIN(NUMBER T) -> T" - ); - } - - @Test - public void avgFunctionCallOnBooleanFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT AVG(p.active) FROM semantics s, s.projects p", - "Function [AVG] cannot work with [BOOLEAN].", - "Usage: AVG(NUMBER T) -> DOUBLE" - ); - } - - @Test - public void sumFunctionCallOnBooleanFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT SUM(city) FROM semantics", - "Function [SUM] cannot work with [KEYWORD].", - "Usage: SUM(NUMBER 
T) -> T" - ); - } - - @Test - public void useAvgFunctionCallAliasInHavingClauseShouldPass() { - validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city HAVING avg > 10"); - } - - @Test - public void useAvgAndMaxFunctionCallAliasInHavingClauseShouldPass() { - validate( - "SELECT city, AVG(age) AS avg, MAX(balance) AS bal FROM semantics " + - "GROUP BY city HAVING avg > 10 AND bal > 10000" - ); - } - - @Test - public void useAvgFunctionCallWithoutAliasInHavingShouldPass() { - validate("SELECT city, AVG(age) FROM semantics GROUP BY city HAVING AVG(age) > 10"); - } - - @Test - public void useDifferentAggregateFunctionInHavingClauseShouldPass() { - validate("SELECT city, AVG(age) FROM semantics GROUP BY city HAVING COUNT(*) > 10 AND SUM(balance) <= 10000"); - } - - @Test - public void useAvgFunctionCallAliasInOrderByClauseShouldPass() { - validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city ORDER BY avg"); - } - - @Test - public void useAvgFunctionCallAliasInGroupByAndOrderByClauseShouldPass() { - validate("SELECT SUBSTRING(address, 0, 3) AS add FROM semantics GROUP BY add ORDER BY add"); - } - - @Test - public void useColumnNameAliasInOrderByClauseShouldPass() { - validate("SELECT age AS a, AVG(balance) FROM semantics GROUP BY age ORDER BY a"); - } - - @Test - public void useExpressionAliasInOrderByClauseShouldPass() { - validate("SELECT age + 1 AS a FROM semantics GROUP BY age ORDER BY a"); - } - - @Test - public void useAvgFunctionCallWithTextFieldInHavingClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT city FROM semantics GROUP BY city HAVING AVG(address) > 10", - "Function [AVG] cannot work with [TEXT].", - "Usage: AVG(NUMBER T) -> DOUBLE" - ); - } - - @Test - public void useCountFunctionCallWithNestedFieldShouldPass() { - validate("SELECT * FROM semantics s, s.projects p GROUP BY city HAVING COUNT(p) > 1"); - validate("SELECT * FROM semantics s, s.projects p, p.members m GROUP BY city HAVING COUNT(m) > 1"); - } - + 
@Ignore("To be implemented") + @Test(expected = SemanticAnalysisException.class) + public void useAggregateFunctionInWhereClauseShouldFail() { + validate("SELECT * FROM semantics WHERE AVG(balance) > 10000"); + } + + @Test + public void useAggregateFunctionInSelectClauseShouldPass() { + validate( + "SELECT" + + " city," + + " COUNT(*)," + + " MAX(age)," + + " MIN(balance)," + + " AVG(manager.salary)," + + " SUM(balance)" + + "FROM semantics " + + "GROUP BY city"); + } + + @Test + public void useAggregateFunctionInSelectClauseWithoutGroupByShouldPass() { + validate( + "SELECT" + + " COUNT(*)," + + " MAX(age)," + + " MIN(balance)," + + " AVG(manager.salary)," + + " SUM(balance)" + + "FROM semantics"); + } + + @Test + public void countFunctionCallOnAnyFieldShouldPass() { + validate( + "SELECT" + + " COUNT(address)," + + " COUNT(age)," + + " COUNT(birthday)," + + " COUNT(location)," + + " COUNT(manager.address)," + + " COUNT(employer)" + + "FROM semantics"); + } + + @Test + public void maxFunctionCallOnTextFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT MAX(address) FROM semantics", + "Function [MAX] cannot work with [TEXT].", + "Usage: MAX(NUMBER T) -> T"); + } + + @Test + public void minFunctionCallOnDateFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT MIN(birthday) FROM semantics", + "Function [MIN] cannot work with [DATE].", + "Usage: MIN(NUMBER T) -> T"); + } + + @Test + public void avgFunctionCallOnBooleanFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT AVG(p.active) FROM semantics s, s.projects p", + "Function [AVG] cannot work with [BOOLEAN].", + "Usage: AVG(NUMBER T) -> DOUBLE"); + } + + @Test + public void sumFunctionCallOnBooleanFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT SUM(city) FROM semantics", + "Function [SUM] cannot work with [KEYWORD].", + "Usage: SUM(NUMBER T) -> T"); + } + + @Test + public void useAvgFunctionCallAliasInHavingClauseShouldPass() { + 
validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city HAVING avg > 10"); + } + + @Test + public void useAvgAndMaxFunctionCallAliasInHavingClauseShouldPass() { + validate( + "SELECT city, AVG(age) AS avg, MAX(balance) AS bal FROM semantics " + + "GROUP BY city HAVING avg > 10 AND bal > 10000"); + } + + @Test + public void useAvgFunctionCallWithoutAliasInHavingShouldPass() { + validate("SELECT city, AVG(age) FROM semantics GROUP BY city HAVING AVG(age) > 10"); + } + + @Test + public void useDifferentAggregateFunctionInHavingClauseShouldPass() { + validate( + "SELECT city, AVG(age) FROM semantics GROUP BY city HAVING COUNT(*) > 10 AND SUM(balance)" + + " <= 10000"); + } + + @Test + public void useAvgFunctionCallAliasInOrderByClauseShouldPass() { + validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city ORDER BY avg"); + } + + @Test + public void useAvgFunctionCallAliasInGroupByAndOrderByClauseShouldPass() { + validate("SELECT SUBSTRING(address, 0, 3) AS add FROM semantics GROUP BY add ORDER BY add"); + } + + @Test + public void useColumnNameAliasInOrderByClauseShouldPass() { + validate("SELECT age AS a, AVG(balance) FROM semantics GROUP BY age ORDER BY a"); + } + + @Test + public void useExpressionAliasInOrderByClauseShouldPass() { + validate("SELECT age + 1 AS a FROM semantics GROUP BY age ORDER BY a"); + } + + @Test + public void useAvgFunctionCallWithTextFieldInHavingClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT city FROM semantics GROUP BY city HAVING AVG(address) > 10", + "Function [AVG] cannot work with [TEXT].", + "Usage: AVG(NUMBER T) -> DOUBLE"); + } + + @Test + public void useCountFunctionCallWithNestedFieldShouldPass() { + validate("SELECT * FROM semantics s, s.projects p GROUP BY city HAVING COUNT(p) > 1"); + validate( + "SELECT * FROM semantics s, s.projects p, p.members m GROUP BY city HAVING COUNT(m) > 1"); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java index 6f6b09b737..1d5ff595f3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import static org.hamcrest.MatcherAssert.assertThat; @@ -37,561 +36,576 @@ import org.opensearch.sql.legacy.esdomain.LocalClusterState; /** - * Semantic analysis test cases focused on basic scope building logic which is the cornerstone of analysis followed. - * The low abstraction here enumerating all present field names in each test case is intentional for better demonstration. + * Semantic analysis test cases focused on basic scope building logic which is the cornerstone of + * analysis followed. The low abstraction here enumerating all present field names in each test case + * is intentional for better demonstration. 
*/ public class SemanticAnalyzerBasicTest extends SemanticAnalyzerTestBase { - private SemanticContext context; - - private OpenSearchMappingLoader analyzer; + private SemanticContext context; - @Before - public void setUp() { - context = new SemanticContext(); - analyzer = new OpenSearchMappingLoader(context, LocalClusterState.state(), 1000); - } + private OpenSearchMappingLoader analyzer; - @Test - public void contextShouldIncludeAllFieldsAfterVisitingIndexNameInFromClause() { - analyzer.visitIndexName("semantics"); + @Before + public void setUp() { + context = new SemanticContext(); + analyzer = new OpenSearchMappingLoader(context, LocalClusterState.state(), 1000); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(21), - hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG) - ) - ); + @Test + public void contextShouldIncludeAllFieldsAfterVisitingIndexNameInFromClause() { + analyzer.visitIndexName("semantics"); - analyzer.visitAs("", new OpenSearchIndex("semantics", INDEX)); - typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - 
aMapWithSize(41), - hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are also valid identifier in SQL - hasEntry("semantics.address", TEXT), - hasEntry("semantics.age", INTEGER), - hasEntry("semantics.balance", DOUBLE), - hasEntry("semantics.city", KEYWORD), - hasEntry("semantics.birthday", DATE), - hasEntry("semantics.location", GEO_POINT), - hasEntry("semantics.new_field", UNKNOWN), - hasEntry("semantics.field with spaces", TEXT), - hasEntry("semantics.employer", TEXT), - hasEntry("semantics.employer.keyword", KEYWORD), - hasEntry("semantics.projects", (Type) new OpenSearchIndex("semantics.projects", NESTED_FIELD)), - hasEntry("semantics.projects.active", BOOLEAN), - hasEntry("semantics.projects.release", DATE), - hasEntry("semantics.projects.members", (Type) new OpenSearchIndex("semantics.projects.members", NESTED_FIELD)), - hasEntry("semantics.projects.members.name", TEXT), - hasEntry("semantics.manager", OBJECT), - hasEntry("semantics.manager.name", TEXT), - hasEntry("semantics.manager.name.keyword", KEYWORD), - hasEntry("semantics.manager.address", KEYWORD), - hasEntry("semantics.manager.salary", LONG) - ) - ); - } + Map typeByName = 
context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(21), + hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG))); - @Test - public void contextShouldIncludeAllFieldsPrefixedByIndexAliasAfterVisitingIndexNameWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); + analyzer.visitAs("", new OpenSearchIndex("semantics", INDEX)); + typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + 
hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are also valid identifier in SQL + hasEntry("semantics.address", TEXT), + hasEntry("semantics.age", INTEGER), + hasEntry("semantics.balance", DOUBLE), + hasEntry("semantics.city", KEYWORD), + hasEntry("semantics.birthday", DATE), + hasEntry("semantics.location", GEO_POINT), + hasEntry("semantics.new_field", UNKNOWN), + hasEntry("semantics.field with spaces", TEXT), + hasEntry("semantics.employer", TEXT), + hasEntry("semantics.employer.keyword", KEYWORD), + hasEntry( + "semantics.projects", + (Type) new OpenSearchIndex("semantics.projects", NESTED_FIELD)), + hasEntry("semantics.projects.active", BOOLEAN), + hasEntry("semantics.projects.release", DATE), + hasEntry( + "semantics.projects.members", + (Type) new OpenSearchIndex("semantics.projects.members", NESTED_FIELD)), + hasEntry("semantics.projects.members.name", TEXT), + hasEntry("semantics.manager", OBJECT), + hasEntry("semantics.manager.name", TEXT), + hasEntry("semantics.manager.name.keyword", KEYWORD), + hasEntry("semantics.manager.address", KEYWORD), + hasEntry("semantics.manager.salary", LONG))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(41), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new 
OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG) - ) - ); - } + @Test + public void + contextShouldIncludeAllFieldsPrefixedByIndexAliasAfterVisitingIndexNameWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); - @Test - public void contextShouldIncludeSameFieldsAfterVisitingNestedFieldWithoutAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("", new OpenSearchIndex("s.projects", NESTED_FIELD)); + Map 
typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG))); + } - Map typeByName = 
context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(41), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG) - ) - ); - } + @Test + public void 
contextShouldIncludeSameFieldsAfterVisitingNestedFieldWithoutAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("", new OpenSearchIndex("s.projects", NESTED_FIELD)); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", 
GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(46), - // These are also valid because alias is optional in SQL - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", 
GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of nested field alias specified - hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("p.active", BOOLEAN), - hasEntry("p.release", DATE), - hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("p.members.name", TEXT) - ) - ); - } + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingDeepNestedFieldWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects.members"); - analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(46), + // These are also valid because alias is optional in SQL + 
hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of nested field alias specified + hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("p.active", BOOLEAN), + hasEntry("p.release", DATE), + 
hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("p.members.name", TEXT))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingDeepNestedFieldWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects.members"); + analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); - assertThat( - typeByName, - allOf( - aMapWithSize(43), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) 
new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of deep nested field alias specified - hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("m.name", TEXT) - ) - ); - } + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingAllNestedFieldsWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); - analyzer.visitIndexName("s.projects.members"); - analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); + assertThat( + typeByName, + allOf( + aMapWithSize(43), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + 
hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of deep nested field alias specified + hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("m.name", TEXT))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(48), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - 
hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of nested field alias specified - hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("p.active", BOOLEAN), - hasEntry("p.release", DATE), - hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("p.members.name", TEXT), - // Valid because of deep nested field alias specified - hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("m.name", TEXT) - ) - ); - } + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingAllNestedFieldsWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + 
analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); + analyzer.visitIndexName("s.projects.members"); + analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInSubqueryFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(48), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + 
hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of nested field alias specified + hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("p.active", BOOLEAN), + hasEntry("p.release", DATE), + hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("p.members.name", TEXT), + // Valid because of deep nested field alias specified + hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("m.name", TEXT))); + } - context.push(); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInSubqueryFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(46), - // These are also valid because alias is optional in SQL - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - 
hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of nested field alias specified - hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("p.active", BOOLEAN), - hasEntry("p.release", DATE), - hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("p.members.name", TEXT) - ) - ); + context.push(); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); - context.pop(); - typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - 
aMapWithSize(41), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG) - ) - ); - } + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(46), + // These are also valid because alias 
is optional in SQL + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of nested field alias specified + hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("p.active", BOOLEAN), + 
hasEntry("p.release", DATE), + hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("p.members.name", TEXT))); - @Test - public void fieldWithUnknownEsTypeShouldPass() { - analyzer.visitIndexName("semantics"); - Optional type = context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "new_field")); - Assert.assertTrue(type.isPresent()); - Assert.assertSame(UNKNOWN, type.get()); - } + context.pop(); + typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + 
hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG))); + } - @Test - public void fieldWithSpacesInNameShouldPass() { - analyzer.visitIndexName("semantics"); - Optional type = context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "field with spaces")); - Assert.assertTrue(type.isPresent()); - Assert.assertSame(TEXT, type.get()); - } + @Test + public void fieldWithUnknownEsTypeShouldPass() { + analyzer.visitIndexName("semantics"); + Optional type = context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "new_field")); + Assert.assertTrue(type.isPresent()); + Assert.assertSame(UNKNOWN, type.get()); + } + @Test + public void fieldWithSpacesInNameShouldPass() { + analyzer.visitIndexName("semantics"); + Optional type = + context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "field with spaces")); + Assert.assertTrue(type.isPresent()); + Assert.assertSame(TEXT, type.get()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java index 18253bd71f..2b9a5e418c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import static org.hamcrest.Matchers.allOf; @@ -17,54 +16,47 @@ import org.opensearch.sql.legacy.antlr.SqlAnalysisConfig; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test cases for semantic analysis 
configuration - */ +/** Test cases for semantic analysis configuration */ public class SemanticAnalyzerConfigTest extends SemanticAnalyzerTestBase { - @Rule - public ExpectedException exceptionWithoutSuggestion = ExpectedException.none(); - - @Test - public void noAnalysisShouldPerformForNonSelectStatement() { - String sql = "DELETE FROM semantics WHERE age12 = 123"; - expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1000)); - } - - @Test - public void noAnalysisShouldPerformIfDisabledAnalysis() { - String sql = "SELECT * FROM semantics WHERE age12 = 123"; - expectValidationFailWithErrorMessages(sql, "Field [age12] cannot be found or used here."); - expectValidationPassWithConfig(sql, new SqlAnalysisConfig(false, true, 1000)); - } - - @Test - public void noFieldNameSuggestionIfDisabledSuggestion() { - String sql = "SELECT * FROM semantics WHERE age12 = 123"; - expectValidationFailWithErrorMessages(sql, - "Field [age12] cannot be found or used here.", - "Did you mean [age]?"); - - exceptionWithoutSuggestion.expect(SemanticAnalysisException.class); - exceptionWithoutSuggestion.expectMessage( - allOf( - containsString("Field [age12] cannot be found or used here"), - not(containsString("Did you mean")) - ) - ); - new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, false, 1000)). 
- analyze(sql, LocalClusterState.state()); - } - - @Test - public void noAnalysisShouldPerformIfIndexMappingIsLargerThanThreshold() { - String sql = "SELECT * FROM semantics WHERE test = 123"; - expectValidationFailWithErrorMessages(sql, "Field [test] cannot be found or used here."); - expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1)); - } - - private void expectValidationPassWithConfig(String sql, SqlAnalysisConfig config) { - new OpenSearchLegacySqlAnalyzer(config).analyze(sql, LocalClusterState.state()); - } - + @Rule public ExpectedException exceptionWithoutSuggestion = ExpectedException.none(); + + @Test + public void noAnalysisShouldPerformForNonSelectStatement() { + String sql = "DELETE FROM semantics WHERE age12 = 123"; + expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1000)); + } + + @Test + public void noAnalysisShouldPerformIfDisabledAnalysis() { + String sql = "SELECT * FROM semantics WHERE age12 = 123"; + expectValidationFailWithErrorMessages(sql, "Field [age12] cannot be found or used here."); + expectValidationPassWithConfig(sql, new SqlAnalysisConfig(false, true, 1000)); + } + + @Test + public void noFieldNameSuggestionIfDisabledSuggestion() { + String sql = "SELECT * FROM semantics WHERE age12 = 123"; + expectValidationFailWithErrorMessages( + sql, "Field [age12] cannot be found or used here.", "Did you mean [age]?"); + + exceptionWithoutSuggestion.expect(SemanticAnalysisException.class); + exceptionWithoutSuggestion.expectMessage( + allOf( + containsString("Field [age12] cannot be found or used here"), + not(containsString("Did you mean")))); + new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, false, 1000)) + .analyze(sql, LocalClusterState.state()); + } + + @Test + public void noAnalysisShouldPerformIfIndexMappingIsLargerThanThreshold() { + String sql = "SELECT * FROM semantics WHERE test = 123"; + expectValidationFailWithErrorMessages(sql, "Field [test] cannot be found or used here."); + 
expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1)); + } + + private void expectValidationPassWithConfig(String sql, SqlAnalysisConfig config) { + new OpenSearchLegacySqlAnalyzer(config).analyze(sql, LocalClusterState.state()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java index 5ff8875f0c..48d9b6e36c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java @@ -3,21 +3,19 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Test; public class SemanticAnalyzerConstantTest extends SemanticAnalyzerTestBase { - @Test - public void useNegativeIntegerShouldPass() { - validate("SELECT * FROM test WHERE age > -1"); - } - - @Test - public void useNegativeFloatingPointNumberShouldPass() { - validate("SELECT * FROM test WHERE balance > -1.23456"); - } + @Test + public void useNegativeIntegerShouldPass() { + validate("SELECT * FROM test WHERE age > -1"); + } + @Test + public void useNegativeFloatingPointNumberShouldPass() { + validate("SELECT * FROM test WHERE balance > -1.23456"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java index 32c322f8c2..c16ecc33e3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java @@ -3,54 +3,50 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import 
org.junit.Test; -/** - * Semantic analysis test for Elaticsearch special scalar functions - */ +/** Semantic analysis test for Elaticsearch special scalar functions */ public class SemanticAnalyzerESScalarFunctionTest extends SemanticAnalyzerTestBase { - @Test - public void dateFunctionCallWithDateInSelectClauseShouldPass() { - validate("SELECT DAY_OF_MONTH(birthday) FROM semantics"); - validate("SELECT DAY_OF_WEEK(birthday) FROM semantics"); - validate("SELECT DAY_OF_YEAR(birthday) FROM semantics"); - validate("SELECT MINUTE_OF_DAY(birthday) FROM semantics"); - validate("SELECT MINUTE_OF_HOUR(birthday) FROM semantics"); - validate("SELECT MONTH_OF_YEAR(birthday) FROM semantics"); - validate("SELECT WEEK_OF_YEAR(birthday) FROM semantics"); - } - - @Test - public void dateFunctionCallWithDateInWhereClauseShouldPass() { - validate("SELECT * FROM semantics WHERE DAY_OF_MONTH(birthday) = 1"); - validate("SELECT * FROM semantics WHERE DAY_OF_WEEK(birthday) = 1"); - validate("SELECT * FROM semantics WHERE DAY_OF_YEAR(birthday) = 1"); - validate("SELECT * FROM semantics WHERE MINUTE_OF_DAY(birthday) = 1"); - validate("SELECT * FROM semantics WHERE MINUTE_OF_HOUR(birthday) = 1"); - validate("SELECT * FROM semantics WHERE MONTH_OF_YEAR(birthday) = 1"); - validate("SELECT * FROM semantics WHERE WEEK_OF_YEAR(birthday) = 1"); - } - - @Test - public void geoFunctionCallWithGeoPointInWhereClauseShouldPass() { - validate("SELECT * FROM semantics WHERE GEO_BOUNDING_BOX(location, 100.0, 1.0, 101, 0.0)"); - validate("SELECT * FROM semantics WHERE GEO_DISTANCE(location, '1km', 100.5, 0.500001)"); - validate("SELECT * FROM semantics WHERE GEO_DISTANCE_RANGE(location, '1km', 100.5, 0.500001)"); - } - - @Test - public void fullTextMatchFunctionCallWithStringInWhereClauseShouldPass() { - validate("SELECT * FROM semantics WHERE MATCH_PHRASE(address, 'Seattle')"); - validate("SELECT * FROM semantics WHERE MATCHPHRASE(employer, 'Seattle')"); - validate("SELECT * FROM semantics WHERE 
MATCH_QUERY(manager.name, 'Seattle')"); - validate("SELECT * FROM semantics WHERE MATCHQUERY(manager.name, 'Seattle')"); - validate("SELECT * FROM semantics WHERE QUERY('Seattle')"); - validate("SELECT * FROM semantics WHERE WILDCARD_QUERY(manager.name, 'Sea*')"); - validate("SELECT * FROM semantics WHERE WILDCARDQUERY(manager.name, 'Sea*')"); - } - + @Test + public void dateFunctionCallWithDateInSelectClauseShouldPass() { + validate("SELECT DAY_OF_MONTH(birthday) FROM semantics"); + validate("SELECT DAY_OF_WEEK(birthday) FROM semantics"); + validate("SELECT DAY_OF_YEAR(birthday) FROM semantics"); + validate("SELECT MINUTE_OF_DAY(birthday) FROM semantics"); + validate("SELECT MINUTE_OF_HOUR(birthday) FROM semantics"); + validate("SELECT MONTH_OF_YEAR(birthday) FROM semantics"); + validate("SELECT WEEK_OF_YEAR(birthday) FROM semantics"); + } + + @Test + public void dateFunctionCallWithDateInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE DAY_OF_MONTH(birthday) = 1"); + validate("SELECT * FROM semantics WHERE DAY_OF_WEEK(birthday) = 1"); + validate("SELECT * FROM semantics WHERE DAY_OF_YEAR(birthday) = 1"); + validate("SELECT * FROM semantics WHERE MINUTE_OF_DAY(birthday) = 1"); + validate("SELECT * FROM semantics WHERE MINUTE_OF_HOUR(birthday) = 1"); + validate("SELECT * FROM semantics WHERE MONTH_OF_YEAR(birthday) = 1"); + validate("SELECT * FROM semantics WHERE WEEK_OF_YEAR(birthday) = 1"); + } + + @Test + public void geoFunctionCallWithGeoPointInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE GEO_BOUNDING_BOX(location, 100.0, 1.0, 101, 0.0)"); + validate("SELECT * FROM semantics WHERE GEO_DISTANCE(location, '1km', 100.5, 0.500001)"); + validate("SELECT * FROM semantics WHERE GEO_DISTANCE_RANGE(location, '1km', 100.5, 0.500001)"); + } + + @Test + public void fullTextMatchFunctionCallWithStringInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE MATCH_PHRASE(address, 'Seattle')"); + validate("SELECT * FROM 
semantics WHERE MATCHPHRASE(employer, 'Seattle')"); + validate("SELECT * FROM semantics WHERE MATCH_QUERY(manager.name, 'Seattle')"); + validate("SELECT * FROM semantics WHERE MATCHQUERY(manager.name, 'Seattle')"); + validate("SELECT * FROM semantics WHERE QUERY('Seattle')"); + validate("SELECT * FROM semantics WHERE WILDCARD_QUERY(manager.name, 'Sea*')"); + validate("SELECT * FROM semantics WHERE WILDCARDQUERY(manager.name, 'Sea*')"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java index 3e4d3e6eb5..1b9b0dde45 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import static org.opensearch.sql.legacy.util.MultipleIndexClusterUtils.mockMultipleIndexEnv; @@ -12,87 +11,69 @@ import org.junit.Test; public class SemanticAnalyzerFieldTypeTest extends SemanticAnalyzerTestBase { - @Before - public void setup() { - mockMultipleIndexEnv(); - } + @Before + public void setup() { + mockMultipleIndexEnv(); + } - /** - * id has same type in account1 and account2. - */ - @Test - public void accessFieldTypeNotInQueryPassSemanticCheck() { - validate("SELECT id FROM account* WHERE id = 1"); - } + /** id has same type in account1 and account2. */ + @Test + public void accessFieldTypeNotInQueryPassSemanticCheck() { + validate("SELECT id FROM account* WHERE id = 1"); + } - /** - * address doesn't exist in account1. - */ - @Test - public void accessFieldTypeOnlyInOneIndexPassSemanticCheck() { - validate("SELECT address FROM account* WHERE id = 30"); - } + /** address doesn't exist in account1. 
*/ + @Test + public void accessFieldTypeOnlyInOneIndexPassSemanticCheck() { + validate("SELECT address FROM account* WHERE id = 30"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void accessConflictFieldTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT age FROM account* WHERE age = 30", - "Field [age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void accessConflictFieldTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT age FROM account* WHERE age = 30", "Field [age] have conflict type"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void mixNonConflictTypeAndConflictFieldTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT id, age FROM account* WHERE id = 1", - "Field [age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void mixNonConflictTypeAndConflictFieldTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT id, age FROM account* WHERE id = 1", "Field [age] have conflict type"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void conflictFieldTypeWithAliasShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT a.age FROM account* as a", - "Field [a.age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void conflictFieldTypeWithAliasShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT a.age FROM account* as a", "Field [a.age] have conflict type"); + } - /** - * age has different type in account1 and account2. - * Todo, the error message is not accurate. 
- */ - @Test - public void selectAllFieldTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT * FROM account*", - "Field [account*.age] have conflict type"); - } + /** age has different type in account1 and account2. Todo, the error message is not accurate. */ + @Test + public void selectAllFieldTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT * FROM account*", "Field [account*.age] have conflict type"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void selectAllFieldTypeWithAliasShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT a.* FROM account* as a", - "Field [a.age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void selectAllFieldTypeWithAliasShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT a.* FROM account* as a", "Field [a.age] have conflict type"); + } - /** - * a.projects.name has same type in account1 and account2. - */ - @Test - public void selectNestedNoneConflictTypeShouldPassSemanticCheck() { - validate("SELECT a.projects.name FROM account* as a"); - } + /** a.projects.name has same type in account1 and account2. */ + @Test + public void selectNestedNoneConflictTypeShouldPassSemanticCheck() { + validate("SELECT a.projects.name FROM account* as a"); + } - /** - * a.projects.started_year has conflict type in account1 and account2. - */ - @Test - public void selectNestedConflictTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT a.projects.started_year FROM account* as a", - "Field [a.projects.started_year] have conflict type"); - } + /** a.projects.started_year has conflict type in account1 and account2. 
*/ + @Test + public void selectNestedConflictTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT a.projects.started_year FROM account* as a", + "Field [a.projects.started_year] have conflict type"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java index a487a7afaa..2a04321f2f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java @@ -3,191 +3,173 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; /** - * Semantic analyzer tests for FROM clause, including parse single index, multiple indices, - * index + (deep) nested field and multiple statements like UNION/MINUS etc. Basically, we - * need to make sure the environment be set up properly so that semantic analysis followed - * can be performed correctly. + * Semantic analyzer tests for FROM clause, including parse single index, multiple indices, index + + * (deep) nested field and multiple statements like UNION/MINUS etc. Basically, we need to make sure + * the environment be set up properly so that semantic analysis followed can be performed correctly. */ public class SemanticAnalyzerFromClauseTest extends SemanticAnalyzerTestBase { - @Ignore("IndexNotFoundException should be thrown from OpenSearch API directly") - @Test - public void nonExistingIndexNameShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics1" - ); - } - - @Test - public void useNotExistFieldInIndexPatternShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT abc FROM semant* WHERE def = 1", - "Field [def] cannot be found or used here.", - "Did you mean [address]?" 
- ); - } - - @Test - public void useNotExistFieldInIndexAndIndexPatternShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT abc FROM semantics, semant* WHERE def = 1", - "Field [def] cannot be found or used here.", - "Did you mean [address]?" - ); - } - - /** - * As shown below, there are multiple cases for alias: - * 1. Alias is not present: either use full index name as prefix or not. - * 2. Alias is present: either use alias as prefix or not. Full index name is illegal. - */ - @Test - public void indexNameAliasShouldBeOptional() { - validate("SELECT address FROM semantics"); - validate("SELECT address FROM semantics s"); - validate("SELECT * FROM semantics WHERE semantics.address LIKE 'Seattle'"); - } - - @Test - public void useFullIndexNameShouldFailIfAliasIsPresent() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE semantics.address LIKE 'Seattle'", - "Field [semantics.address] cannot be found or used here", - "Did you mean [s.manager.address]?" - ); - } - - @Test - public void invalidIndexNameAliasInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, a.projects p", - "Field [a.projects] cannot be found or used here", - "Did you mean [s.projects]?" - ); - } - - @Test - public void invalidIndexNameAliasInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE a.balance = 10000", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" - ); - } - - @Test - public void invalidIndexNameAliasInGroupByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s GROUP BY a.balance", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" 
- ); - } - - @Test - public void invalidIndexNameAliasInHavingClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s HAVING COUNT(a.balance) > 5", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" - ); - } - - @Test - public void invalidIndexNameAliasInOrderByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s ORDER BY a.balance", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" - ); - } - - @Test - public void invalidIndexNameAliasInOnClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics sem JOIN semantic tic ON sem.age = t.age", - "Field [t.age] cannot be found or used here", - "Did you mean [tic.age]?" - ); - } - - @Test - public void nonNestedFieldInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.manager m", - "Operator [JOIN] cannot work with [INDEX, OBJECT]." - ); - } - - @Test - public void nonExistingNestedFieldInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.project p", - "Field [s.project] cannot be found or used here", - "Did you mean [s.projects]?" 
- ); - } - - @Ignore("Need to figure out a better way to detect naming conflict") - @Test - public void duplicateIndexNameAliasInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.projects s", - "Field [s] is conflicting with field of same name defined by other index" - ); - } - - @Ignore("Need to figure out a better way to detect naming conflict") - @Test - public void duplicateFieldNameFromDifferentIndexShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics INNER JOIN semantics", - "is conflicting with field of same name defined by other index" - ); - } - - @Test - public void validIndexNameAliasShouldPass() { - validate("SELECT * FROM semantics s, s.projects p"); - validate("SELECT * FROM semantics s WHERE s.balance = 10000"); - } - - @Test - public void indexNameWithTypeShouldPass() { - validate("SELECT * FROM semantics/docs WHERE balance = 10000"); - validate("SELECT * FROM semantics/docs s WHERE s.balance = 10000"); - validate("SELECT * FROM semantics/docs s, s.projects p WHERE p.active IS TRUE"); - } - - @Test - public void noIndexAliasShouldPass() { - validate("SELECT * FROM semantics"); - validate("SELECT * FROM semantics, semantics.projects"); - } - - @Test - public void regularJoinShouldPass() { - validate("SELECT * FROM semantics s1, semantics s2"); - validate("SELECT * FROM semantics s1 JOIN semantics s2"); - validate("SELECT * FROM semantics s1 LEFT JOIN semantics s2 ON s1.balance = s2.balance"); - } - - @Test - public void deepNestedFieldInFromClauseShouldPass() { - validate("SELECT * FROM semantics s, s.projects p, p.members m"); - } - - @Test - public void duplicateFieldNameFromDifferentStatementShouldPass() { - validate("SELECT age FROM semantics UNION SELECT age FROM semantic"); - validate("SELECT s.age FROM semantics s UNION SELECT s.age FROM semantic s"); - } - + @Ignore("IndexNotFoundException should be thrown from OpenSearch API directly") + @Test + public void 
nonExistingIndexNameShouldFail() { + expectValidationFailWithErrorMessages("SELECT * FROM semantics1"); + } + + @Test + public void useNotExistFieldInIndexPatternShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT abc FROM semant* WHERE def = 1", + "Field [def] cannot be found or used here.", + "Did you mean [address]?"); + } + + @Test + public void useNotExistFieldInIndexAndIndexPatternShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT abc FROM semantics, semant* WHERE def = 1", + "Field [def] cannot be found or used here.", + "Did you mean [address]?"); + } + + /** + * As shown below, there are multiple cases for alias: 1. Alias is not present: either use full + * index name as prefix or not. 2. Alias is present: either use alias as prefix or not. Full index + * name is illegal. + */ + @Test + public void indexNameAliasShouldBeOptional() { + validate("SELECT address FROM semantics"); + validate("SELECT address FROM semantics s"); + validate("SELECT * FROM semantics WHERE semantics.address LIKE 'Seattle'"); + } + + @Test + public void useFullIndexNameShouldFailIfAliasIsPresent() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE semantics.address LIKE 'Seattle'", + "Field [semantics.address] cannot be found or used here", + "Did you mean [s.manager.address]?"); + } + + @Test + public void invalidIndexNameAliasInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, a.projects p", + "Field [a.projects] cannot be found or used here", + "Did you mean [s.projects]?"); + } + + @Test + public void invalidIndexNameAliasInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE a.balance = 10000", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInGroupByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s GROUP 
BY a.balance", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInHavingClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s HAVING COUNT(a.balance) > 5", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInOrderByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s ORDER BY a.balance", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInOnClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics sem JOIN semantic tic ON sem.age = t.age", + "Field [t.age] cannot be found or used here", + "Did you mean [tic.age]?"); + } + + @Test + public void nonNestedFieldInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.manager m", + "Operator [JOIN] cannot work with [INDEX, OBJECT]."); + } + + @Test + public void nonExistingNestedFieldInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.project p", + "Field [s.project] cannot be found or used here", + "Did you mean [s.projects]?"); + } + + @Ignore("Need to figure out a better way to detect naming conflict") + @Test + public void duplicateIndexNameAliasInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.projects s", + "Field [s] is conflicting with field of same name defined by other index"); + } + + @Ignore("Need to figure out a better way to detect naming conflict") + @Test + public void duplicateFieldNameFromDifferentIndexShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics INNER JOIN semantics", + "is conflicting with field of same name defined by other index"); + } + + @Test + public void 
validIndexNameAliasShouldPass() { + validate("SELECT * FROM semantics s, s.projects p"); + validate("SELECT * FROM semantics s WHERE s.balance = 10000"); + } + + @Test + public void indexNameWithTypeShouldPass() { + validate("SELECT * FROM semantics/docs WHERE balance = 10000"); + validate("SELECT * FROM semantics/docs s WHERE s.balance = 10000"); + validate("SELECT * FROM semantics/docs s, s.projects p WHERE p.active IS TRUE"); + } + + @Test + public void noIndexAliasShouldPass() { + validate("SELECT * FROM semantics"); + validate("SELECT * FROM semantics, semantics.projects"); + } + + @Test + public void regularJoinShouldPass() { + validate("SELECT * FROM semantics s1, semantics s2"); + validate("SELECT * FROM semantics s1 JOIN semantics s2"); + validate("SELECT * FROM semantics s1 LEFT JOIN semantics s2 ON s1.balance = s2.balance"); + } + + @Test + public void deepNestedFieldInFromClauseShouldPass() { + validate("SELECT * FROM semantics s, s.projects p, p.members m"); + } + + @Test + public void duplicateFieldNameFromDifferentStatementShouldPass() { + validate("SELECT age FROM semantics UNION SELECT age FROM semantic"); + validate("SELECT s.age FROM semantics s UNION SELECT s.age FROM semantic s"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java index 3d9133c937..35bcde3f76 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java @@ -3,169 +3,158 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; -/** - * Semantic analyzer tests for identifier - */ +/** Semantic analyzer tests for identifier */ public class SemanticAnalyzerIdentifierTest extends 
SemanticAnalyzerTestBase { - @Ignore("To be implemented") - @Test - public void duplicateFieldAliasInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT age a, COUNT(*) a FROM semantics s, a.projects p", - "Field [a.projects] cannot be found or used here" - ); - } - - @Test - public void fieldWithDifferentCaseInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT Age a FROM semantics", - "Field [Age] cannot be found or used here", - "Did you mean [age]?" - ); - } - - @Test - public void useHiddenFieldShouldPass() { - validate("SELECT _score FROM semantics WHERE _id = 1 AND _type = '_doc'"); - } - - @Ignore("Need to remove single quote or back ticks") - @Test - public void useFieldNameWithSpaceShouldPass() { - validate("SELECT ['field with spaces'] FROM semantics"); - validate("SELECT `field with spaces` FROM semantics"); - } - - @Test - public void nonExistingFieldNameInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT age1 FROM semantics s", - "Field [age1] cannot be found or used here.", - "Did you mean [age]?" - ); - } - - @Test - public void invalidIndexAliasInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, a.projects p", - "Field [a.projects] cannot be found or used here.", - "Did you mean [s.projects]?" - ); - } - - @Test - public void nonExistingFieldNameInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE s.balce = 10000", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingFieldNameInGroupByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s GROUP BY s.balce", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" 
- ); - } - - @Test - public void nonExistingFieldNameInHavingClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s HAVING COUNT(s.balce) > 5", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingFieldNameInOrderByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s ORDER BY s.balce", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingFieldNameInFunctionShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE LOG(s.balce) = 1", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingNestedFieldNameInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.projects p, p.members m WHERE m.nam = 'John'", - "Field [m.nam] cannot be found or used here.", - "Did you mean [m.name]?" - ); - } - - @Test - public void nonExistingNestedFieldNameInFunctionShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE nested(projects.actives) = TRUE", - "Field [projects.actives] cannot be found or used here.", - "Did you mean [projects.active]?" 
- ); - } - - @Test - public void useKeywordInMultiFieldShouldPass() { - validate("SELECT employer.keyword FROM semantics WHERE employer.keyword LIKE 'AWS' GROUP BY employer.keyword"); - validate("SELECT * FROM semantics s WHERE s.manager.name.keyword LIKE 'John'"); - } - - @Test - public void useDeepNestedFieldNameShouldPass() { - validate("SELECT p.* FROM semantics s, s.projects p WHERE p IS NULL"); - validate("SELECT p.active FROM semantics s, s.projects p WHERE p.active = TRUE"); - validate("SELECT m.name FROM semantics s, s.projects p, p.members m WHERE m.name = 'John'"); - } - - @Test - public void useConstantLiteralInSelectClauseShouldPass() { - validate("SELECT 1 FROM semantics"); - validate("SELECT 2.0 FROM semantics"); - //validate("SELECT 'test' FROM semantics"); TODO: why 'test' goes to fullColumnName that can be string literal - validate("SELECT TRUE FROM semantics"); - } - - @Test - public void queryWithBackticksQuotedIndexShouldPass() { - validate("SELECT age FROM `semantics`"); - } - - @Test - public void queryWithBackticksQuotedIndexAliasShouldPass() { - validate("SELECT `s`.age FROM semantics AS `s`"); - validate("SELECT `s t`.age FROM semantics AS `s t`"); - } - - @Test - public void queryWithBackticksQuotedFieldNameShouldPass() { - validate("SELECT `age` FROM semantics"); - validate("SELECT s.`age` FROM semantics AS s"); - validate("SELECT `s`.`age` FROM semantics AS `s`"); - } - - @Test - public void queryWithBackticksQuotedFieldNameInFunctionShouldPass() { - validate("SELECT SUM(`age`) FROM semantics"); - } + @Ignore("To be implemented") + @Test + public void duplicateFieldAliasInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT age a, COUNT(*) a FROM semantics s, a.projects p", + "Field [a.projects] cannot be found or used here"); + } + + @Test + public void fieldWithDifferentCaseInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT Age a FROM semantics", + "Field [Age] cannot be found or 
used here", + "Did you mean [age]?"); + } + + @Test + public void useHiddenFieldShouldPass() { + validate("SELECT _score FROM semantics WHERE _id = 1 AND _type = '_doc'"); + } + + @Ignore("Need to remove single quote or back ticks") + @Test + public void useFieldNameWithSpaceShouldPass() { + validate("SELECT ['field with spaces'] FROM semantics"); + validate("SELECT `field with spaces` FROM semantics"); + } + + @Test + public void nonExistingFieldNameInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT age1 FROM semantics s", + "Field [age1] cannot be found or used here.", + "Did you mean [age]?"); + } + + @Test + public void invalidIndexAliasInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, a.projects p", + "Field [a.projects] cannot be found or used here.", + "Did you mean [s.projects]?"); + } + + @Test + public void nonExistingFieldNameInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE s.balce = 10000", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInGroupByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s GROUP BY s.balce", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInHavingClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s HAVING COUNT(s.balce) > 5", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInOrderByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s ORDER BY s.balce", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInFunctionShouldFail() { + 
expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE LOG(s.balce) = 1", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingNestedFieldNameInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.projects p, p.members m WHERE m.nam = 'John'", + "Field [m.nam] cannot be found or used here.", + "Did you mean [m.name]?"); + } + + @Test + public void nonExistingNestedFieldNameInFunctionShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE nested(projects.actives) = TRUE", + "Field [projects.actives] cannot be found or used here.", + "Did you mean [projects.active]?"); + } + + @Test + public void useKeywordInMultiFieldShouldPass() { + validate( + "SELECT employer.keyword FROM semantics WHERE employer.keyword LIKE 'AWS' GROUP BY" + + " employer.keyword"); + validate("SELECT * FROM semantics s WHERE s.manager.name.keyword LIKE 'John'"); + } + + @Test + public void useDeepNestedFieldNameShouldPass() { + validate("SELECT p.* FROM semantics s, s.projects p WHERE p IS NULL"); + validate("SELECT p.active FROM semantics s, s.projects p WHERE p.active = TRUE"); + validate("SELECT m.name FROM semantics s, s.projects p, p.members m WHERE m.name = 'John'"); + } + + @Test + public void useConstantLiteralInSelectClauseShouldPass() { + validate("SELECT 1 FROM semantics"); + validate("SELECT 2.0 FROM semantics"); + // validate("SELECT 'test' FROM semantics"); TODO: why 'test' goes to fullColumnName that can be + // string literal + validate("SELECT TRUE FROM semantics"); + } + + @Test + public void queryWithBackticksQuotedIndexShouldPass() { + validate("SELECT age FROM `semantics`"); + } + + @Test + public void queryWithBackticksQuotedIndexAliasShouldPass() { + validate("SELECT `s`.age FROM semantics AS `s`"); + validate("SELECT `s t`.age FROM semantics AS `s t`"); + } + + @Test + public void 
queryWithBackticksQuotedFieldNameShouldPass() { + validate("SELECT `age` FROM semantics"); + validate("SELECT s.`age` FROM semantics AS s"); + validate("SELECT `s`.`age` FROM semantics AS `s`"); + } + + @Test + public void queryWithBackticksQuotedFieldNameInFunctionShouldPass() { + validate("SELECT SUM(`age`) FROM semantics"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java index 3c4c71c6ea..319f6c5cfa 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java @@ -3,93 +3,87 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; -/** - * Semantic analyzer tests for multi query like UNION and MINUS - */ +/** Semantic analyzer tests for multi query like UNION and MINUS */ public class SemanticAnalyzerMultiQueryTest extends SemanticAnalyzerTestBase { - @Test - public void unionDifferentResultTypeOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT balance FROM semantics UNION SELECT address FROM semantics", - "Operator [UNION] cannot work with [DOUBLE, TEXT]." - ); - } - - @Test - public void unionDifferentNumberOfResultTypeOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT balance FROM semantics UNION SELECT balance, age FROM semantics", - "Operator [UNION] cannot work with [DOUBLE, (DOUBLE, INTEGER)]." - ); - } - - @Test - public void minusDifferentResultTypeOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT p.active FROM semantics s, s.projects p MINUS SELECT address FROM semantics", - "Operator [MINUS] cannot work with [BOOLEAN, TEXT]." 
- ); - } - - @Test - public void unionSameResultTypeOfTwoQueriesShouldPass() { - validate("SELECT balance FROM semantics UNION SELECT balance FROM semantics"); - } - - @Test - public void unionCompatibleResultTypeOfTwoQueriesShouldPass() { - validate("SELECT balance FROM semantics UNION SELECT age FROM semantics"); - validate("SELECT address FROM semantics UNION ALL SELECT city FROM semantics"); - } - - @Test - public void minusSameResultTypeOfTwoQueriesShouldPass() { - validate("SELECT s.projects.active FROM semantics s UNION SELECT p.active FROM semantics s, s.projects p"); - } - - @Test - public void minusCompatibleResultTypeOfTwoQueriesShouldPass() { - validate("SELECT address FROM semantics MINUS SELECT manager.name.keyword FROM semantics"); - } - - @Test - public void unionSelectStarWithExtraFieldOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics UNION SELECT *, city FROM semantics", - "Operator [UNION] cannot work with [(*), KEYWORD]." - ); - } - - @Test - public void minusSelectStarWithExtraFieldOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT *, address, balance FROM semantics MINUS SELECT * FROM semantics", - "Operator [MINUS] cannot work with [(TEXT, DOUBLE), (*)]." 
- ); - } - - @Test - public void unionSelectStarOfTwoQueriesShouldPass() { - validate("SELECT * FROM semantics UNION SELECT * FROM semantics"); - validate("SELECT *, age FROM semantics UNION SELECT *, balance FROM semantics"); - } - - @Test - public void unionSelectFunctionCallWithSameReturnTypeOfTwoQueriesShouldPass() { - validate("SELECT LOG(balance) FROM semantics UNION SELECT ABS(age) FROM semantics"); - } - - @Ignore("* is empty and ignored in product of select items for now") - @Test - public void unionSelectFieldWithExtraStarOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages("SELECT age FROM semantics UNION SELECT *, age FROM semantics"); - } - + @Test + public void unionDifferentResultTypeOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT balance FROM semantics UNION SELECT address FROM semantics", + "Operator [UNION] cannot work with [DOUBLE, TEXT]."); + } + + @Test + public void unionDifferentNumberOfResultTypeOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT balance FROM semantics UNION SELECT balance, age FROM semantics", + "Operator [UNION] cannot work with [DOUBLE, (DOUBLE, INTEGER)]."); + } + + @Test + public void minusDifferentResultTypeOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT p.active FROM semantics s, s.projects p MINUS SELECT address FROM semantics", + "Operator [MINUS] cannot work with [BOOLEAN, TEXT]."); + } + + @Test + public void unionSameResultTypeOfTwoQueriesShouldPass() { + validate("SELECT balance FROM semantics UNION SELECT balance FROM semantics"); + } + + @Test + public void unionCompatibleResultTypeOfTwoQueriesShouldPass() { + validate("SELECT balance FROM semantics UNION SELECT age FROM semantics"); + validate("SELECT address FROM semantics UNION ALL SELECT city FROM semantics"); + } + + @Test + public void minusSameResultTypeOfTwoQueriesShouldPass() { + validate( + "SELECT s.projects.active FROM semantics s UNION SELECT 
p.active FROM semantics s," + + " s.projects p"); + } + + @Test + public void minusCompatibleResultTypeOfTwoQueriesShouldPass() { + validate("SELECT address FROM semantics MINUS SELECT manager.name.keyword FROM semantics"); + } + + @Test + public void unionSelectStarWithExtraFieldOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics UNION SELECT *, city FROM semantics", + "Operator [UNION] cannot work with [(*), KEYWORD]."); + } + + @Test + public void minusSelectStarWithExtraFieldOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT *, address, balance FROM semantics MINUS SELECT * FROM semantics", + "Operator [MINUS] cannot work with [(TEXT, DOUBLE), (*)]."); + } + + @Test + public void unionSelectStarOfTwoQueriesShouldPass() { + validate("SELECT * FROM semantics UNION SELECT * FROM semantics"); + validate("SELECT *, age FROM semantics UNION SELECT *, balance FROM semantics"); + } + + @Test + public void unionSelectFunctionCallWithSameReturnTypeOfTwoQueriesShouldPass() { + validate("SELECT LOG(balance) FROM semantics UNION SELECT ABS(age) FROM semantics"); + } + + @Ignore("* is empty and ignored in product of select items for now") + @Test + public void unionSelectFieldWithExtraStarOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT age FROM semantics UNION SELECT *, age FROM semantics"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java index 36046aa0ad..bd5aeba507 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java @@ -3,71 +3,62 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Test; -/** - * 
Semantic analysis test cases for operator - */ +/** Semantic analysis test cases for operator */ public class SemanticAnalyzerOperatorTest extends SemanticAnalyzerTestBase { - @Test - public void compareNumberIsBooleanShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE age IS FALSE", - "Operator [IS] cannot work with [INTEGER, BOOLEAN]." - ); - } - - @Test - public void compareTextIsNotBooleanShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE address IS NOT TRUE", - "Operator [IS] cannot work with [TEXT, BOOLEAN]." - ); - } + @Test + public void compareNumberIsBooleanShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE age IS FALSE", + "Operator [IS] cannot work with [INTEGER, BOOLEAN]."); + } - @Test - public void compareNumberEqualsToStringShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE balance = 'test'", - "Operator [=] cannot work with [DOUBLE, STRING]." - ); - } + @Test + public void compareTextIsNotBooleanShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE address IS NOT TRUE", + "Operator [IS] cannot work with [TEXT, BOOLEAN]."); + } - @Test - public void compareSubstringFunctionCallEqualsToNumberShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE SUBSTRING(address, 0, 3) = 1", - "Operator [=] cannot work with [TEXT, INTEGER]." - ); - } + @Test + public void compareNumberEqualsToStringShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE balance = 'test'", + "Operator [=] cannot work with [DOUBLE, STRING]."); + } - @Test - public void compareLogFunctionCallWithIntegerSmallerThanStringShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG(age) < 'test'", - "Operator [<] cannot work with [DOUBLE, STRING]." 
- ); - } + @Test + public void compareSubstringFunctionCallEqualsToNumberShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE SUBSTRING(address, 0, 3) = 1", + "Operator [=] cannot work with [TEXT, INTEGER]."); + } - @Test - public void compareDoubleWithIntegerShouldPass() { - validate("SELECT * FROM semantics WHERE balance >= 1000"); - validate("SELECT * FROM semantics WHERE balance <> 1000"); - validate("SELECT * FROM semantics WHERE balance != 1000"); - } + @Test + public void compareLogFunctionCallWithIntegerSmallerThanStringShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG(age) < 'test'", + "Operator [<] cannot work with [DOUBLE, STRING]."); + } - @Test - public void compareDateWithStringShouldPass() { - validate("SELECT * FROM semantics WHERE birthday = '2019-09-30'"); - } + @Test + public void compareDoubleWithIntegerShouldPass() { + validate("SELECT * FROM semantics WHERE balance >= 1000"); + validate("SELECT * FROM semantics WHERE balance <> 1000"); + validate("SELECT * FROM semantics WHERE balance != 1000"); + } - @Test - public void namedArgumentShouldSkipOperatorTypeCheck() { - validate("SELECT TOPHITS('size'=3, age='desc') FROM semantics GROUP BY city"); - } + @Test + public void compareDateWithStringShouldPass() { + validate("SELECT * FROM semantics WHERE birthday = '2019-09-30'"); + } + @Test + public void namedArgumentShouldSkipOperatorTypeCheck() { + validate("SELECT TOPHITS('size'=3, age='desc') FROM semantics GROUP BY city"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java index 83454b9549..8017c49548 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java @@ -3,270 +3,255 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; -/** - * Semantic analysis tests for scalar function. - */ +/** Semantic analysis tests for scalar function. */ public class SemanticAnalyzerScalarFunctionTest extends SemanticAnalyzerTestBase { - @Test - public void unsupportedScalarFunctionCallInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT DAY() FROM semantics", - "Function [DAY] cannot be found or used here." - ); - } + @Test + public void unsupportedScalarFunctionCallInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT DAY() FROM semantics", "Function [DAY] cannot be found or used here."); + } - @Test - public void unsupportedScalarFunctionCallInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG100(balance) = 1", - "Function [LOG100] cannot be found or used here.", - "Did you mean [LOG10]?" 
- ); - } + @Test + public void unsupportedScalarFunctionCallInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG100(balance) = 1", + "Function [LOG100] cannot be found or used here.", + "Did you mean [LOG10]?"); + } - @Test - public void scalarFunctionCallWithLessArgumentsInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG() = 1", - "Function [LOG] cannot work with [].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void scalarFunctionCallWithLessArgumentsInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG() = 1", + "Function [LOG] cannot work with [].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void scalarFunctionCallWithMoreArgumentsInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG(age, city) = 1", - "Function [LOG] cannot work with [INTEGER, KEYWORD].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void scalarFunctionCallWithMoreArgumentsInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG(age, city) = 1", + "Function [LOG] cannot work with [INTEGER, KEYWORD].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void logFunctionCallWithOneNestedInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT LOG(projects) FROM semantics", - "Function [LOG] cannot work with [NESTED_FIELD].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void logFunctionCallWithOneNestedInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT LOG(projects) FROM semantics", + "Function [LOG] cannot work with [NESTED_FIELD].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void logFunctionCallWithOneTextInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE 
LOG(city) = 1", - "Function [LOG] cannot work with [KEYWORD].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void logFunctionCallWithOneTextInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG(city) = 1", + "Function [LOG] cannot work with [KEYWORD].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void logFunctionCallWithOneNumberShouldPass() { - validate("SELECT LOG(age) FROM semantics"); - validate("SELECT * FROM semantics s WHERE LOG(s.balance) = 1000"); - validate("SELECT LOG(s.manager.salary) FROM semantics s"); - } + @Test + public void logFunctionCallWithOneNumberShouldPass() { + validate("SELECT LOG(age) FROM semantics"); + validate("SELECT * FROM semantics s WHERE LOG(s.balance) = 1000"); + validate("SELECT LOG(s.manager.salary) FROM semantics s"); + } - @Test - public void logFunctionCallInDifferentCaseShouldPass() { - validate("SELECT log(age) FROM semantics"); - validate("SELECT Log(age) FROM semantics"); - validate("SELECT loG(age) FROM semantics"); - } + @Test + public void logFunctionCallInDifferentCaseShouldPass() { + validate("SELECT log(age) FROM semantics"); + validate("SELECT Log(age) FROM semantics"); + validate("SELECT loG(age) FROM semantics"); + } - @Test - public void logFunctionCallWithUnknownFieldShouldPass() { - validate("SELECT LOG(new_field) FROM semantics"); - } + @Test + public void logFunctionCallWithUnknownFieldShouldPass() { + validate("SELECT LOG(new_field) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void substringWithLogFunctionCallWithUnknownFieldShouldPass() { - expectValidationFailWithErrorMessages( - "SELECT SUBSTRING(LOG(new_field), 0, 1) FROM semantics", - "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER]." 
- ," Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T" - ); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void substringWithLogFunctionCallWithUnknownFieldShouldPass() { + expectValidationFailWithErrorMessages( + "SELECT SUBSTRING(LOG(new_field), 0, 1) FROM semantics", + "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER].", + " Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void logFunctionCallWithResultOfAbsFunctionCallWithOneNumberShouldPass() { - validate("SELECT LOG(ABS(age)) FROM semantics"); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void logFunctionCallWithResultOfAbsFunctionCallWithOneNumberShouldPass() { + validate("SELECT LOG(ABS(age)) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void logFunctionCallWithMoreNestedFunctionCallWithOneNumberShouldPass() { - validate("SELECT LOG(ABS(SQRT(balance))) FROM semantics"); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void logFunctionCallWithMoreNestedFunctionCallWithOneNumberShouldPass() { + validate("SELECT LOG(ABS(SQRT(balance))) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void substringFunctionCallWithResultOfAnotherSubstringAndAbsFunctionCallShouldPass() { - validate("SELECT SUBSTRING(SUBSTRING(city, ABS(age), 1), 2, ABS(1)) FROM semantics"); - } + @Ignore( + "Test 
set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void substringFunctionCallWithResultOfAnotherSubstringAndAbsFunctionCallShouldPass() { + validate("SELECT SUBSTRING(SUBSTRING(city, ABS(age), 1), 2, ABS(1)) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void substringFunctionCallWithResultOfMathFunctionCallShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT SUBSTRING(LOG(balance), 2, 3) FROM semantics", - "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER].", - "Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T" - ); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void substringFunctionCallWithResultOfMathFunctionCallShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT SUBSTRING(LOG(balance), 2, 3) FROM semantics", + "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER].", + "Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void logFunctionCallWithResultOfSubstringFunctionCallShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT LOG(SUBSTRING(address, 0, 1)) FROM semantics", - "Function [LOG] cannot work with [TEXT].", - "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE" - ); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void logFunctionCallWithResultOfSubstringFunctionCallShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT LOG(SUBSTRING(address, 0, 1)) FROM semantics", + "Function [LOG] cannot work 
with [TEXT].", + "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE"); + } - @Test - public void allSupportedMathFunctionCallInSelectClauseShouldPass() { - validate( - "SELECT" + - " ABS(age), " + - " ASIN(age), " + - " ATAN(age), " + - " ATAN2(age, age), " + - " CBRT(age), " + - " CEIL(age), " + - " COS(age), " + - " COSH(age), " + - " DEGREES(age), " + - " EXP(age), " + - " EXPM1(age), " + - " FLOOR(age), " + - " LOG(age), " + - " LOG2(age), " + - " LOG10(age), " + - " LN(age), " + - " POW(age), " + - " RADIANS(age), " + - " RINT(age), " + - " ROUND(age), " + - " SIN(age), " + - " SINH(age), " + - " SQRT(age), " + - " TAN(age) " + - "FROM semantics" - ); - } + @Test + public void allSupportedMathFunctionCallInSelectClauseShouldPass() { + validate( + "SELECT" + + " ABS(age), " + + " ASIN(age), " + + " ATAN(age), " + + " ATAN2(age, age), " + + " CBRT(age), " + + " CEIL(age), " + + " COS(age), " + + " COSH(age), " + + " DEGREES(age), " + + " EXP(age), " + + " EXPM1(age), " + + " FLOOR(age), " + + " LOG(age), " + + " LOG2(age), " + + " LOG10(age), " + + " LN(age), " + + " POW(age), " + + " RADIANS(age), " + + " RINT(age), " + + " ROUND(age), " + + " SIN(age), " + + " SINH(age), " + + " SQRT(age), " + + " TAN(age) " + + "FROM semantics"); + } - @Test - public void allSupportedMathFunctionCallInWhereClauseShouldPass() { - validate( - "SELECT * FROM semantics WHERE " + - " ABS(age) = 1 AND " + - " ASIN(age) = 1 AND " + - " ATAN(age) = 1 AND " + - " ATAN2(age, age) = 1 AND " + - " CBRT(age) = 1 AND " + - " CEIL(age) = 1 AND " + - " COS(age) = 1 AND " + - " COSH(age) = 1 AND " + - " DEGREES(age) = 1 AND " + - " EXP(age) = 1 AND " + - " EXPM1(age) = 1 AND " + - " FLOOR(age) = 1 AND " + - " LOG(age) = 1 AND " + - " LOG2(age) = 1 AND " + - " LOG10(age) = 1 AND " + - " LN(age) = 1 AND " + - " POW(age) = 1 AND " + - " RADIANS(age) = 1 AND " + - " RINT(age) = 1 AND " + - " ROUND(age) = 1 AND " + - " SIN(age) = 1 AND " + - " SINH(age) = 1 AND " + - " SQRT(age) = 
1 AND " + - " TAN(age) = 1 " - ); - } + @Test + public void allSupportedMathFunctionCallInWhereClauseShouldPass() { + validate( + "SELECT * FROM semantics WHERE " + + " ABS(age) = 1 AND " + + " ASIN(age) = 1 AND " + + " ATAN(age) = 1 AND " + + " ATAN2(age, age) = 1 AND " + + " CBRT(age) = 1 AND " + + " CEIL(age) = 1 AND " + + " COS(age) = 1 AND " + + " COSH(age) = 1 AND " + + " DEGREES(age) = 1 AND " + + " EXP(age) = 1 AND " + + " EXPM1(age) = 1 AND " + + " FLOOR(age) = 1 AND " + + " LOG(age) = 1 AND " + + " LOG2(age) = 1 AND " + + " LOG10(age) = 1 AND " + + " LN(age) = 1 AND " + + " POW(age) = 1 AND " + + " RADIANS(age) = 1 AND " + + " RINT(age) = 1 AND " + + " ROUND(age) = 1 AND " + + " SIN(age) = 1 AND " + + " SINH(age) = 1 AND " + + " SQRT(age) = 1 AND " + + " TAN(age) = 1 "); + } - @Test - public void allSupportedConstantsUseInSelectClauseShouldPass() { - validate( - "SELECT " + - " E(), " + - " PI() " + - "FROM semantics" - ); - } + @Test + public void allSupportedConstantsUseInSelectClauseShouldPass() { + validate("SELECT " + " E(), " + " PI() " + "FROM semantics"); + } - @Test - public void allSupportedConstantsUseInWhereClauseShouldPass() { - validate( - "SELECT * FROM semantics WHERE " + - " E() > 1 OR " + - " PI() > 1" - ); - } + @Test + public void allSupportedConstantsUseInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE " + " E() > 1 OR " + " PI() > 1"); + } - @Test - public void allSupportedStringFunctionCallInSelectClauseShouldPass() { - validate( - "SELECT * FROM semantics WHERE " + - " SUBSTRING(city, 0, 3) = 'Sea' AND " + - " UPPER(city) = 'SEATTLE' AND " + - " LOWER(city) = 'seattle'" - ); - } + @Test + public void allSupportedStringFunctionCallInSelectClauseShouldPass() { + validate( + "SELECT * FROM semantics WHERE " + + " SUBSTRING(city, 0, 3) = 'Sea' AND " + + " UPPER(city) = 'SEATTLE' AND " + + " LOWER(city) = 'seattle'"); + } - @Test - public void allSupportedStringFunctionCallInWhereClauseShouldPass() { - validate( - 
"SELECT" + - " SUBSTRING(city, 0, 3), " + - " UPPER(address), " + - " LOWER(manager.name) " + - "FROM semantics " - ); - } + @Test + public void allSupportedStringFunctionCallInWhereClauseShouldPass() { + validate( + "SELECT" + + " SUBSTRING(city, 0, 3), " + + " UPPER(address), " + + " LOWER(manager.name) " + + "FROM semantics "); + } - @Test - public void dateFormatFunctionCallWithNumberShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT DATE_FORMAT(balance, 'yyyy-MM') FROM semantics", - "Function [DATE_FORMAT] cannot work with [DOUBLE, STRING].", - "Usage: DATE_FORMAT(DATE, STRING) -> STRING or DATE_FORMAT(DATE, STRING, STRING) -> STRING" - ); - } + @Test + public void dateFormatFunctionCallWithNumberShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT DATE_FORMAT(balance, 'yyyy-MM') FROM semantics", + "Function [DATE_FORMAT] cannot work with [DOUBLE, STRING].", + "Usage: DATE_FORMAT(DATE, STRING) -> STRING or DATE_FORMAT(DATE, STRING, STRING) ->" + + " STRING"); + } - @Test - public void allSupportedDateFunctionCallShouldPass() { - validate( - "SELECT date_format(birthday, 'yyyy-MM') " + - "FROM semantics " + - "WHERE date_format(birthday, 'yyyy-MM') > '1980-01' " + - "GROUP BY date_format(birthday, 'yyyy-MM') " + - "ORDER BY date_format(birthday, 'yyyy-MM') DESC" - ); - } + @Test + public void allSupportedDateFunctionCallShouldPass() { + validate( + "SELECT date_format(birthday, 'yyyy-MM') " + + "FROM semantics " + + "WHERE date_format(birthday, 'yyyy-MM') > '1980-01' " + + "GROUP BY date_format(birthday, 'yyyy-MM') " + + "ORDER BY date_format(birthday, 'yyyy-MM') DESC"); + } - @Test - public void concatRequiresVarargSupportShouldPassAnyway() { - validate("SELECT CONCAT('aaa') FROM semantics"); - validate("SELECT CONCAT('aaa', 'bbb') FROM semantics"); - validate("SELECT CONCAT('aaa', 'bbb', 123) FROM semantics"); - } + @Test + public void concatRequiresVarargSupportShouldPassAnyway() { + validate("SELECT CONCAT('aaa') FROM 
semantics"); + validate("SELECT CONCAT('aaa', 'bbb') FROM semantics"); + validate("SELECT CONCAT('aaa', 'bbb', 123) FROM semantics"); + } - @Test - public void castFunctionShouldPass() { - validateWithType("SELECT CAST(age AS DOUBLE) FROM semantics", OpenSearchDataType.DOUBLE); - } + @Test + public void castFunctionShouldPass() { + validateWithType("SELECT CAST(age AS DOUBLE) FROM semantics", OpenSearchDataType.DOUBLE); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java index f34af4fe3a..7613806df7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java @@ -3,105 +3,94 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Test; import org.opensearch.sql.legacy.antlr.visitor.EarlyExitAnalysisException; -/** - * Semantic analysis test for subquery - */ +/** Semantic analysis test for subquery */ public class SemanticAnalyzerSubqueryTest extends SemanticAnalyzerTestBase { - @Test - public void useExistClauseOnNestedFieldShouldPass() { - validate( - "SELECT * FROM semantics AS s WHERE EXISTS " + - " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + - " AND s.age > 10" - ); - } - - @Test - public void useNotExistClauseOnNestedFieldShouldPass() { - validate( - "SELECT * FROM semantics AS s WHERE NOT EXISTS " + - " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + - " AND s.age > 10" - ); - } - - @Test - public void useInClauseOnAgeWithIntegerLiteralListShouldPass() { - validate("SELECT * FROM semantics WHERE age IN (30, 40)"); - } - - @Test - public void useAliasInSubqueryShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE EXISTS (SELECT * 
FROM s.projects p) AND p.active IS TRUE", - "Field [p.active] cannot be found or used here.", - "Did you mean [projects.active]?" - ); - } - - @Test - public void useInClauseWithIncompatibleFieldTypesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE age IN (SELECT p.active FROM s.projects p)", - "Operator [IN] cannot work with [INTEGER, BOOLEAN]." - ); - } - - @Test - public void useInClauseWithCompatibleFieldTypesShouldPass() { - validate("SELECT * FROM semantics s WHERE address IN (SELECT city FROM s.projects p)"); - } - - @Test - public void useNotInClauseWithCompatibleFieldTypesShouldPass() { - validate("SELECT * FROM semantics s WHERE address NOT IN (SELECT city FROM s.projects p)"); - } - - @Test - public void useInClauseWithCompatibleConstantShouldPass() { - validate("SELECT * FROM semantics WHERE age IN (10, 20, 30)"); - validate("SELECT * FROM semantics WHERE city IN ('Seattle', 'Bellevue')"); - validate("SELECT * FROM semantics WHERE birthday IN ('2000-01-01', '2010-01-01')"); - } - - @Test - public void useInClauseWithIncompatibleConstantShouldPass() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE age IN ('abc', 'def')", - "Operator [IN] cannot work with [INTEGER, STRING]." 
- ); - } - - @Test - public void useInClauseWithSelectStarShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE address IN (SELECT * FROM s.projects p)", - "Operator [IN] cannot work with [TEXT, (*)]" - ); - } - - @Test - public void useExistsClauseWithSelectStarShouldPass() { - validate("SELECT * FROM semantics s WHERE EXISTS (SELECT * FROM s.projects p)"); - } - - @Test - public void useExistsClauseWithSelectConstantShouldPass() { - validate("SELECT * FROM semantics s WHERE EXISTS (SELECT 1 FROM s.projects p)"); - } - - /** - * Ignore the semantic analyzer by using {@link EarlyExitAnalysisException} - */ - @Test - public void useSubqueryInFromClauseWithSelectConstantShouldPass() { - validate("SELECT t.TEMP as count FROM (SELECT COUNT(*) as TEMP FROM semantics) t"); - } + @Test + public void useExistClauseOnNestedFieldShouldPass() { + validate( + "SELECT * FROM semantics AS s WHERE EXISTS " + + " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + + " AND s.age > 10"); + } + + @Test + public void useNotExistClauseOnNestedFieldShouldPass() { + validate( + "SELECT * FROM semantics AS s WHERE NOT EXISTS " + + " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + + " AND s.age > 10"); + } + + @Test + public void useInClauseOnAgeWithIntegerLiteralListShouldPass() { + validate("SELECT * FROM semantics WHERE age IN (30, 40)"); + } + + @Test + public void useAliasInSubqueryShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE EXISTS (SELECT * FROM s.projects p) AND p.active IS TRUE", + "Field [p.active] cannot be found or used here.", + "Did you mean [projects.active]?"); + } + + @Test + public void useInClauseWithIncompatibleFieldTypesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE age IN (SELECT p.active FROM s.projects p)", + "Operator [IN] cannot work with [INTEGER, BOOLEAN]."); + } + + @Test + public void 
useInClauseWithCompatibleFieldTypesShouldPass() { + validate("SELECT * FROM semantics s WHERE address IN (SELECT city FROM s.projects p)"); + } + + @Test + public void useNotInClauseWithCompatibleFieldTypesShouldPass() { + validate("SELECT * FROM semantics s WHERE address NOT IN (SELECT city FROM s.projects p)"); + } + + @Test + public void useInClauseWithCompatibleConstantShouldPass() { + validate("SELECT * FROM semantics WHERE age IN (10, 20, 30)"); + validate("SELECT * FROM semantics WHERE city IN ('Seattle', 'Bellevue')"); + validate("SELECT * FROM semantics WHERE birthday IN ('2000-01-01', '2010-01-01')"); + } + + @Test + public void useInClauseWithIncompatibleConstantShouldPass() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE age IN ('abc', 'def')", + "Operator [IN] cannot work with [INTEGER, STRING]."); + } + + @Test + public void useInClauseWithSelectStarShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE address IN (SELECT * FROM s.projects p)", + "Operator [IN] cannot work with [TEXT, (*)]"); + } + + @Test + public void useExistsClauseWithSelectStarShouldPass() { + validate("SELECT * FROM semantics s WHERE EXISTS (SELECT * FROM s.projects p)"); + } + + @Test + public void useExistsClauseWithSelectConstantShouldPass() { + validate("SELECT * FROM semantics s WHERE EXISTS (SELECT 1 FROM s.projects p)"); + } + + /** Ignore the semantic analyzer by using {@link EarlyExitAnalysisException} */ + @Test + public void useSubqueryInFromClauseWithSelectConstantShouldPass() { + validate("SELECT t.TEMP as count FROM (SELECT COUNT(*) as TEMP FROM semantics) t"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java index 7b53619d9c..403c2f49b7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import static java.util.stream.Collectors.toList; @@ -28,49 +27,45 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test cases for semantic analysis focused on semantic check which was missing in the past. - */ +/** Test cases for semantic analysis focused on semantic check which was missing in the past. */ public abstract class SemanticAnalyzerTestBase { - private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; + private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; - /** public accessor is required by @Rule annotation */ - @Rule - public ExpectedException exception = ExpectedException.none(); + /** public accessor is required by @Rule annotation */ + @Rule public ExpectedException exception = ExpectedException.none(); - private OpenSearchLegacySqlAnalyzer - analyzer = new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); + private OpenSearchLegacySqlAnalyzer analyzer = + new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); - @SuppressWarnings("UnstableApiUsage") - @BeforeClass - public static void init() throws IOException { - URL url = Resources.getResource(TEST_MAPPING_FILE); - String mappings = Resources.toString(url, Charsets.UTF_8); - LocalClusterState.state(null); - mockLocalClusterState(mappings); - } + @SuppressWarnings("UnstableApiUsage") + @BeforeClass + public static void init() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + LocalClusterState.state(null); + mockLocalClusterState(mappings); + } - @AfterClass - public static void cleanUp() { - LocalClusterState.state(null); - } + @AfterClass + public 
static void cleanUp() { + LocalClusterState.state(null); + } - protected void expectValidationFailWithErrorMessages(String query, String... messages) { - exception.expect(SemanticAnalysisException.class); - exception.expectMessage(allOf(Arrays.stream(messages). - map(Matchers::containsString). - collect(toList()))); - validate(query); - } + protected void expectValidationFailWithErrorMessages(String query, String... messages) { + exception.expect(SemanticAnalysisException.class); + exception.expectMessage( + allOf(Arrays.stream(messages).map(Matchers::containsString).collect(toList()))); + validate(query); + } - protected void validate(String sql) { - analyzer.analyze(sql, LocalClusterState.state()); - } + protected void validate(String sql) { + analyzer.analyze(sql, LocalClusterState.state()); + } - protected void validateWithType(String sql, Type type) { - Optional analyze = analyzer.analyze(sql, LocalClusterState.state()); - assertTrue(analyze.isPresent()); - assertEquals(type, analyze.get()); - } + protected void validateWithType(String sql, Type type) { + Optional analyze = analyzer.analyze(sql, LocalClusterState.state()); + assertTrue(analyze.isPresent()); + assertEquals(type, analyze.get()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java index 56a27b780f..7585152a4d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java @@ -3,29 +3,27 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.runner.RunWith; import org.junit.runners.Suite; /** - * Semantic analyzer test suite to prepare mapping and avoid load from file every time. - * But Gradle seems not work well with suite. 
So move common logic to test base class - * and keep this for quick testing in IDE. + * Semantic analyzer test suite to prepare mapping and avoid load from file every time. But Gradle + * seems not work well with suite. So move common logic to test base class and keep this for quick + * testing in IDE. */ @RunWith(Suite.class) @Suite.SuiteClasses({ - SemanticAnalyzerBasicTest.class, - SemanticAnalyzerConfigTest.class, - SemanticAnalyzerFromClauseTest.class, - SemanticAnalyzerIdentifierTest.class, - SemanticAnalyzerScalarFunctionTest.class, - SemanticAnalyzerESScalarFunctionTest.class, - SemanticAnalyzerAggregateFunctionTest.class, - SemanticAnalyzerOperatorTest.class, - SemanticAnalyzerSubqueryTest.class, - SemanticAnalyzerMultiQueryTest.class, + SemanticAnalyzerBasicTest.class, + SemanticAnalyzerConfigTest.class, + SemanticAnalyzerFromClauseTest.class, + SemanticAnalyzerIdentifierTest.class, + SemanticAnalyzerScalarFunctionTest.class, + SemanticAnalyzerESScalarFunctionTest.class, + SemanticAnalyzerAggregateFunctionTest.class, + SemanticAnalyzerOperatorTest.class, + SemanticAnalyzerSubqueryTest.class, + SemanticAnalyzerMultiQueryTest.class, }) -public class SemanticAnalyzerTests { -} +public class SemanticAnalyzerTests {} diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java index 689fdd20f6..e19b48f2a0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java @@ -3,42 +3,37 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import org.junit.Assert; import org.junit.Test; -/** - * Test cases for semantic context - */ +/** Test cases for semantic context */ public class SemanticContextTest { - private final SemanticContext 
context = new SemanticContext(); - - @Test - public void rootEnvironmentShouldBeThereInitially() { - Assert.assertNotNull( - "Didn't find root environment. Context is NOT supposed to be empty initially", - context.peek() - ); - } - - @Test - public void pushAndPopEnvironmentShouldPass() { - context.push(); - context.pop(); - } - - @Test - public void popRootEnvironmentShouldPass() { - context.pop(); - } - - @Test(expected = NullPointerException.class) - public void popEmptyEnvironmentStackShouldFail() { - context.pop(); - context.pop(); - } - + private final SemanticContext context = new SemanticContext(); + + @Test + public void rootEnvironmentShouldBeThereInitially() { + Assert.assertNotNull( + "Didn't find root environment. Context is NOT supposed to be empty initially", + context.peek()); + } + + @Test + public void pushAndPopEnvironmentShouldPass() { + context.push(); + context.pop(); + } + + @Test + public void popRootEnvironmentShouldPass() { + context.pop(); + } + + @Test(expected = NullPointerException.class) + public void popEmptyEnvironmentStackShouldFail() { + context.pop(); + context.pop(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java index fcbc9bf7b6..8fde3bdc3c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import static org.hamcrest.MatcherAssert.assertThat; @@ -25,65 +24,62 @@ import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test cases for symbol table - */ +/** Test cases for symbol table */ public class SymbolTableTest { - private 
final SymbolTable symbolTable = new SymbolTable(); + private final SymbolTable symbolTable = new SymbolTable(); - @Test - public void defineFieldSymbolShouldBeAbleToResolve() { - defineSymbolShouldBeAbleToResolve(new Symbol(Namespace.FIELD_NAME, "birthday"), DATE); - } + @Test + public void defineFieldSymbolShouldBeAbleToResolve() { + defineSymbolShouldBeAbleToResolve(new Symbol(Namespace.FIELD_NAME, "birthday"), DATE); + } - @Test - public void defineFunctionSymbolShouldBeAbleToResolve() { - String funcName = "LOG"; - Type expectedType = new TypeExpression() { - @Override - public String getName() { - return "Temp type expression with [NUMBER] -> NUMBER specification"; - } + @Test + public void defineFunctionSymbolShouldBeAbleToResolve() { + String funcName = "LOG"; + Type expectedType = + new TypeExpression() { + @Override + public String getName() { + return "Temp type expression with [NUMBER] -> NUMBER specification"; + } - @Override - public TypeExpressionSpec[] specifications() { - return new TypeExpressionSpec[] { - new TypeExpressionSpec().map(NUMBER).to(NUMBER) - }; - } + @Override + public TypeExpressionSpec[] specifications() { + return new TypeExpressionSpec[] {new TypeExpressionSpec().map(NUMBER).to(NUMBER)}; + } }; - Symbol symbol = new Symbol(Namespace.FUNCTION_NAME, funcName); - defineSymbolShouldBeAbleToResolve(symbol, expectedType); - } - - @Test - public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects"), new OpenSearchIndex("s.projects", NESTED_FIELD)); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); + Symbol symbol = new 
Symbol(Namespace.FUNCTION_NAME, funcName); + defineSymbolShouldBeAbleToResolve(symbol, expectedType); + } - Map typeByName = symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.active", BOOLEAN) - ) - ); - } + @Test + public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { + symbolTable.store( + new Symbol(Namespace.FIELD_NAME, "s.projects"), + new OpenSearchIndex("s.projects", NESTED_FIELD)); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); - private void defineSymbolShouldBeAbleToResolve(Symbol symbol, Type expectedType) { - symbolTable.store(symbol, expectedType); + Map typeByName = + symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.release", DATE), + hasEntry("s.projects.active", BOOLEAN))); + } - Optional actualType = symbolTable.lookup(symbol); - Assert.assertTrue(actualType.isPresent()); - Assert.assertEquals(expectedType, actualType.get()); - } + private void defineSymbolShouldBeAbleToResolve(Symbol symbol, Type expectedType) { + symbolTable.store(symbol, expectedType); + Optional actualType = symbolTable.lookup(symbol); + Assert.assertTrue(actualType.isPresent()); + Assert.assertEquals(expectedType, actualType.get()); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java index a0b60de4be..e6090117c1 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import static org.junit.Assert.assertEquals; @@ -15,31 +14,30 @@ import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; public class TypeSupplierTest { - @Rule - public ExpectedException exception = ExpectedException.none(); + @Rule public ExpectedException exception = ExpectedException.none(); - @Test - public void haveOneTypeShouldPass() { - TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); + @Test + public void haveOneTypeShouldPass() { + TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); - assertEquals(OpenSearchDataType.INTEGER, age.get()); - } + assertEquals(OpenSearchDataType.INTEGER, age.get()); + } - @Test - public void addSameTypeShouldPass() { - TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); - age.add(OpenSearchDataType.INTEGER); + @Test + public void addSameTypeShouldPass() { + TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); + age.add(OpenSearchDataType.INTEGER); - assertEquals(OpenSearchDataType.INTEGER, age.get()); - } + assertEquals(OpenSearchDataType.INTEGER, age.get()); + } - @Test - public void haveTwoTypesShouldThrowException() { - TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); - age.add(OpenSearchDataType.TEXT); + @Test + public void haveTwoTypesShouldThrowException() { + TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); + age.add(OpenSearchDataType.TEXT); - 
exception.expect(SemanticAnalysisException.class); - exception.expectMessage("Field [age] have conflict type"); - age.get(); - } + exception.expect(SemanticAnalysisException.class); + exception.expectMessage("Field [age] have conflict type"); + age.get(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java index d1d1d7799b..55c184bcaa 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static org.junit.Assert.assertEquals; @@ -21,59 +20,58 @@ import java.util.Arrays; import org.junit.Test; -/** - * Test cases for default implementation methods in interface TypeExpression - */ +/** Test cases for default implementation methods in interface TypeExpression */ public class TypeExpressionTest { - private final TypeExpression test123 = new TypeExpression() { + private final TypeExpression test123 = + new TypeExpression() { @Override public String getName() { - return "TEST123"; + return "TEST123"; } @Override public TypeExpressionSpec[] specifications() { - return new TypeExpressionSpec[] { - new TypeExpressionSpec().map(T(NUMBER)).to(T), - new TypeExpressionSpec().map(STRING, BOOLEAN).to(DATE) - }; + return new TypeExpressionSpec[] { + new TypeExpressionSpec().map(T(NUMBER)).to(T), + new TypeExpressionSpec().map(STRING, BOOLEAN).to(DATE) + }; } - }; + }; - @Test - public void emptySpecificationShouldAlwaysReturnUnknown() { - TypeExpression expr = new TypeExpression() { - @Override - public TypeExpressionSpec[] specifications() { - return new TypeExpressionSpec[0]; - } + @Test + public void emptySpecificationShouldAlwaysReturnUnknown() { + 
TypeExpression expr = + new TypeExpression() { + @Override + public TypeExpressionSpec[] specifications() { + return new TypeExpressionSpec[0]; + } - @Override - public String getName() { - return "Temp type expression with empty specification"; - } + @Override + public String getName() { + return "Temp type expression with empty specification"; + } }; - assertEquals(UNKNOWN, expr.construct(Arrays.asList(NUMBER))); - assertEquals(UNKNOWN, expr.construct(Arrays.asList(STRING, BOOLEAN))); - assertEquals(UNKNOWN, expr.construct(Arrays.asList(INTEGER, DOUBLE, GEO_POINT))); - } - - @Test - public void compatibilityCheckShouldPassIfAnySpecificationCompatible() { - assertEquals(DOUBLE, test123.construct(Arrays.asList(DOUBLE))); - assertEquals(DATE, test123.construct(Arrays.asList(STRING, BOOLEAN))); - } + assertEquals(UNKNOWN, expr.construct(Arrays.asList(NUMBER))); + assertEquals(UNKNOWN, expr.construct(Arrays.asList(STRING, BOOLEAN))); + assertEquals(UNKNOWN, expr.construct(Arrays.asList(INTEGER, DOUBLE, GEO_POINT))); + } - @Test - public void compatibilityCheckShouldFailIfNoSpecificationCompatible() { - assertEquals(TYPE_ERROR, test123.construct(Arrays.asList(BOOLEAN))); - } + @Test + public void compatibilityCheckShouldPassIfAnySpecificationCompatible() { + assertEquals(DOUBLE, test123.construct(Arrays.asList(DOUBLE))); + assertEquals(DATE, test123.construct(Arrays.asList(STRING, BOOLEAN))); + } - @Test - public void usageShouldPrintAllSpecifications() { - assertEquals("TEST123(NUMBER T) -> T or TEST123(STRING, BOOLEAN) -> DATE", test123.usage()); - } + @Test + public void compatibilityCheckShouldFailIfNoSpecificationCompatible() { + assertEquals(TYPE_ERROR, test123.construct(Arrays.asList(BOOLEAN))); + } + @Test + public void usageShouldPrintAllSpecifications() { + assertEquals("TEST123(NUMBER T) -> T or TEST123(STRING, BOOLEAN) -> DATE", test123.usage()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java index 7cfada0b78..3310bdc1e1 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java @@ -21,12 +21,13 @@ public Schema getSchema() { }; /** - * Case #1: - * LIKE 'test%' is converted to: + * Case #1: LIKE 'test%' is converted to: + * *

      - *
    1. Regex pattern: test.* - *
    2. OpenSearch search pattern: test* - *
    + *
  • Regex pattern: test.* + *
  • OpenSearch search pattern: test* + * + * * In this case, what OpenSearch returns is the final result. */ @Test @@ -35,15 +36,17 @@ public void testWildcardForZeroOrMoreCharacters() { } /** - * Case #2: - * LIKE 'test_123' is converted to: - *
      x - *
    1. Regex pattern: test.123 - *
    2. OpenSearch search pattern: (all) + * Case #2: LIKE 'test_123' is converted to: + * + *
        + * x + *
      1. Regex pattern: test.123 + *
      2. OpenSearch search pattern: (all) *
      - * Because OpenSearch doesn't support single wildcard character, in this case, none is passed - * as OpenSearch search pattern. So all index names are returned and need to be filtered by - * regex pattern again. + * + * Because OpenSearch doesn't support single wildcard character, in this case, none is passed as + * OpenSearch search pattern. So all index names are returned and need to be filtered by regex + * pattern again. */ @Test public void testWildcardForSingleCharacter() { diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java index b59bd218e0..4a4161a585 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.SQLUtils; @@ -12,122 +11,116 @@ import org.junit.Test; import org.opensearch.sql.legacy.util.SqlParserUtils; -/** - * Test cases for field name prefix remove rule. - */ +/** Test cases for field name prefix remove rule. 
*/ public class TableAliasPrefixRemoveRuleTest { - @Test - public void queryWithUnAliasedTableNameShouldMatch() { - query("SELECT account.age FROM accounts").shouldMatchRule(); - } - - @Test - public void queryWithUnAliasedTableNameInSubQueryShouldNotMatch() { - query("SELECT * FROM test t WHERE t.name IN (SELECT accounts.name FROM accounts)").shouldNotMatchRule(); - } - - @Test - public void queryWithoutUnAliasedTableNameShouldMatch() { - query("SELECT a.age FROM accounts a WHERE a.balance > 1000").shouldMatchRule(); - } - - @Test - public void joinQueryWithoutUnAliasedTableNameShouldNotMatch() { - query("SELECT * FROM accounts a1 JOIN accounts a2 ON a1.city = a2.city").shouldNotMatchRule(); - } - - @Test - public void nestedFieldQueryWithoutUnAliasedTableNameShouldNotMatch() { - query("SELECT * FROM accounts a, a.project p").shouldNotMatchRule(); - } - - @Test - public void selectedFieldNamePrefixedByUnAliasedTableNameShouldRemoveTableNamePrefix() { - query("SELECT accounts.age FROM accounts").shouldBeAfterRewrite("SELECT age FROM accounts"); - query("SELECT accounts.age FROM accounts/temp").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); - query("SELECT age FROM accounts/temp a").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); - } - - @Test - public void allFieldNamePrefixedByUnAliasedTableNameEverywhereShouldRemoveTableNamePrefix() { - query( - "SELECT accounts.age, AVG(accounts.salary) FROM accounts WHERE accounts.age > 10 " + - "GROUP BY accounts.age HAVING AVG(accounts.balance) > 1000 ORDER BY accounts.age" - ).shouldBeAfterRewrite( - "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + - "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age" - ); - } - - @Test - public void selectedFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { - query("SELECT a.age FROM accounts a").shouldBeAfterRewrite("SELECT age FROM accounts"); - query("SELECT a.age FROM accounts/temp a").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + @Test + 
public void queryWithUnAliasedTableNameShouldMatch() { + query("SELECT account.age FROM accounts").shouldMatchRule(); + } + + @Test + public void queryWithUnAliasedTableNameInSubQueryShouldNotMatch() { + query("SELECT * FROM test t WHERE t.name IN (SELECT accounts.name FROM accounts)") + .shouldNotMatchRule(); + } + + @Test + public void queryWithoutUnAliasedTableNameShouldMatch() { + query("SELECT a.age FROM accounts a WHERE a.balance > 1000").shouldMatchRule(); + } + + @Test + public void joinQueryWithoutUnAliasedTableNameShouldNotMatch() { + query("SELECT * FROM accounts a1 JOIN accounts a2 ON a1.city = a2.city").shouldNotMatchRule(); + } + + @Test + public void nestedFieldQueryWithoutUnAliasedTableNameShouldNotMatch() { + query("SELECT * FROM accounts a, a.project p").shouldNotMatchRule(); + } + + @Test + public void selectedFieldNamePrefixedByUnAliasedTableNameShouldRemoveTableNamePrefix() { + query("SELECT accounts.age FROM accounts").shouldBeAfterRewrite("SELECT age FROM accounts"); + query("SELECT accounts.age FROM accounts/temp") + .shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + query("SELECT age FROM accounts/temp a").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + } + + @Test + public void allFieldNamePrefixedByUnAliasedTableNameEverywhereShouldRemoveTableNamePrefix() { + query( + "SELECT accounts.age, AVG(accounts.salary) FROM accounts WHERE accounts.age > 10 " + + "GROUP BY accounts.age HAVING AVG(accounts.balance) > 1000 ORDER BY accounts.age") + .shouldBeAfterRewrite( + "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + + "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age"); + } + + @Test + public void selectedFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { + query("SELECT a.age FROM accounts a").shouldBeAfterRewrite("SELECT age FROM accounts"); + query("SELECT a.age FROM accounts/temp a") + .shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + } + + @Test + public void 
allFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { + query( + "SELECT a.age, AVG(a.salary) FROM accounts a WHERE a.age > 10 " + + "GROUP BY a.age HAVING AVG(a.balance) > 1000 ORDER BY a.age") + .shouldBeAfterRewrite( + "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + + "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age"); + } + + @Test + public void allFieldNamePrefixedByTableAliasInMultiQueryShouldRemoveTableAliasPrefix() { + query("SELECT t.name FROM test t UNION SELECT a.age FROM accounts a WHERE a.age > 10") + .shouldBeAfterRewrite( + "SELECT name FROM test UNION SELECT age FROM accounts WHERE age > 10"); + } + + @Test + public void unAliasedFieldNameShouldNotBeChanged() { + query("SELECT a.age, name FROM accounts a WHERE balance > 1000") + .shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); + query("SELECT accounts.age, name FROM accounts WHERE balance > 1000") + .shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); + } + + private QueryAssertion query(String sql) { + return new QueryAssertion(sql); + } + + private static class QueryAssertion { + + private final TableAliasPrefixRemoveRule rule = new TableAliasPrefixRemoveRule(); + + private final SQLQueryExpr expr; + + QueryAssertion(String sql) { + this.expr = SqlParserUtils.parse(sql); } - @Test - public void allFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { - query( - "SELECT a.age, AVG(a.salary) FROM accounts a WHERE a.age > 10 " + - "GROUP BY a.age HAVING AVG(a.balance) > 1000 ORDER BY a.age" - ).shouldBeAfterRewrite( - "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + - "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age" - ); + void shouldMatchRule() { + Assert.assertTrue(match()); } - @Test - public void allFieldNamePrefixedByTableAliasInMultiQueryShouldRemoveTableAliasPrefix() { - query( - "SELECT t.name FROM test t UNION SELECT a.age FROM accounts a WHERE a.age > 10" - ).shouldBeAfterRewrite( - 
"SELECT name FROM test UNION SELECT age FROM accounts WHERE age > 10" - ); + void shouldNotMatchRule() { + Assert.assertFalse(match()); } - @Test - public void unAliasedFieldNameShouldNotBeChanged() { - query("SELECT a.age, name FROM accounts a WHERE balance > 1000"). - shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); - query("SELECT accounts.age, name FROM accounts WHERE balance > 1000"). - shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); + void shouldBeAfterRewrite(String expected) { + shouldMatchRule(); + rule.rewrite(expr); + Assert.assertEquals( + SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), SQLUtils.toMySqlString(expr)); } - private QueryAssertion query(String sql) { - return new QueryAssertion(sql); + private boolean match() { + return rule.match(expr); } - - private static class QueryAssertion { - - private final TableAliasPrefixRemoveRule rule = new TableAliasPrefixRemoveRule(); - - private final SQLQueryExpr expr; - - QueryAssertion(String sql) { - this.expr = SqlParserUtils.parse(sql); - } - - void shouldMatchRule() { - Assert.assertTrue(match()); - } - - void shouldNotMatchRule() { - Assert.assertFalse(match()); - } - - void shouldBeAfterRewrite(String expected) { - shouldMatchRule(); - rule.rewrite(expr); - Assert.assertEquals( - SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), - SQLUtils.toMySqlString(expr) - ); - } - - private boolean match() { - return rule.match(expr); - } - } - + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java index 5fc677785d..ab5c6b3d10 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import 
com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; @@ -13,27 +12,24 @@ import org.junit.Assert; import org.junit.Test; -/** - * Test cases for util class {@link Table}. - */ +/** Test cases for util class {@link Table}. */ public class TableTest { - @Test - public void identifierOfTableNameShouldReturnTheTableName() { - Table table = new Table(new SQLExprTableSource(new SQLIdentifierExpr("accounts"))); - Assert.assertEquals("accounts", table.name()); - } - - @Test - public void identifierOfTableAndTypeNameShouldReturnTheTableNameOnly() { - Table table = new Table(new SQLExprTableSource( - new SQLBinaryOpExpr( - new SQLIdentifierExpr("accounts"), - SQLBinaryOperator.Divide, - new SQLIdentifierExpr("test") - ) - )); - Assert.assertEquals("accounts", table.name()); - } + @Test + public void identifierOfTableNameShouldReturnTheTableName() { + Table table = new Table(new SQLExprTableSource(new SQLIdentifierExpr("accounts"))); + Assert.assertEquals("accounts", table.name()); + } + @Test + public void identifierOfTableAndTypeNameShouldReturnTheTableNameOnly() { + Table table = + new Table( + new SQLExprTableSource( + new SQLBinaryOpExpr( + new SQLIdentifierExpr("accounts"), + SQLBinaryOperator.Divide, + new SQLIdentifierExpr("test")))); + Assert.assertEquals("accounts", table.name()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java index 2593f25379..c303a97fbc 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java @@ -518,14 +518,13 @@ private void noImpact(String sql) { /** * The intention for this assert method is: - *
        - *
      1. MySqlSelectQueryBlock.equals() doesn't call super.equals(). But select items, from, where - * and group by are all held by parent class SQLSelectQueryBlock. - * - *
      2. SQLSelectGroupByClause doesn't implement equals() at all.. MySqlSelectGroupByExpr - * compares identity of expression.. * - *
      3. MySqlUnionQuery doesn't implement equals() at all + *
          + *
        1. MySqlSelectQueryBlock.equals() doesn't call super.equals(). But select items, from, where + * and group by are all held by parent class SQLSelectQueryBlock. + *
        2. SQLSelectGroupByClause doesn't implement equals() at all.. MySqlSelectGroupByExpr + * compares identity of expression.. + *
        3. MySqlUnionQuery doesn't implement equals() at all *
        */ private void same(SQLQueryExpr actual, SQLQueryExpr expected) { diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java index ec13789d28..63fcd98524 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static java.util.Collections.emptyList; @@ -28,168 +27,186 @@ @RunWith(MockitoJUnitRunner.class) public class SqlRequestFactoryTest { - @Mock - private RestRequest restRequest; - - @Mock - private OpenSearchSettings settings; - - @Before - public void setup() { - // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard to mock. - // In this case, default value in Setting will be returned all the time. - doReturn(emptyList()).when(settings).getSettings(); - LocalClusterState.state().setPluginSettings(settings); - } - - @Ignore("RestRequest is a final method, and Mockito 1.x cannot mock it." 
+ - "Ignore this test case till we can upgrade to Mockito 2.x") - @Test - public void testGenerateSqlRequest_fromUrlParams() { - String sql = "select * from table"; - Mockito.when(restRequest.method()).thenReturn(RestRequest.Method.GET); - Mockito.when(restRequest.param("sql")).thenReturn(sql); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(restRequest); - - Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); - Assert.assertEquals(sql, sqlRequest.getSql()); - } - - @Test - public void testGenerateSqlRequest_sqlRequestFromPayload() { - String payload = "{ \"query\": \"select * from my_table\" }"; - - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); - Assert.assertEquals("select * from my_table", sqlRequest.getSql()); - } - - @Test - public void testGenerateSqlRequest_preparedStatementFromPayload() { - String payload = "{\n" + - " \"query\": \"select * from my_table where int_param = ? and double_param = ? and string_param = ? and date_param = ? 
and null_param = ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": 1\n" + - " },\n" + - " {\n" + - " \"type\": \"double\",\n" + - " \"value\": \"2.0\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"string_value\"\n" + - " },\n" + - " {\n" + - " \"type\": \"date\",\n" + - " \"value\": \"2000-01-01\"\n" + - " },\n" + - " {\n" + - " \"type\": \"null\",\n" + - " \"value\": null\n" + - " }\n" + - " ]\n" + - "}"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - - Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); - PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; - Assert.assertEquals("select * from my_table where int_param = ? and double_param = ? and string_param = ? and date_param = ? 
and null_param = ?", preparedStatementRequest.getPreparedStatement()); - Assert.assertEquals("select * from my_table where int_param = 1 and double_param = 2.0 and string_param = 'string_value' and date_param = '2000-01-01' and null_param = null", preparedStatementRequest.getSql()); - Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); - Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); - Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); - Assert.assertTrue(preparedStatementRequest.getParameters().get(2) instanceof PreparedStatementRequest.StringParameter); - Assert.assertTrue(preparedStatementRequest.getParameters().get(3) instanceof PreparedStatementRequest.StringParameter); - Assert.assertTrue(preparedStatementRequest.getParameters().get(4) instanceof PreparedStatementRequest.NullParameter); - } - - @Test - public void testGenerateSqlRequest_prearedStatementFromPayload2() { - // type not covered in above test case - String payload = "{\n" + - " \"query\": \"select * from my_table where long_param = ? and float_param = ? and keyword_param = ? and boolean_param = ? 
and byte_param = ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"long\",\n" + - " \"value\": 1\n" + - " },\n" + - " {\n" + - " \"type\": \"float\",\n" + - " \"value\": \"2.0\"\n" + - " },\n" + - " {\n" + - " \"type\": \"keyword\",\n" + - " \"value\": \"string_value\"\n" + - " },\n" + - " {\n" + - " \"type\": \"boolean\",\n" + - " \"value\": true\n" + - " },\n" + - " {\n" + - " \"type\": \"byte\",\n" + - " \"value\": 91\n" + - " }\n" + - " ]\n" + - "}"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - - Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); - PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; - Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); - Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); - Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); - Assert.assertTrue(preparedStatementRequest.getParameters().get(2) instanceof PreparedStatementRequest.StringParameter); - System.out.println(preparedStatementRequest.getParameters().get(3)); - Assert.assertTrue(preparedStatementRequest.getParameters().get(3).getValue() instanceof Boolean); - Assert.assertTrue(preparedStatementRequest.getParameters().get(4).getValue() instanceof Long); - - } - - @Test(expected = IllegalArgumentException.class) - public void testGenerateSqlRequest_unsupportedHttpMethod() { - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.PUT); - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - } - - @Test(expected = IllegalArgumentException.class) - public void testGenerateSqlRequest_invalidJson() { - String payload = "{\n" + - " \"query\": \"select * from my_table where 
param1 = ?\",\n"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - } - - @Test(expected = IllegalArgumentException.class) - public void testGenerateSqlRequest_unsupportedType() { - String payload = "{\n" + - " \"query\": \"select * from my_table where param1 = ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"unsupported_type\",\n" + - " \"value\": 1\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"string_value\"\n" + - " }\n" + - " ]\n" + - "}"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - } + @Mock private RestRequest restRequest; + + @Mock private OpenSearchSettings settings; + + @Before + public void setup() { + // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard + // to mock. + // In this case, default value in Setting will be returned all the time. + doReturn(emptyList()).when(settings).getSettings(); + LocalClusterState.state().setPluginSettings(settings); + } + + @Ignore( + "RestRequest is a final method, and Mockito 1.x cannot mock it." 
+ + "Ignore this test case till we can upgrade to Mockito 2.x") + @Test + public void testGenerateSqlRequest_fromUrlParams() { + String sql = "select * from table"; + Mockito.when(restRequest.method()).thenReturn(RestRequest.Method.GET); + Mockito.when(restRequest.param("sql")).thenReturn(sql); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(restRequest); + + Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); + Assert.assertEquals(sql, sqlRequest.getSql()); + } + + @Test + public void testGenerateSqlRequest_sqlRequestFromPayload() { + String payload = "{ \"query\": \"select * from my_table\" }"; + + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); + Assert.assertEquals("select * from my_table", sqlRequest.getSql()); + } + + @Test + public void testGenerateSqlRequest_preparedStatementFromPayload() { + String payload = + "{\n" + + " \"query\": \"select * from my_table where int_param = ? and double_param = ? and" + + " string_param = ? and date_param = ? 
and null_param = ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": 1\n" + + " },\n" + + " {\n" + + " \"type\": \"double\",\n" + + " \"value\": \"2.0\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"string_value\"\n" + + " },\n" + + " {\n" + + " \"type\": \"date\",\n" + + " \"value\": \"2000-01-01\"\n" + + " },\n" + + " {\n" + + " \"type\": \"null\",\n" + + " \"value\": null\n" + + " }\n" + + " ]\n" + + "}"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + + Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); + PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; + Assert.assertEquals( + "select * from my_table where int_param = ? and double_param = ? and string_param = ? and" + + " date_param = ? 
and null_param = ?", + preparedStatementRequest.getPreparedStatement()); + Assert.assertEquals( + "select * from my_table where int_param = 1 and double_param = 2.0 and string_param =" + + " 'string_value' and date_param = '2000-01-01' and null_param = null", + preparedStatementRequest.getSql()); + Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); + Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); + Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(2) + instanceof PreparedStatementRequest.StringParameter); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(3) + instanceof PreparedStatementRequest.StringParameter); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(4) + instanceof PreparedStatementRequest.NullParameter); + } + + @Test + public void testGenerateSqlRequest_prearedStatementFromPayload2() { + // type not covered in above test case + String payload = + "{\n" + + " \"query\": \"select * from my_table where long_param = ? and float_param = ? and" + + " keyword_param = ? and boolean_param = ? 
and byte_param = ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"long\",\n" + + " \"value\": 1\n" + + " },\n" + + " {\n" + + " \"type\": \"float\",\n" + + " \"value\": \"2.0\"\n" + + " },\n" + + " {\n" + + " \"type\": \"keyword\",\n" + + " \"value\": \"string_value\"\n" + + " },\n" + + " {\n" + + " \"type\": \"boolean\",\n" + + " \"value\": true\n" + + " },\n" + + " {\n" + + " \"type\": \"byte\",\n" + + " \"value\": 91\n" + + " }\n" + + " ]\n" + + "}"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + + Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); + PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; + Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); + Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); + Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(2) + instanceof PreparedStatementRequest.StringParameter); + System.out.println(preparedStatementRequest.getParameters().get(3)); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(3).getValue() instanceof Boolean); + Assert.assertTrue(preparedStatementRequest.getParameters().get(4).getValue() instanceof Long); + } + + @Test(expected = IllegalArgumentException.class) + public void testGenerateSqlRequest_unsupportedHttpMethod() { + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.PUT); + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + } + + @Test(expected = IllegalArgumentException.class) + public void testGenerateSqlRequest_invalidJson() { + String payload = "{\n" + " \"query\": \"select * from my_table where 
param1 = ?\",\n"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + } + + @Test(expected = IllegalArgumentException.class) + public void testGenerateSqlRequest_unsupportedType() { + String payload = + "{\n" + + " \"query\": \"select * from my_table where param1 = ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"unsupported_type\",\n" + + " \"value\": 1\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"string_value\"\n" + + " }\n" + + " ]\n" + + "}"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java index 103d43d95c..3c47832761 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertEquals; @@ -20,52 +19,52 @@ import org.opensearch.sql.legacy.request.SqlRequestParam; public class SqlRequestParamTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void shouldReturnTrueIfPrettyParamsIsTrue() { - assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "true"))); - } + @Test + public void shouldReturnTrueIfPrettyParamsIsTrue() { + 
assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "true"))); + } - @Test - public void shouldReturnTrueIfPrettyParamsIsEmpty() { - assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, ""))); - } + @Test + public void shouldReturnTrueIfPrettyParamsIsEmpty() { + assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, ""))); + } - @Test - public void shouldReturnFalseIfNoPrettyParams() { - assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of())); - } + @Test + public void shouldReturnFalseIfNoPrettyParams() { + assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of())); + } - @Test - public void shouldReturnFalseIfPrettyParamsIsUnknownValue() { - assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "unknown"))); - } + @Test + public void shouldReturnFalseIfPrettyParamsIsUnknownValue() { + assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "unknown"))); + } - @Test - public void shouldReturnJSONIfFormatParamsIsJSON() { - assertEquals(Format.JSON, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "json"))); - } + @Test + public void shouldReturnJSONIfFormatParamsIsJSON() { + assertEquals( + Format.JSON, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "json"))); + } - @Test - public void shouldReturnDefaultFormatIfNoFormatParams() { - assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of())); - } + @Test + public void shouldReturnDefaultFormatIfNoFormatParams() { + assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of())); + } - @Test - public void shouldThrowExceptionIfFormatParamsIsEmpty() { - exceptionRule.expect(IllegalArgumentException.class); - exceptionRule.expectMessage("Failed to create executor due to unknown response format: "); + @Test + public void shouldThrowExceptionIfFormatParamsIsEmpty() { + exceptionRule.expect(IllegalArgumentException.class); + 
exceptionRule.expectMessage("Failed to create executor due to unknown response format: "); - assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, ""))); - } + assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, ""))); + } - @Test - public void shouldThrowExceptionIfFormatParamsIsNotSupported() { - exceptionRule.expect(IllegalArgumentException.class); - exceptionRule.expectMessage("Failed to create executor due to unknown response format: xml"); + @Test + public void shouldThrowExceptionIfFormatParamsIsNotSupported() { + exceptionRule.expect(IllegalArgumentException.class); + exceptionRule.expectMessage("Failed to create executor due to unknown response format: xml"); - SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "xml")); - } + SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "xml")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java index b2d13f3ead..27b8e7f2c6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -17,196 +16,154 @@ public class StringOperatorsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - @Test - public void substringTest() { - String query = "SELECT substring(lastname, 2, 1) FROM accounts WHERE substring(lastname, 2, 1) = 'a' " + - "GROUP BY substring(lastname, 2, 1) ORDER BY substring(lastname, 2, 1)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - 
"doc['lastname'].value.substring(1, end)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.substring(1, end)" - ) - ); - } - - @Test - public void substringIndexOutOfBoundTest() { - String query = "SELECT substring('sampleName', 0, 20) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "def end = (int) Math.min(0 + 20, 'sampleName'.length())" - ) - ); - } - - @Test - public void lengthTest() { - String query = "SELECT length(lastname) FROM accounts WHERE length(lastname) = 5 " + - "GROUP BY length(lastname) ORDER BY length(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.length()" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.length()" - ) - ); - } - - @Test - public void replaceTest() { - String query = "SELECT replace(lastname, 'a', 'A') FROM accounts WHERE replace(lastname, 'a', 'A') = 'aba' " + - "GROUP BY replace(lastname, 'a', 'A') ORDER BY replace(lastname, 'a', 'A')"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.replace('a','A')" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.replace('a','A')" - ) - ); - } - - @Test - public void locateTest() { - String query = "SELECT locate('a', lastname, 1) FROM accounts WHERE 
locate('a', lastname, 1) = 4 " + - "GROUP BY locate('a', lastname, 1) ORDER BY locate('a', lastname, 1)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.indexOf('a',0)+1" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.indexOf('a',0)+1" - ) - ); - } - - @Test - public void ltrimTest() { - String query = "SELECT ltrim(lastname) FROM accounts WHERE ltrim(lastname) = 'abc' " + - "GROUP BY ltrim(lastname) ORDER BY ltrim(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - } - - @Test - public void rtrimTest() { - String query = "SELECT rtrim(lastname) FROM accounts WHERE rtrim(lastname) = 'cba' " + - "GROUP BY rtrim(lastname) ORDER BY rtrim(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - } - - @Test - public void asciiTest() { - String query = "SELECT ascii(lastname) FROM accounts WHERE ascii(lastname) = 108 " + - "GROUP BY ascii(lastname) ORDER BY 
ascii(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "(int) doc['lastname'].value.charAt(0)" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "(int) doc['lastname'].value.charAt(0)" - ) - ); - } - - @Test - public void left() { - String query = "SELECT left(lastname, 1) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.substring(0, len)" - ) - ); - } - - @Test - public void right() { - String query = "SELECT right(lastname, 2) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.substring(start)" - ) - ); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + @Test + public void substringTest() { + String query = + "SELECT substring(lastname, 2, 1) FROM accounts WHERE substring(lastname, 2, 1) = 'a' " + + "GROUP BY substring(lastname, 2, 1) ORDER BY substring(lastname, 2, 1)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.substring(1, end)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "doc['lastname'].value.substring(1, end)")); + } + + @Test + public void substringIndexOutOfBoundTest() { + String query = "SELECT substring('sampleName', 0, 20) FROM accounts"; + ScriptField scriptField = 
CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "def end = (int) Math.min(0 + 20, 'sampleName'.length())")); + } + + @Test + public void lengthTest() { + String query = + "SELECT length(lastname) FROM accounts WHERE length(lastname) = 5 " + + "GROUP BY length(lastname) ORDER BY length(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "doc['lastname'].value.length()")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "doc['lastname'].value.length()")); + } + + @Test + public void replaceTest() { + String query = + "SELECT replace(lastname, 'a', 'A') FROM accounts WHERE replace(lastname, 'a', 'A') = 'aba'" + + " GROUP BY replace(lastname, 'a', 'A') ORDER BY replace(lastname, 'a', 'A')"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.replace('a','A')")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "doc['lastname'].value.replace('a','A')")); + } + + @Test + public void locateTest() { + String query = + "SELECT locate('a', lastname, 1) FROM accounts WHERE locate('a', lastname, 1) = 4 " + + "GROUP BY locate('a', lastname, 1) ORDER BY locate('a', lastname, 1)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.indexOf('a',0)+1")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, 
"doc['lastname'].value.indexOf('a',0)+1")); + } + + @Test + public void ltrimTest() { + String query = + "SELECT ltrim(lastname) FROM accounts WHERE ltrim(lastname) = 'abc' " + + "GROUP BY ltrim(lastname) ORDER BY ltrim(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + } + + @Test + public void rtrimTest() { + String query = + "SELECT rtrim(lastname) FROM accounts WHERE rtrim(lastname) = 'cba' " + + "GROUP BY rtrim(lastname) ORDER BY rtrim(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + } + + @Test + public void asciiTest() { + String query = + "SELECT ascii(lastname) FROM accounts WHERE ascii(lastname) = 108 " + + "GROUP BY ascii(lastname) ORDER BY ascii(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "(int) doc['lastname'].value.charAt(0)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "(int) doc['lastname'].value.charAt(0)")); + } + + @Test + public void left() { + String query = "SELECT left(lastname, 1) FROM 
accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.substring(0, len)")); + } + + @Test + public void right() { + String query = "SELECT right(lastname, 2) FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.substring(start)")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java index e7df57ce31..de6f2c8dda 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -25,58 +24,60 @@ import org.opensearch.sql.legacy.util.TestsConstants; import org.opensearch.sql.legacy.utils.StringUtils; - public class WhereWithBoolConditionTest { - @Test - public void whereWithBoolCompilationTest() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - query(StringUtils.format("SELECT * FROM %s WHERE male = false", TestsConstants.TEST_INDEX_BANK)); - } + @Test + public void whereWithBoolCompilationTest() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + query( + StringUtils.format("SELECT * FROM %s WHERE male = false", TestsConstants.TEST_INDEX_BANK)); + } - @Test - public void selectAllTest() - throws SQLFeatureNotSupportedException, SqlParseException, IOException, - SQLFeatureDisabledException { - String expectedOutput = Files.toString( - new File(getResourcePath() + 
"src/test/resources/expectedOutput/select_where_true.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + @Test + public void selectAllTest() + throws SQLFeatureNotSupportedException, + SqlParseException, + IOException, + SQLFeatureDisabledException { + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/select_where_true.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); - assertThat(removeSpaces( - query( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true", - TestsConstants.TEST_INDEX_BANK)) - ), - equalTo(removeSpaces(expectedOutput)) - ); - } + assertThat( + removeSpaces( + query( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = true", + TestsConstants.TEST_INDEX_BANK))), + equalTo(removeSpaces(expectedOutput))); + } - private String query(String query) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - return explain(query); - } + private String query(String query) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + return explain(query); + } - private String explain(String sql) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - Client mockClient = Mockito.mock(Client.class); - CheckScriptContents.stubMockClient(mockClient); - QueryAction queryAction = OpenSearchActionFactory.create(mockClient, sql); - return queryAction.explain().explain(); - } + private String explain(String sql) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + Client mockClient = Mockito.mock(Client.class); + CheckScriptContents.stubMockClient(mockClient); + QueryAction queryAction = OpenSearchActionFactory.create(mockClient, sql); + return queryAction.explain().explain(); + } - private String removeSpaces(String s) { - return s.replaceAll("\\s+", ""); - } + private String removeSpaces(String s) { + return 
s.replaceAll("\\s+", ""); + } - private String getResourcePath() { - String projectRoot = System.getProperty("project.root"); - if ( projectRoot!= null && projectRoot.trim().length() > 0) { - return projectRoot.trim() + "/"; - } else { - return ""; - } + private String getResourcePath() { + String projectRoot = System.getProperty("project.root"); + if (projectRoot != null && projectRoot.trim().length() > 0) { + return projectRoot.trim() + "/"; + } else { + return ""; } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java index 04196bab0a..c8582ecb05 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -21,108 +20,98 @@ @RunWith(MockitoJUnitRunner.class) public class UnaryExpressionTest extends ExpressionTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void absShouldPass() { - assertEquals(2.0d, apply(ScalarOperation.ABS, literal(doubleValue(-2d)))); - } - - @Test - public void asinShouldPass() { - assertEquals(0.1001674211615598d, apply(ScalarOperation.ASIN, literal(doubleValue(0.1d)))); - } - - @Test - public void atanShouldPass() { - assertEquals(1.1071487177940904d, apply(ScalarOperation.ATAN, literal(doubleValue(2d)))); - } - - @Test - public void tanShouldPass() { - assertEquals(-2.185039863261519, apply(ScalarOperation.TAN, literal(doubleValue(2d)))); - } - - @Test - public void atan2ShouldPass() { - assertEquals(1.1071487177940904d, - apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), literal(doubleValue(1d)))); - } - - 
@Test - public void cbrtShouldPass() { - assertEquals(1.2599210498948732d, - apply(ScalarOperation.CBRT, literal(doubleValue(2d)))); - } - - @Test - public void ceilShouldPass() { - assertEquals(3.0d, - apply(ScalarOperation.CEIL, literal(doubleValue(2.1d)))); - } - - @Test - public void floorShouldPass() { - assertEquals(2.0d, - apply(ScalarOperation.FLOOR, literal(doubleValue(2.1d)))); - } - - @Test - public void cosShouldPass() { - assertEquals(-0.4161468365471424d, - apply(ScalarOperation.COS, literal(doubleValue(2d)))); - } - - @Test - public void coshShouldPass() { - assertEquals(3.7621956910836314d, - apply(ScalarOperation.COSH, literal(doubleValue(2d)))); - } - - @Test - public void expShouldPass() { - assertEquals(7.38905609893065d, - apply(ScalarOperation.EXP, literal(doubleValue(2d)))); - } - - @Test - public void lnShouldPass() { - assertEquals(0.6931471805599453d, - apply(ScalarOperation.LN, literal(doubleValue(2d)))); - } - - @Test - public void logShouldPass() { - assertEquals(0.6931471805599453d, - apply(ScalarOperation.LOG, literal(doubleValue(2d)))); - } - - @Test - public void log2ShouldPass() { - assertEquals(1.0d, - apply(ScalarOperation.LOG2, literal(doubleValue(2d)))); - } - - @Test - public void log10ShouldPass() { - assertEquals(0.3010299956639812, - apply(ScalarOperation.LOG10, literal(doubleValue(2d)))); - } - - @Test - public void absWithStringShouldThrowException() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unexpected operation type: ABS(STRING_VALUE)"); - - apply(ScalarOperation.ABS, literal(stringValue("stringValue"))); - } - - @Test - public void atan2WithStringShouldThrowException() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unexpected operation type: ATAN2(DOUBLE_VALUE,STRING_VALUE)"); - - apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), literal(stringValue("stringValue"))); - } + @Rule public ExpectedException exceptionRule = 
ExpectedException.none(); + + @Test + public void absShouldPass() { + assertEquals(2.0d, apply(ScalarOperation.ABS, literal(doubleValue(-2d)))); + } + + @Test + public void asinShouldPass() { + assertEquals(0.1001674211615598d, apply(ScalarOperation.ASIN, literal(doubleValue(0.1d)))); + } + + @Test + public void atanShouldPass() { + assertEquals(1.1071487177940904d, apply(ScalarOperation.ATAN, literal(doubleValue(2d)))); + } + + @Test + public void tanShouldPass() { + assertEquals(-2.185039863261519, apply(ScalarOperation.TAN, literal(doubleValue(2d)))); + } + + @Test + public void atan2ShouldPass() { + assertEquals( + 1.1071487177940904d, + apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), literal(doubleValue(1d)))); + } + + @Test + public void cbrtShouldPass() { + assertEquals(1.2599210498948732d, apply(ScalarOperation.CBRT, literal(doubleValue(2d)))); + } + + @Test + public void ceilShouldPass() { + assertEquals(3.0d, apply(ScalarOperation.CEIL, literal(doubleValue(2.1d)))); + } + + @Test + public void floorShouldPass() { + assertEquals(2.0d, apply(ScalarOperation.FLOOR, literal(doubleValue(2.1d)))); + } + + @Test + public void cosShouldPass() { + assertEquals(-0.4161468365471424d, apply(ScalarOperation.COS, literal(doubleValue(2d)))); + } + + @Test + public void coshShouldPass() { + assertEquals(3.7621956910836314d, apply(ScalarOperation.COSH, literal(doubleValue(2d)))); + } + + @Test + public void expShouldPass() { + assertEquals(7.38905609893065d, apply(ScalarOperation.EXP, literal(doubleValue(2d)))); + } + + @Test + public void lnShouldPass() { + assertEquals(0.6931471805599453d, apply(ScalarOperation.LN, literal(doubleValue(2d)))); + } + + @Test + public void logShouldPass() { + assertEquals(0.6931471805599453d, apply(ScalarOperation.LOG, literal(doubleValue(2d)))); + } + + @Test + public void log2ShouldPass() { + assertEquals(1.0d, apply(ScalarOperation.LOG2, literal(doubleValue(2d)))); + } + + @Test + public void log10ShouldPass() { + 
assertEquals(0.3010299956639812, apply(ScalarOperation.LOG10, literal(doubleValue(2d)))); + } + + @Test + public void absWithStringShouldThrowException() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unexpected operation type: ABS(STRING_VALUE)"); + + apply(ScalarOperation.ABS, literal(stringValue("stringValue"))); + } + + @Test + public void atan2WithStringShouldThrowException() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unexpected operation type: ATAN2(DOUBLE_VALUE,STRING_VALUE)"); + + apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), literal(stringValue("stringValue"))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java index 354c6ff8a1..38eefaaec1 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.hamcrest.Matchers.equalTo; @@ -56,1366 +55,1460 @@ public class SqlParserTest { - private SqlParser parser; - - @Before - public void init() { - parser = new SqlParser(); - } - - @Rule - public ExpectedException thrown= ExpectedException.none(); - - @Test - public void whereConditionLeftFunctionRightPropertyGreatTest() throws Exception { - - String query = "SELECT " + - " * from " + - TEST_INDEX_ACCOUNT + "/account " + - " where floor(split(address,' ')[0]+0) > b limit 1000 "; - - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - Assert.assertTrue((where.getWheres().size() == 1)); - Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) (((Condition) 
(where.getWheres().get(0))).getValue()); - - Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); - Pattern pattern = Pattern.compile("floor_\\d+ > doc\\['b'].value"); - java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); - Assert.assertTrue(matcher.find()); - } - - @Test() - public void failingQueryTest() throws SqlParseException { - thrown.expect(SqlFeatureNotImplementedException.class); - thrown.expectMessage( - "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) - MIN(FlightDelayMin)"); - - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - min(FlightDelayMin)" + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n")); - - AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); - String elasticDsl = queryAction.explain().explain(); - } - - @Test() - public void failingQueryTest2() throws SqlParseException { - thrown.expect(SqlFeatureNotImplementedException.class); - thrown.expectMessage( - "Function calls of form 'log(MAX(...))' are not implemented yet"); - - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, log(max(FlightDelayMin))" + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n")); - - AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); - String elasticDsl = queryAction.explain().explain(); - } - - @Test() - public void failingQueryWithHavingTest() throws SqlParseException { - thrown.expect(SqlFeatureNotImplementedException.class); - thrown.expectMessage( - "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) - MIN(FlightDelayMin)"); - - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - 
min(FlightDelayMin) " + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n" + - " HAVING max(FlightDelayMin) - min(FlightDelayMin)) * count(FlightDelayMin) + 14 > 100")); - - AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); - String elasticDsl = queryAction.explain().explain(); - } - - @Test() - @Ignore("Github issues: https://github.com/opendistro-for-elasticsearch/sql/issues/194, " + - "https://github.com/opendistro-for-elasticsearch/sql/issues/234") - public void failingQueryWithHavingTest2() throws SqlParseException { - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) " + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n" + - " HAVING max(FlightDelayMin) - min(FlightDelayMin) > 100")); - - AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); - - String elasticDsl = queryAction.explain().explain(); - } - - @Test - public void whereConditionLeftFunctionRightFunctionEqualTest() throws Exception { - - String query = "SELECT " + - " * from " + - TEST_INDEX_ACCOUNT + "/account " + - " where floor(split(address,' ')[0]+0) = floor(split(address,' ')[0]+0) limit 1000 "; - - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - Assert.assertTrue((where.getWheres().size() == 1)); - Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); - Pattern pattern = Pattern.compile("floor_\\d+ == floor_\\d+"); - java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); - Assert.assertTrue(matcher.find()); - } - - @Test - public void 
whereConditionVariableRightVariableEqualTest() throws Exception { - - String query = "SELECT " + - " * from " + - TEST_INDEX_ACCOUNT + "/account " + - " where a = b limit 1000 "; - - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - Assert.assertTrue((where.getWheres().size() == 1)); - Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - Assert.assertTrue(scriptFilter.getScript().contains("doc['a'].value == doc['b'].value")); - } - - @Test - public void joinParseCheckSelectedFieldsSplit() throws SqlParseException { - String query = "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname " + - " AND d.age < a.age " + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - - List t1Fields = joinSelect.getFirstTable().getSelectedFields(); - Assert.assertEquals(t1Fields.size(), 3); - Assert.assertTrue(fieldExist(t1Fields, "firstname")); - Assert.assertTrue(fieldExist(t1Fields, "lastname")); - Assert.assertTrue(fieldExist(t1Fields, "gender")); - - List t2Fields = joinSelect.getSecondTable().getSelectedFields(); - Assert.assertEquals(t2Fields.size(), 2); - Assert.assertTrue(fieldExist(t2Fields, "holdersName")); - Assert.assertTrue(fieldExist(t2Fields, "name")); - } - - @Test - public void joinParseCheckConnectedFields() throws SqlParseException { - String query = "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname " + - " AND d.age < 
a.age " + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - - List t1Fields = joinSelect.getFirstTable().getConnectedFields(); - Assert.assertEquals(t1Fields.size(), 2); - Assert.assertTrue(fieldExist(t1Fields, "firstname")); - Assert.assertTrue(fieldExist(t1Fields, "age")); - - List t2Fields = joinSelect.getSecondTable().getConnectedFields(); - Assert.assertEquals(t2Fields.size(), 2); - Assert.assertTrue(fieldExist(t2Fields, "holdersName")); - Assert.assertTrue(fieldExist(t2Fields, "age")); - } - - private boolean fieldExist(List fields, String fieldName) { - for (Field field : fields) - if (field.getName().equals(fieldName)) return true; - - return false; - } - - - @Test - public void joinParseFromsAreSplitedCorrectly() throws SqlParseException { - String query = "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - " a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - " d on d.holdersName = a.firstname" + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List t1From = joinSelect.getFirstTable().getFrom(); - - Assert.assertNotNull(t1From); - Assert.assertEquals(1, t1From.size()); - Assert.assertTrue(checkFrom(t1From.get(0), TestsConstants.TEST_INDEX_ACCOUNT, "a")); - - List t2From = joinSelect.getSecondTable().getFrom(); - Assert.assertNotNull(t2From); - Assert.assertEquals(1, t2From.size()); - Assert.assertTrue(checkFrom(t2From.get(0), TEST_INDEX_DOG, "d")); - } - - private boolean checkFrom(From from, String index, String alias) { - return from.getAlias().equals(alias) && from.getIndex().equals(index); - } - - @Test - public void joinParseConditionsTestOneCondition() throws SqlParseException { - String query = "SELECT a.*, 
a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname" + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(1, conditions.size()); - Assert.assertTrue("condition not exist: d.holdersName = a.firstname", - conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); - } - - @Test - public void joinParseConditionsTestTwoConditions() throws SqlParseException { - String query = "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname " + - " AND d.age < a.age " + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(2, conditions.size()); - Assert.assertTrue("condition not exist: d.holdersName = a.firstname", - conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); - Assert.assertTrue("condition not exist: d.age < a.age", - conditionExist(conditions, "d.age", "a.age", Condition.OPERATOR.LT)); - } - - - @Test - public void joinSplitWhereCorrectly() throws SqlParseException { - String query = "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname" + - " WHERE a.firstname = 
'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - String s1Where = joinSelect.getFirstTable().getWhere().toString(); - Assert.assertEquals("AND ( AND firstname EQ eliran, AND ( OR age GT 10, OR balance GT 2000 ) ) ", s1Where); - String s2Where = joinSelect.getSecondTable().getWhere().toString(); - Assert.assertEquals("AND age GT 1", s2Where); - } - - @Test - public void joinConditionWithComplexObjectComparisonRightSide() throws SqlParseException { - String query = String.format(Locale.ROOT, "select c.name.firstname,c.parents.father , h.name,h.words " + - "from %s/gotCharacters c " + - "JOIN %s/gotCharacters h " + - "on h.name = c.name.lastname " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(1, conditions.size()); - Assert.assertTrue("condition not exist: h.name = c.name.lastname", - conditionExist(conditions, "h.name", "c.name.lastname", Condition.OPERATOR.EQ)); - } - - @Test - public void joinConditionWithComplexObjectComparisonLeftSide() throws SqlParseException { - String query = String.format(Locale.ROOT, - "select c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(1, conditions.size()); - Assert.assertTrue("condition not exist: c.name.lastname = h.name", - conditionExist(conditions, "c.name.lastname", "h.name", 
Condition.OPERATOR.EQ)); - } - - - @Test - public void limitHintsOnJoin() throws SqlParseException { - String query = String.format(Locale.ROOT,"select /*! JOIN_TABLES_LIMIT(1000,null) */ " + - "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + - "use KEY (termsFilter) " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List hints = joinSelect.getHints(); - Assert.assertNotNull(hints); - Assert.assertEquals("hints size was not 1", 1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.JOIN_LIMIT, hint.getType()); - Object[] params = hint.getParams(); - Assert.assertNotNull(params); - Assert.assertEquals("params size was not 2", 2, params.length); - Assert.assertEquals(1000, params[0]); - Assert.assertNull(params[1]); - } - - @Test - public void hashTermsFilterHint() throws SqlParseException { - String query = String.format(Locale.ROOT, "select /*! HASH_WITH_TERMS_FILTER*/ " + - "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + - "use KEY (termsFilter) " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List hints = joinSelect.getHints(); - Assert.assertNotNull(hints); - Assert.assertEquals("hints size was not 1", 1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, hint.getType()); - } - - @Test - public void multipleHints() throws SqlParseException { - String query = String.format(Locale.ROOT, "select /*! HASH_WITH_TERMS_FILTER*/ " + - "/*! JOIN_TABLES_LIMIT(1000,null) */ " + - " /*! 
JOIN_TABLES_LIMIT(100,200) */ " + - "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + - "use KEY (termsFilter) " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List hints = joinSelect.getHints(); - - Assert.assertNotNull(hints); - Assert.assertEquals("hints size was not 3", 3, hints.size()); - Hint firstHint = hints.get(0); - Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, firstHint.getType()); - Hint secondHint = hints.get(1); - Assert.assertEquals(HintType.JOIN_LIMIT, secondHint.getType()); - Assert.assertEquals(1000, secondHint.getParams()[0]); - Assert.assertNull(secondHint.getParams()[1]); - Hint thirdHint = hints.get(2); - Assert.assertEquals(100, thirdHint.getParams()[0]); - Assert.assertEquals(200, thirdHint.getParams()[1]); - Assert.assertEquals(HintType.JOIN_LIMIT, thirdHint.getType()); - } - - @Test - public void searchWithOdbcTimeFormatParse() throws SqlParseException { - String query = String.format(Locale.ROOT, "SELECT insert_time FROM %s/odbc " + - "WHERE insert_time < {ts '2015-03-15 00:00:00.000'}", TEST_INDEX_ODBC); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("{ts '2015-03-15 00:00:00.000'}", condition.getValue().toString()); - - } - - @Test - public void indexWithSpacesWithinBrackets() throws SqlParseException { - String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fromList = select.getFrom(); - Assert.assertEquals(1, fromList.size()); - From from = fromList.get(0); 
- Assert.assertEquals("Test Index", from.getIndex()); - } - - @Test - public void indexWithSpacesWithTypeWithinBrackets() throws SqlParseException { - String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fromList = select.getFrom(); - Assert.assertEquals(1, fromList.size()); - From from = fromList.get(0); - Assert.assertEquals("Test Index", from.getIndex()); - } - - - @Test - public void fieldWithSpacesWithinBrackets() throws SqlParseException { - String query = "SELECT insert_time FROM name/type1 WHERE [first name] = 'Name'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List where = select.getWhere().getWheres(); - Assert.assertEquals(1, where.size()); - Condition condition = (Condition) where.get(0); - Assert.assertEquals("first name", condition.getName()); - Assert.assertEquals("Name", condition.getValue()); - } - - @Test - public void twoIndices() throws SqlParseException { - String query = "SELECT insert_time FROM index1, index2 WHERE age > 3"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fromList = select.getFrom(); - Assert.assertEquals(2, fromList.size()); - From from1 = fromList.get(0); - From from2 = fromList.get(1); - boolean preservedOrder = from1.getIndex().equals("index1") - && from2.getIndex().equals("index2"); - boolean notPreservedOrder = from1.getIndex().equals("index2") - && from2.getIndex().equals("index1"); - Assert.assertTrue(preservedOrder || notPreservedOrder); - } - - @Test - public void fieldWithATcharAtWhere() throws SqlParseException { - String query = "SELECT * FROM index/type where @field = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - 
Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("@field", condition.getName()); - } - - @Test - public void fieldWithATcharAtSelect() throws SqlParseException { - String query = "SELECT @field FROM index/type where field2 = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertEquals(field.getName(), "@field"); - } - - @Test - public void fieldWithATcharAtSelectOnAgg() throws SqlParseException { - String query = "SELECT max(@field) FROM index/type where field2 = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertEquals("MAX(@field)", field.toString()); - } - - @Test - public void fieldWithColonCharAtSelect() throws SqlParseException { - String query = "SELECT a:b FROM index/type where field2 = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertEquals(field.getName(), "a:b"); - } - - @Test - public void fieldWithColonCharAtWhere() throws SqlParseException { - String query = "SELECT * FROM index/type where a:b = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("a:b", condition.getName()); - } - - @Test - public void fieldIsNull() throws SqlParseException { - String query = "SELECT * FROM index/type where a IS NOT NULL"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("a", condition.getName()); - Assert.assertNull(condition.getValue()); - } - - @Test - public void innerQueryTest() throws SqlParseException { - String query = String.format(Locale.ROOT, "select * from %s/dog where holdersName " + - "IN (select firstname from %s/account where firstname = 'eliran')", - TEST_INDEX_DOG, TestsConstants.TEST_INDEX_ACCOUNT); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertTrue(select.containsSubQueries()); - Assert.assertEquals(1, select.getSubQueries().size()); - } - - @Test - public void inTermsSubQueryTest() throws SqlParseException { - String query = String.format(Locale.ROOT, "select * from %s/dog where " + - "holdersName = IN_TERMS (select firstname from %s/account where firstname = 'eliran')", - TEST_INDEX_DOG, TestsConstants.TEST_INDEX_ACCOUNT); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertTrue(select.containsSubQueries()); - Assert.assertEquals(1, select.getSubQueries().size()); - } - - - @Test - public void innerQueryTestTwoQueries() throws SqlParseException { - String query = String.format(Locale.ROOT, "select * from %s/dog where holdersName IN " + - "(select firstname from %s/account where firstname = 'eliran') and " + - "age IN (select name.ofHisName from %s/gotCharacters) ", - TEST_INDEX_DOG, TestsConstants.TEST_INDEX_ACCOUNT, TEST_INDEX_GAME_OF_THRONES); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertTrue(select.containsSubQueries()); - Assert.assertEquals(2, select.getSubQueries().size()); - } - - @Test - public void indexWithDotsAndHyphen() throws SqlParseException { - String query = "select * from 
data-2015.08.22"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertEquals(1, select.getFrom().size()); - Assert.assertEquals("data-2015.08.22", select.getFrom().get(0).getIndex()); - } - - @Test - public void indexNameWithDotAtTheStart() throws SqlParseException { - String query = "SELECT * FROM .opensearch_dashboards"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertEquals(".opensearch_dashboards", select.getFrom().get(0).getIndex()); - } - - @Test - public void indexWithSemiColons() throws SqlParseException { - String query = "select * from some;index"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertEquals(1, select.getFrom().size()); - Assert.assertEquals("some;index", select.getFrom().get(0).getIndex()); - } - - @Test - public void scriptFiledPlusLiteralTest() throws SqlParseException { - String query = "SELECT field1 + 3 FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField scriptMethod = (MethodField) field; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertTrue(scriptMethod.getParams().get(1).toString().contains("doc['field1'].value + 3")); - } - - @Test - public void scriptFieldPlusFieldTest() throws SqlParseException { - String query = "SELECT field1 + field2 FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof 
MethodField); - MethodField scriptMethod = (MethodField) field; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertTrue(scriptMethod.getParams().get(1).toString() - .contains("doc['field1'].value + doc['field2'].value")); - } - - - @Test - public void scriptLiteralPlusLiteralTest() throws SqlParseException { - String query = "SELECT 1 + 2 FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField scriptMethod = (MethodField) field; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertTrue(scriptMethod.getParams().get(1).toString().contains("1 + 2")); - } - - - @Test - public void explicitScriptOnAggregation() throws SqlParseException { - String query = "SELECT avg( script('add','doc[\\'field1\\'].value + doc[\\'field2\\'].value') )" + - " FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField avgMethodField = (MethodField) field; - Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); - Assert.assertEquals(1, avgMethodField.getParams().size()); - MethodField scriptMethod = (MethodField) avgMethodField.getParams().get(0).value; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertEquals("doc['field1'].value + doc['field2'].value", - scriptMethod.getParams().get(1).toString()); - } - - @Test - public void 
implicitScriptOnAggregation() throws SqlParseException { - String query = "SELECT avg(field(field1) + field(field2)) FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField avgMethodField = (MethodField) field; - Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); - Assert.assertEquals(1, avgMethodField.getParams().size()); - Assert.assertTrue(avgMethodField.getParams().get(0).value.toString().contains("doc['field1'].value")); - Assert.assertTrue(avgMethodField.getParams().get(0).value.toString().contains("doc['field2'].value")); - - } - - @Test - public void nestedFieldOnWhereNoPathSimpleField() throws SqlParseException { - String query = "select * from myIndex where nested(message.name) = 'hey'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Where where = select.getWhere().getWheres().get(0); - Assert.assertTrue("where should be condition", where instanceof Condition); - Condition condition = (Condition) where; - Assert.assertTrue("condition should be nested", condition.isNested()); - Assert.assertEquals("message", condition.getNestedPath()); - Assert.assertEquals("message.name", condition.getName()); - } - - - @Test - public void nestedFieldOnWhereNoPathComplexField() throws SqlParseException { - String query = "select * from myIndex where nested(message.moreNested.name) = 'hey'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Where where = select.getWhere().getWheres().get(0); - Assert.assertTrue("where should be condition", where instanceof Condition); - Condition condition = (Condition) where; - Assert.assertTrue("condition should be nested", condition.isNested()); - 
Assert.assertEquals("message.moreNested", condition.getNestedPath()); - Assert.assertEquals("message.moreNested.name", condition.getName()); - } - - - @Test - public void aggFieldWithAliasTableAliasShouldBeRemoved() throws SqlParseException { - String query = "select count(t.*) as counts,sum(t.size) from xxx/locs as t group by t.kk"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertThat(fields.size(), equalTo(2)); - Assert.assertEquals("COUNT(*)", fields.get(0).toString()); - Assert.assertEquals("SUM(size)", fields.get(1).toString()); - List> groups = select.getGroupBys(); - Assert.assertThat(groups.size(), equalTo(1)); - Assert.assertThat(groups.get(0).size(), equalTo(1)); - Assert.assertEquals("kk", groups.get(0).get(0).getName()); - } - - @Test - public void nestedFieldOnWhereGivenPath() throws SqlParseException { - String query = "select * from myIndex where nested(message.name,message) = 'hey'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Where where = select.getWhere().getWheres().get(0); - Assert.assertTrue("where should be condition", where instanceof Condition); - Condition condition = (Condition) where; - Assert.assertTrue("condition should be nested", condition.isNested()); - Assert.assertEquals("message", condition.getNestedPath()); - Assert.assertEquals("message.name", condition.getName()); - } - - @Test - public void nestedFieldOnGroupByNoPath() throws SqlParseException { - String query = "select * from myIndex group by nested(message.name)"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field field = select.getGroupBys().get(0).get(0); - Assert.assertTrue("condition should be nested", field.isNested()); - Assert.assertEquals("message", field.getNestedPath()); - Assert.assertEquals("message.name", field.getName()); - } - - @Test - 
public void nestedFieldOnGroupByWithPath() throws SqlParseException { - String query = "select * from myIndex group by nested(message.name,message)"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field field = select.getGroupBys().get(0).get(0); - Assert.assertTrue("condition should be nested", field.isNested()); - Assert.assertEquals("message", field.getNestedPath()); - Assert.assertEquals("message.name", field.getName()); - } - - @Test - public void filterAggTestNoAlias() throws SqlParseException { - String query = "select * from myIndex group by a , filter( a > 3 AND b='3' )"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List> groupBys = select.getGroupBys(); - Assert.assertEquals(1, groupBys.size()); - Field aAgg = groupBys.get(0).get(0); - Assert.assertEquals("a", aAgg.getName()); - Field field = groupBys.get(0).get(1); - Assert.assertTrue("filter field should be method field", field instanceof MethodField); - MethodField filterAgg = (MethodField) field; - Assert.assertEquals("filter", filterAgg.getName()); - Map params = filterAgg.getParamsAsMap(); - Assert.assertEquals(2, params.size()); - Object alias = params.get("alias"); - Assert.assertEquals("filter(a > 3 AND b = '3')@FILTER", alias); - - Assert.assertTrue(params.get("where") instanceof Where); - Where where = (Where) params.get("where"); - Assert.assertEquals(2, where.getWheres().size()); - } - - @Test - public void filterAggTestWithAlias() throws SqlParseException { - String query = "select * from myIndex group by a , filter(myFilter, a > 3 AND b='3' )"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List> groupBys = select.getGroupBys(); - Assert.assertEquals(1, groupBys.size()); - Field aAgg = groupBys.get(0).get(0); - Assert.assertEquals("a", aAgg.getName()); - Field field = groupBys.get(0).get(1); - Assert.assertTrue("filter 
field should be method field", field instanceof MethodField); - MethodField filterAgg = (MethodField) field; - Assert.assertEquals("filter", filterAgg.getName()); - Map params = filterAgg.getParamsAsMap(); - Assert.assertEquals(2, params.size()); - Object alias = params.get("alias"); - Assert.assertEquals("myFilter@FILTER", alias); - - Assert.assertTrue(params.get("where") instanceof Where); - Where where = (Where) params.get("where"); - Assert.assertEquals(2, where.getWheres().size()); - } - - - @Test - public void filterAggTestWithAliasAsString() throws SqlParseException { - String query = "select * from myIndex group by a , filter('my filter', a > 3 AND b='3' )"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List> groupBys = select.getGroupBys(); - Assert.assertEquals(1, groupBys.size()); - Field aAgg = groupBys.get(0).get(0); - Assert.assertEquals("a", aAgg.getName()); - Field field = groupBys.get(0).get(1); - Assert.assertTrue("filter field should be method field", field instanceof MethodField); - MethodField filterAgg = (MethodField) field; - Assert.assertEquals("filter", filterAgg.getName()); - Map params = filterAgg.getParamsAsMap(); - Assert.assertEquals(2, params.size()); - Object alias = params.get("alias"); - Assert.assertEquals("my filter@FILTER", alias); - - Assert.assertTrue(params.get("where") instanceof Where); - Where where = (Where) params.get("where"); - Assert.assertEquals(2, where.getWheres().size()); - } - - @Test - public void doubleOrderByTest() throws SqlParseException { - String query = "select * from indexName order by a asc, b desc"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List orderBys = select.getOrderBys(); - Assert.assertEquals(2, orderBys.size()); - Assert.assertEquals("a", orderBys.get(0).getName()); - Assert.assertEquals("ASC", orderBys.get(0).getType()); - - Assert.assertEquals("b", 
orderBys.get(1).getName()); - Assert.assertEquals("DESC", orderBys.get(1).getType()); - } - - @Test - public void parseJoinWithOneTableOrderByAttachToCorrectTable() throws SqlParseException { - String query = String.format(Locale.ROOT, "select c.name.firstname , d.words from %s/gotCharacters c " + - "JOIN %s/gotCharacters d on d.name = c.house " + - "order by c.name.firstname" - , TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - Assert.assertTrue("first table should be ordered", joinSelect.getFirstTable().isOrderdSelect()); - Assert.assertFalse("second table should not be ordered", joinSelect.getSecondTable().isOrderdSelect()); - - } - - @Test - public void parseJoinWithOneTableOrderByRemoveAlias() throws SqlParseException { - String query = String.format(Locale.ROOT, "select c.name.firstname , d.words from %s/gotCharacters c " + - "JOIN %s/gotCharacters d on d.name = c.house " + - "order by c.name.firstname" - , TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List orderBys = joinSelect.getFirstTable().getOrderBys(); - Assert.assertEquals(1, orderBys.size()); - Order order = orderBys.get(0); - Assert.assertEquals("name.firstname", order.getName()); - - } - - @Test - public void termsWithStringTest() throws SqlParseException { - String query = "select * from x where y = IN_TERMS('a','b')"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Object[] values = (Object[]) condition.getValue(); - Assert.assertEquals("a", values[0]); - Assert.assertEquals("b", values[1]); - } - - @Test - public void termWithStringTest() throws SqlParseException { - String query = "select * from x where y = TERM('a')"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition 
condition = (Condition) select.getWhere().getWheres().get(0); - Object[] values = (Object[]) condition.getValue(); - Assert.assertEquals("a", values[0]); - } - - @Test - public void complexNestedTest() throws SqlParseException { - String query = "select * from x where nested('y',y.b = 'a' and y.c = 'd') "; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Assert.assertEquals(Condition.OPERATOR.NESTED_COMPLEX, condition.getOPERATOR()); - Assert.assertEquals("y", condition.getName()); - Assert.assertTrue(condition.getValue() instanceof Where); - Where where = (Where) condition.getValue(); - Assert.assertEquals(2, where.getWheres().size()); - } - - @Test - public void scriptOnFilterNoParams() throws SqlParseException { - String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == 3') "; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); - Assert.assertNull(condition.getName()); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) condition.getValue(); - Assert.assertEquals("doc['field'].date.hourOfDay == 3", scriptFilter.getScript()); - Assert.assertFalse(scriptFilter.containsParameters()); - } - - @Test - public void scriptOnFilterWithParams() throws SqlParseException { - String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == x','x'=3) "; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); - Assert.assertNull(condition.getName()); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = 
(ScriptFilter) condition.getValue(); - Assert.assertEquals("doc['field'].date.hourOfDay == x", scriptFilter.getScript()); - Assert.assertTrue(scriptFilter.containsParameters()); - Map args = scriptFilter.getArgs(); - Assert.assertEquals(1, args.size()); - Assert.assertTrue(args.containsKey("x")); - Assert.assertEquals(3, args.get("x")); - - } - - @Test - public void fieldsAsNumbersOnWhere() throws SqlParseException { - String query = "select * from x where ['3'] > 2"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Where where = wheres.get(0); - Assert.assertEquals(Condition.class, where.getClass()); - Condition condition = (Condition) where; - Assert.assertEquals("3", condition.getName()); - } - - @Test - public void likeTestWithEscaped() throws SqlParseException { - String query = "select * from x where name like '&UNDERSCOREhey_%&PERCENT'"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - BoolQueryBuilder explan = QueryMaker.explain(select.getWhere()); - String filterAsString = explan.toString(); - Assert.assertTrue(filterAsString.contains("_hey?*%")); - } - - - @Test - public void complexNestedAndOtherQuery() throws SqlParseException { - String query = "select * from x where nested('path',path.x=3) and y=3"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(2, wheres.size()); - Assert.assertEquals("AND path NESTED_COMPLEX AND ( AND path.x EQ 3 ) ", wheres.get(0).toString()); - Assert.assertEquals("AND y EQ 3", wheres.get(1).toString()); - } - - - @Test - public void numberEqualConditionWithoutProperty() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 = 1"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - 
Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "1 == 1"); - } - - @Test - public void numberGreatConditionWithoutProperty() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 > 1"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "1 > 1"); - } - - @Test - public void stringEqualConditionWithoutProperty() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 'b'"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "'a' == 'b'"); - } - - @Test - public void propertyEqualCondition() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a = b"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "doc['a'].value == doc['b'].value"); - } - - - @Test - public void propertyWithTableAliasEqualCondition() 
throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select t.* from xxx/locs where t.a = t.b"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "doc['a'].value == doc['b'].value"); - } - - @Test - public void propertyGreatCondition() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a > b"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "doc['a'].value > doc['b'].value"); - } - - @Test - public void stringAndNumberEqualConditionWithoutProperty() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 1"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "'a' == 1"); - } - - - @Test - public void caseWhenTest() throws SqlParseException { - String query = "Select k,\n" + - "Case \n" + - "When floor(testBase)>=90 then 'A'\n" + - "When testBase = '80' then 'B'\n" + - "Else 'E' end as testBaseLevel\n" + - "from t"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - for (Field field : 
select.getFields()) { - if (field instanceof MethodField) { - MethodField methodField = (MethodField) field; - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals(alias, "testBaseLevel"); - Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); - Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - - AtomicInteger docValueCounter = new AtomicInteger(); - - while (docValue.find()) { - docValueCounter.incrementAndGet(); - } - - Assert.assertThat(docValueCounter.get(), equalTo(2)); - Assert.assertThat(number.groupCount(), equalTo(2)); - - } - } - - } - - @Test - public void caseWhenTestWithFieldElseExpr() throws SqlParseException { - String query = "Select k,\n" + - "Case \n" + - "When floor(testBase)>=90 then 'A'\n" + - "When testBase = '80' then 'B'\n" + - "Else testBase end as testBaseLevel\n" + - "from t"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - for (Field field : select.getFields()) { - if (field instanceof MethodField) { - MethodField methodField = (MethodField) field; - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals(alias, "testBaseLevel"); - Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); - Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - - AtomicInteger docValueCounter = new AtomicInteger(); - - while (docValue.find()) { - docValueCounter.incrementAndGet(); - } - - Assert.assertThat(docValueCounter.get(), equalTo(3)); - Assert.assertThat(number.groupCount(), equalTo(2)); - - } - } - - } - - @Test - public void caseWhenTestWithouhtElseExpr() throws SqlParseException { - String query = "Select k,\n" + - "Case \n" + - "When floor(testBase)>=90 then 'A'\n" + - "When testBase 
= '80' then 'B'\n" + - "end as testBaseLevel\n" + - "from t"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - for (Field field : select.getFields()) { - if (field instanceof MethodField) { - MethodField methodField = (MethodField) field; - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals(alias, "testBaseLevel"); - - Matcher docValue = Pattern.compile("\\{\\s+null\\s+}").matcher(scriptCode); - - AtomicInteger docValueCounter = new AtomicInteger(); - - while (docValue.find()) { - docValueCounter.incrementAndGet(); - } - - Assert.assertThat(docValueCounter.get(), equalTo(1)); - - } - } - - } - - @Test - public void caseWhenSwitchTest() { - String query = "SELECT CASE weather " - + "WHEN 'Sunny' THEN '0' " - + "WHEN 'Rainy' THEN '1' " - + "ELSE 'NA' END AS case " - + "FROM t"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - Assert.assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['weather'].value=='Sunny'" - ) - ); - } - - @Test - public void castToIntTest() throws Exception { - String query = "select cast(age as int) from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - + private SqlParser parser; + + @Before + public void init() { + parser = new SqlParser(); + } + + @Rule public ExpectedException thrown = ExpectedException.none(); + + @Test + public void whereConditionLeftFunctionRightPropertyGreatTest() throws Exception { + + String query = + "SELECT " + + " * from " + + TEST_INDEX_ACCOUNT + + "/account " + + " where 
floor(split(address,' ')[0]+0) > b limit 1000 "; + + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); + Assert.assertTrue((where.getWheres().size() == 1)); + Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = + (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + + Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); + Pattern pattern = Pattern.compile("floor_\\d+ > doc\\['b'].value"); + java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); + Assert.assertTrue(matcher.find()); + } + + @Test() + public void failingQueryTest() throws SqlParseException { + thrown.expect(SqlFeatureNotImplementedException.class); + thrown.expectMessage( + "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) -" + + " MIN(FlightDelayMin)"); + + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - min(FlightDelayMin)" + + " FROM opensearch_dashboards_sample_data_flights\n" + + " GROUP BY DestCountry, dayOfWeek\n")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + String elasticDsl = queryAction.explain().explain(); + } + + @Test() + public void failingQueryTest2() throws SqlParseException { + thrown.expect(SqlFeatureNotImplementedException.class); + thrown.expectMessage("Function calls of form 'log(MAX(...))' are not implemented yet"); + + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, log(max(FlightDelayMin))" + + " FROM opensearch_dashboards_sample_data_flights\n" + + " GROUP BY DestCountry, dayOfWeek\n")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + String elasticDsl = queryAction.explain().explain(); + } + + @Test() + 
public void failingQueryWithHavingTest() throws SqlParseException { + thrown.expect(SqlFeatureNotImplementedException.class); + thrown.expectMessage( + "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) -" + + " MIN(FlightDelayMin)"); + + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - min(FlightDelayMin) " + + " FROM opensearch_dashboards_sample_data_flights\n" + + " GROUP BY DestCountry, dayOfWeek\n" + + " HAVING max(FlightDelayMin) - min(FlightDelayMin)) *" + + " count(FlightDelayMin) + 14 > 100")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + String elasticDsl = queryAction.explain().explain(); + } + + @Test() + @Ignore( + "Github issues: https://github.com/opendistro-for-elasticsearch/sql/issues/194, " + + "https://github.com/opendistro-for-elasticsearch/sql/issues/234") + public void failingQueryWithHavingTest2() throws SqlParseException { + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) " + + " FROM opensearch_dashboards_sample_data_flights\n" + + " GROUP BY DestCountry, dayOfWeek\n" + + " HAVING max(FlightDelayMin) - min(FlightDelayMin) > 100")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + + String elasticDsl = queryAction.explain().explain(); + } + + @Test + public void whereConditionLeftFunctionRightFunctionEqualTest() throws Exception { + + String query = + "SELECT " + + " * from " + + TEST_INDEX_ACCOUNT + + "/account " + + " where floor(split(address,' ')[0]+0) = floor(split(address,' ')[0]+0) limit 1000 "; + + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); + Assert.assertTrue((where.getWheres().size() == 1)); + Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + 
ScriptFilter scriptFilter = + (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); + Pattern pattern = Pattern.compile("floor_\\d+ == floor_\\d+"); + java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); + Assert.assertTrue(matcher.find()); + } + + @Test + public void whereConditionVariableRightVariableEqualTest() throws Exception { + + String query = + "SELECT " + " * from " + TEST_INDEX_ACCOUNT + "/account " + " where a = b limit 1000 "; + + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); + Assert.assertTrue((where.getWheres().size() == 1)); + Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = + (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + Assert.assertTrue(scriptFilter.getScript().contains("doc['a'].value == doc['b'].value")); + } + + @Test + public void joinParseCheckSelectedFieldsSplit() throws SqlParseException { + String query = + "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname " + + " AND d.age < a.age " + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + + List t1Fields = joinSelect.getFirstTable().getSelectedFields(); + Assert.assertEquals(t1Fields.size(), 3); + Assert.assertTrue(fieldExist(t1Fields, "firstname")); + Assert.assertTrue(fieldExist(t1Fields, "lastname")); + Assert.assertTrue(fieldExist(t1Fields, "gender")); + + List t2Fields = joinSelect.getSecondTable().getSelectedFields(); + Assert.assertEquals(t2Fields.size(), 2); + Assert.assertTrue(fieldExist(t2Fields, 
"holdersName")); + Assert.assertTrue(fieldExist(t2Fields, "name")); + } + + @Test + public void joinParseCheckConnectedFields() throws SqlParseException { + String query = + "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname " + + " AND d.age < a.age " + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + + List t1Fields = joinSelect.getFirstTable().getConnectedFields(); + Assert.assertEquals(t1Fields.size(), 2); + Assert.assertTrue(fieldExist(t1Fields, "firstname")); + Assert.assertTrue(fieldExist(t1Fields, "age")); + + List t2Fields = joinSelect.getSecondTable().getConnectedFields(); + Assert.assertEquals(t2Fields.size(), 2); + Assert.assertTrue(fieldExist(t2Fields, "holdersName")); + Assert.assertTrue(fieldExist(t2Fields, "age")); + } + + private boolean fieldExist(List fields, String fieldName) { + for (Field field : fields) if (field.getName().equals(fieldName)) return true; + + return false; + } + + @Test + public void joinParseFromsAreSplitedCorrectly() throws SqlParseException { + String query = + "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + " d on d.holdersName = a.firstname" + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List t1From = joinSelect.getFirstTable().getFrom(); + + Assert.assertNotNull(t1From); + Assert.assertEquals(1, t1From.size()); + Assert.assertTrue(checkFrom(t1From.get(0), TestsConstants.TEST_INDEX_ACCOUNT, "a")); + + List t2From = joinSelect.getSecondTable().getFrom(); + 
Assert.assertNotNull(t2From); + Assert.assertEquals(1, t2From.size()); + Assert.assertTrue(checkFrom(t2From.get(0), TEST_INDEX_DOG, "d")); + } + + private boolean checkFrom(From from, String index, String alias) { + return from.getAlias().equals(alias) && from.getIndex().equals(index); + } + + @Test + public void joinParseConditionsTestOneCondition() throws SqlParseException { + String query = + "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname" + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(1, conditions.size()); + Assert.assertTrue( + "condition not exist: d.holdersName = a.firstname", + conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); + } + + @Test + public void joinParseConditionsTestTwoConditions() throws SqlParseException { + String query = + "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname " + + " AND d.age < a.age " + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(2, conditions.size()); + Assert.assertTrue( + "condition not exist: d.holdersName = a.firstname", + conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); + Assert.assertTrue( + "condition not exist: d.age < 
a.age", + conditionExist(conditions, "d.age", "a.age", Condition.OPERATOR.LT)); + } + + @Test + public void joinSplitWhereCorrectly() throws SqlParseException { + String query = + "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname" + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + String s1Where = joinSelect.getFirstTable().getWhere().toString(); + Assert.assertEquals( + "AND ( AND firstname EQ eliran, AND ( OR age GT 10, OR balance GT 2000 ) ) ", s1Where); + String s2Where = joinSelect.getSecondTable().getWhere().toString(); + Assert.assertEquals("AND age GT 1", s2Where); + } + + @Test + public void joinConditionWithComplexObjectComparisonRightSide() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname,c.parents.father , h.name,h.words " + + "from %s/gotCharacters c " + + "JOIN %s/gotCharacters h " + + "on h.name = c.name.lastname " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(1, conditions.size()); + Assert.assertTrue( + "condition not exist: h.name = c.name.lastname", + conditionExist(conditions, "h.name", "c.name.lastname", Condition.OPERATOR.EQ)); + } + + @Test + public void joinConditionWithComplexObjectComparisonLeftSide() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where 
c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(1, conditions.size()); + Assert.assertTrue( + "condition not exist: c.name.lastname = h.name", + conditionExist(conditions, "c.name.lastname", "h.name", Condition.OPERATOR.EQ)); + } + + @Test + public void limitHintsOnJoin() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select /*! JOIN_TABLES_LIMIT(1000,null) */ " + + "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "use KEY (termsFilter) " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List hints = joinSelect.getHints(); + Assert.assertNotNull(hints); + Assert.assertEquals("hints size was not 1", 1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.JOIN_LIMIT, hint.getType()); + Object[] params = hint.getParams(); + Assert.assertNotNull(params); + Assert.assertEquals("params size was not 2", 2, params.length); + Assert.assertEquals(1000, params[0]); + Assert.assertNull(params[1]); + } + + @Test + public void hashTermsFilterHint() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select /*! 
HASH_WITH_TERMS_FILTER*/ " + + "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "use KEY (termsFilter) " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List hints = joinSelect.getHints(); + Assert.assertNotNull(hints); + Assert.assertEquals("hints size was not 1", 1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, hint.getType()); + } + + @Test + public void multipleHints() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select /*! HASH_WITH_TERMS_FILTER*/ " + + "/*! JOIN_TABLES_LIMIT(1000,null) */ " + + " /*! JOIN_TABLES_LIMIT(100,200) */ " + + "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "use KEY (termsFilter) " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List hints = joinSelect.getHints(); + + Assert.assertNotNull(hints); + Assert.assertEquals("hints size was not 3", 3, hints.size()); + Hint firstHint = hints.get(0); + Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, firstHint.getType()); + Hint secondHint = hints.get(1); + Assert.assertEquals(HintType.JOIN_LIMIT, secondHint.getType()); + Assert.assertEquals(1000, secondHint.getParams()[0]); + Assert.assertNull(secondHint.getParams()[1]); + Hint thirdHint = hints.get(2); + Assert.assertEquals(100, thirdHint.getParams()[0]); + Assert.assertEquals(200, thirdHint.getParams()[1]); + Assert.assertEquals(HintType.JOIN_LIMIT, thirdHint.getType()); + } + + @Test + public void searchWithOdbcTimeFormatParse() throws SqlParseException { + String query = + 
String.format( + Locale.ROOT, + "SELECT insert_time FROM %s/odbc " + + "WHERE insert_time < {ts '2015-03-15 00:00:00.000'}", + TEST_INDEX_ODBC); + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("{ts '2015-03-15 00:00:00.000'}", condition.getValue().toString()); + } + + @Test + public void indexWithSpacesWithinBrackets() throws SqlParseException { + String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fromList = select.getFrom(); + Assert.assertEquals(1, fromList.size()); + From from = fromList.get(0); + Assert.assertEquals("Test Index", from.getIndex()); + } + + @Test + public void indexWithSpacesWithTypeWithinBrackets() throws SqlParseException { + String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fromList = select.getFrom(); + Assert.assertEquals(1, fromList.size()); + From from = fromList.get(0); + Assert.assertEquals("Test Index", from.getIndex()); + } + + @Test + public void fieldWithSpacesWithinBrackets() throws SqlParseException { + String query = "SELECT insert_time FROM name/type1 WHERE [first name] = 'Name'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List where = select.getWhere().getWheres(); + Assert.assertEquals(1, where.size()); + Condition condition = (Condition) where.get(0); + Assert.assertEquals("first name", condition.getName()); + Assert.assertEquals("Name", condition.getValue()); + } + + @Test + public void twoIndices() throws SqlParseException { + String query = "SELECT insert_time FROM index1, index2 WHERE age > 3"; + 
SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fromList = select.getFrom(); + Assert.assertEquals(2, fromList.size()); + From from1 = fromList.get(0); + From from2 = fromList.get(1); + boolean preservedOrder = from1.getIndex().equals("index1") && from2.getIndex().equals("index2"); + boolean notPreservedOrder = + from1.getIndex().equals("index2") && from2.getIndex().equals("index1"); + Assert.assertTrue(preservedOrder || notPreservedOrder); + } + + @Test + public void fieldWithATcharAtWhere() throws SqlParseException { + String query = "SELECT * FROM index/type where @field = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("@field", condition.getName()); + } + + @Test + public void fieldWithATcharAtSelect() throws SqlParseException { + String query = "SELECT @field FROM index/type where field2 = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertEquals(field.getName(), "@field"); + } + + @Test + public void fieldWithATcharAtSelectOnAgg() throws SqlParseException { + String query = "SELECT max(@field) FROM index/type where field2 = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertEquals("MAX(@field)", field.toString()); + } + + @Test + public void fieldWithColonCharAtSelect() throws SqlParseException { + String query = "SELECT a:b FROM index/type where field2 = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertEquals(field.getName(), "a:b"); + } + + @Test + public void fieldWithColonCharAtWhere() throws SqlParseException { + String query = "SELECT * FROM index/type where a:b = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("a:b", condition.getName()); + } + + @Test + public void fieldIsNull() throws SqlParseException { + String query = "SELECT * FROM index/type where a IS NOT NULL"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("a", condition.getName()); + Assert.assertNull(condition.getValue()); + } + + @Test + public void innerQueryTest() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select * from %s/dog where holdersName " + + "IN (select firstname from %s/account where firstname = 'eliran')", + TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_ACCOUNT); + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertTrue(select.containsSubQueries()); + Assert.assertEquals(1, select.getSubQueries().size()); + } + + @Test + public void inTermsSubQueryTest() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select * from %s/dog where holdersName = IN_TERMS (select firstname from %s/account" + + " where firstname = 'eliran')", + TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_ACCOUNT); + SQLExpr sqlExpr = queryToExpr(query); + Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertTrue(select.containsSubQueries()); + Assert.assertEquals(1, select.getSubQueries().size()); + } + + @Test + public void innerQueryTestTwoQueries() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select * from %s/dog where holdersName IN " + + "(select firstname from %s/account where firstname = 'eliran') and " + + "age IN (select name.ofHisName from %s/gotCharacters) ", + TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_ACCOUNT, + TEST_INDEX_GAME_OF_THRONES); + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertTrue(select.containsSubQueries()); + Assert.assertEquals(2, select.getSubQueries().size()); + } + + @Test + public void indexWithDotsAndHyphen() throws SqlParseException { + String query = "select * from data-2015.08.22"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertEquals(1, select.getFrom().size()); + Assert.assertEquals("data-2015.08.22", select.getFrom().get(0).getIndex()); + } + + @Test + public void indexNameWithDotAtTheStart() throws SqlParseException { + String query = "SELECT * FROM .opensearch_dashboards"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertEquals(".opensearch_dashboards", select.getFrom().get(0).getIndex()); + } + + @Test + public void indexWithSemiColons() throws SqlParseException { + String query = "select * from some;index"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertEquals(1, select.getFrom().size()); + Assert.assertEquals("some;index", select.getFrom().get(0).getIndex()); + } + + @Test + public void scriptFiledPlusLiteralTest() throws SqlParseException { + String query = "SELECT field1 + 3 FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField scriptMethod = (MethodField) field; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertTrue( + scriptMethod.getParams().get(1).toString().contains("doc['field1'].value + 3")); + } + + @Test + public void scriptFieldPlusFieldTest() throws SqlParseException { + String query = "SELECT field1 + field2 FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField scriptMethod = (MethodField) field; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertTrue( + scriptMethod + .getParams() + .get(1) + .toString() + .contains("doc['field1'].value + doc['field2'].value")); + } + + @Test + public void scriptLiteralPlusLiteralTest() throws SqlParseException { + String query = "SELECT 1 + 2 FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField scriptMethod = (MethodField) field; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertTrue(scriptMethod.getParams().get(1).toString().contains("1 + 2")); + } + + @Test + public void explicitScriptOnAggregation() throws SqlParseException { + String query = + "SELECT avg( 
script('add','doc[\\'field1\\'].value + doc[\\'field2\\'].value') )" + + " FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField avgMethodField = (MethodField) field; + Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); + Assert.assertEquals(1, avgMethodField.getParams().size()); + MethodField scriptMethod = (MethodField) avgMethodField.getParams().get(0).value; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertEquals( + "doc['field1'].value + doc['field2'].value", scriptMethod.getParams().get(1).toString()); + } + + @Test + public void implicitScriptOnAggregation() throws SqlParseException { + String query = "SELECT avg(field(field1) + field(field2)) FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField avgMethodField = (MethodField) field; + Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); + Assert.assertEquals(1, avgMethodField.getParams().size()); + Assert.assertTrue( + avgMethodField.getParams().get(0).value.toString().contains("doc['field1'].value")); + Assert.assertTrue( + avgMethodField.getParams().get(0).value.toString().contains("doc['field2'].value")); + } + + @Test + public void nestedFieldOnWhereNoPathSimpleField() throws SqlParseException { + String query = "select * from myIndex where nested(message.name) = 'hey'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Where where = 
select.getWhere().getWheres().get(0); + Assert.assertTrue("where should be condition", where instanceof Condition); + Condition condition = (Condition) where; + Assert.assertTrue("condition should be nested", condition.isNested()); + Assert.assertEquals("message", condition.getNestedPath()); + Assert.assertEquals("message.name", condition.getName()); + } + + @Test + public void nestedFieldOnWhereNoPathComplexField() throws SqlParseException { + String query = "select * from myIndex where nested(message.moreNested.name) = 'hey'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Where where = select.getWhere().getWheres().get(0); + Assert.assertTrue("where should be condition", where instanceof Condition); + Condition condition = (Condition) where; + Assert.assertTrue("condition should be nested", condition.isNested()); + Assert.assertEquals("message.moreNested", condition.getNestedPath()); + Assert.assertEquals("message.moreNested.name", condition.getName()); + } + + @Test + public void aggFieldWithAliasTableAliasShouldBeRemoved() throws SqlParseException { + String query = "select count(t.*) as counts,sum(t.size) from xxx/locs as t group by t.kk"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertThat(fields.size(), equalTo(2)); + Assert.assertEquals("COUNT(*)", fields.get(0).toString()); + Assert.assertEquals("SUM(size)", fields.get(1).toString()); + List> groups = select.getGroupBys(); + Assert.assertThat(groups.size(), equalTo(1)); + Assert.assertThat(groups.get(0).size(), equalTo(1)); + Assert.assertEquals("kk", groups.get(0).get(0).getName()); + } + + @Test + public void nestedFieldOnWhereGivenPath() throws SqlParseException { + String query = "select * from myIndex where nested(message.name,message) = 'hey'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) 
sqlExpr); + Where where = select.getWhere().getWheres().get(0); + Assert.assertTrue("where should be condition", where instanceof Condition); + Condition condition = (Condition) where; + Assert.assertTrue("condition should be nested", condition.isNested()); + Assert.assertEquals("message", condition.getNestedPath()); + Assert.assertEquals("message.name", condition.getName()); + } + + @Test + public void nestedFieldOnGroupByNoPath() throws SqlParseException { + String query = "select * from myIndex group by nested(message.name)"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field field = select.getGroupBys().get(0).get(0); + Assert.assertTrue("condition should be nested", field.isNested()); + Assert.assertEquals("message", field.getNestedPath()); + Assert.assertEquals("message.name", field.getName()); + } + + @Test + public void nestedFieldOnGroupByWithPath() throws SqlParseException { + String query = "select * from myIndex group by nested(message.name,message)"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field field = select.getGroupBys().get(0).get(0); + Assert.assertTrue("condition should be nested", field.isNested()); + Assert.assertEquals("message", field.getNestedPath()); + Assert.assertEquals("message.name", field.getName()); + } + + @Test + public void filterAggTestNoAlias() throws SqlParseException { + String query = "select * from myIndex group by a , filter( a > 3 AND b='3' )"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List> groupBys = select.getGroupBys(); + Assert.assertEquals(1, groupBys.size()); + Field aAgg = groupBys.get(0).get(0); + Assert.assertEquals("a", aAgg.getName()); + Field field = groupBys.get(0).get(1); + Assert.assertTrue("filter field should be method field", field instanceof MethodField); + MethodField filterAgg = (MethodField) field; + 
Assert.assertEquals("filter", filterAgg.getName()); + Map params = filterAgg.getParamsAsMap(); + Assert.assertEquals(2, params.size()); + Object alias = params.get("alias"); + Assert.assertEquals("filter(a > 3 AND b = '3')@FILTER", alias); + + Assert.assertTrue(params.get("where") instanceof Where); + Where where = (Where) params.get("where"); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void filterAggTestWithAlias() throws SqlParseException { + String query = "select * from myIndex group by a , filter(myFilter, a > 3 AND b='3' )"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List> groupBys = select.getGroupBys(); + Assert.assertEquals(1, groupBys.size()); + Field aAgg = groupBys.get(0).get(0); + Assert.assertEquals("a", aAgg.getName()); + Field field = groupBys.get(0).get(1); + Assert.assertTrue("filter field should be method field", field instanceof MethodField); + MethodField filterAgg = (MethodField) field; + Assert.assertEquals("filter", filterAgg.getName()); + Map params = filterAgg.getParamsAsMap(); + Assert.assertEquals(2, params.size()); + Object alias = params.get("alias"); + Assert.assertEquals("myFilter@FILTER", alias); + + Assert.assertTrue(params.get("where") instanceof Where); + Where where = (Where) params.get("where"); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void filterAggTestWithAliasAsString() throws SqlParseException { + String query = "select * from myIndex group by a , filter('my filter', a > 3 AND b='3' )"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List> groupBys = select.getGroupBys(); + Assert.assertEquals(1, groupBys.size()); + Field aAgg = groupBys.get(0).get(0); + Assert.assertEquals("a", aAgg.getName()); + Field field = groupBys.get(0).get(1); + Assert.assertTrue("filter field should be method field", field instanceof MethodField); + MethodField 
filterAgg = (MethodField) field; + Assert.assertEquals("filter", filterAgg.getName()); + Map params = filterAgg.getParamsAsMap(); + Assert.assertEquals(2, params.size()); + Object alias = params.get("alias"); + Assert.assertEquals("my filter@FILTER", alias); + + Assert.assertTrue(params.get("where") instanceof Where); + Where where = (Where) params.get("where"); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void doubleOrderByTest() throws SqlParseException { + String query = "select * from indexName order by a asc, b desc"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List orderBys = select.getOrderBys(); + Assert.assertEquals(2, orderBys.size()); + Assert.assertEquals("a", orderBys.get(0).getName()); + Assert.assertEquals("ASC", orderBys.get(0).getType()); + + Assert.assertEquals("b", orderBys.get(1).getName()); + Assert.assertEquals("DESC", orderBys.get(1).getType()); + } + + @Test + public void parseJoinWithOneTableOrderByAttachToCorrectTable() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname , d.words from %s/gotCharacters c " + + "JOIN %s/gotCharacters d on d.name = c.house " + + "order by c.name.firstname", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + Assert.assertTrue("first table should be ordered", joinSelect.getFirstTable().isOrderdSelect()); + Assert.assertFalse( + "second table should not be ordered", joinSelect.getSecondTable().isOrderdSelect()); + } + + @Test + public void parseJoinWithOneTableOrderByRemoveAlias() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname , d.words from %s/gotCharacters c " + + "JOIN %s/gotCharacters d on d.name = c.house " + + "order by c.name.firstname", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + + JoinSelect 
joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List orderBys = joinSelect.getFirstTable().getOrderBys(); + Assert.assertEquals(1, orderBys.size()); + Order order = orderBys.get(0); + Assert.assertEquals("name.firstname", order.getName()); + } + + @Test + public void termsWithStringTest() throws SqlParseException { + String query = "select * from x where y = IN_TERMS('a','b')"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Object[] values = (Object[]) condition.getValue(); + Assert.assertEquals("a", values[0]); + Assert.assertEquals("b", values[1]); + } + + @Test + public void termWithStringTest() throws SqlParseException { + String query = "select * from x where y = TERM('a')"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Object[] values = (Object[]) condition.getValue(); + Assert.assertEquals("a", values[0]); + } + + @Test + public void complexNestedTest() throws SqlParseException { + String query = "select * from x where nested('y',y.b = 'a' and y.c = 'd') "; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Assert.assertEquals(Condition.OPERATOR.NESTED_COMPLEX, condition.getOPERATOR()); + Assert.assertEquals("y", condition.getName()); + Assert.assertTrue(condition.getValue() instanceof Where); + Where where = (Where) condition.getValue(); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void scriptOnFilterNoParams() throws SqlParseException { + String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == 3') "; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + 
Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); + Assert.assertNull(condition.getName()); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = (ScriptFilter) condition.getValue(); + Assert.assertEquals("doc['field'].date.hourOfDay == 3", scriptFilter.getScript()); + Assert.assertFalse(scriptFilter.containsParameters()); + } + + @Test + public void scriptOnFilterWithParams() throws SqlParseException { + String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == x','x'=3) "; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); + Assert.assertNull(condition.getName()); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = (ScriptFilter) condition.getValue(); + Assert.assertEquals("doc['field'].date.hourOfDay == x", scriptFilter.getScript()); + Assert.assertTrue(scriptFilter.containsParameters()); + Map args = scriptFilter.getArgs(); + Assert.assertEquals(1, args.size()); + Assert.assertTrue(args.containsKey("x")); + Assert.assertEquals(3, args.get("x")); + } + + @Test + public void fieldsAsNumbersOnWhere() throws SqlParseException { + String query = "select * from x where ['3'] > 2"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Where where = wheres.get(0); + Assert.assertEquals(Condition.class, where.getClass()); + Condition condition = (Condition) where; + Assert.assertEquals("3", condition.getName()); + } + + @Test + public void likeTestWithEscaped() throws SqlParseException { + String query = "select * from x where name like '&UNDERSCOREhey_%&PERCENT'"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + BoolQueryBuilder 
explan = QueryMaker.explain(select.getWhere()); + String filterAsString = explan.toString(); + Assert.assertTrue(filterAsString.contains("_hey?*%")); + } + + @Test + public void complexNestedAndOtherQuery() throws SqlParseException { + String query = "select * from x where nested('path',path.x=3) and y=3"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(2, wheres.size()); + Assert.assertEquals( + "AND path NESTED_COMPLEX AND ( AND path.x EQ 3 ) ", wheres.get(0).toString()); + Assert.assertEquals("AND y EQ 3", wheres.get(1).toString()); + } + + @Test + public void numberEqualConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 = 1"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "1 == 1"); + } + + @Test + public void numberGreatConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 > 1"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "1 > 1"); + } + + @Test + public void stringEqualConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 'b'"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = 
select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "'a' == 'b'"); + } + + @Test + public void propertyEqualCondition() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a = b"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "doc['a'].value == doc['b'].value"); + } + + @Test + public void propertyWithTableAliasEqualCondition() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select t.* from xxx/locs where t.a = t.b"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "doc['a'].value == doc['b'].value"); + } + + @Test + public void propertyGreatCondition() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a > b"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "doc['a'].value > doc['b'].value"); 
+ } + + @Test + public void stringAndNumberEqualConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 1"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "'a' == 1"); + } + + @Test + public void caseWhenTest() throws SqlParseException { + String query = + "Select k,\n" + + "Case \n" + + "When floor(testBase)>=90 then 'A'\n" + + "When testBase = '80' then 'B'\n" + + "Else 'E' end as testBaseLevel\n" + + "from t"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + for (Field field : select.getFields()) { + if (field instanceof MethodField) { + MethodField methodField = (MethodField) field; String alias = (String) methodField.getParams().get(0).value; String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).intValue()")); - } + Assert.assertEquals(alias, "testBaseLevel"); + Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); + Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - @Test - public void castToLongTest() throws Exception { - String query = "select cast(insert_time as long) from "+ TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); + AtomicInteger docValueCounter = 
new AtomicInteger(); - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_insert_time",alias); - Assert.assertTrue(scriptCode.contains("doc['insert_time'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['insert_time'].value.toString()).longValue()")); - } - - @Test - public void castToFloatTest() throws Exception { - String query = "select cast(age as float) from "+ TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).floatValue()")); - } - - @Test - public void castToDoubleTest() throws Exception { - String query = "select cast(age as double) from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); + while (docValue.find()) { + docValueCounter.incrementAndGet(); + } + Assert.assertThat(docValueCounter.get(), equalTo(2)); + Assert.assertThat(number.groupCount(), equalTo(2)); + } + } + } + + 
@Test + public void caseWhenTestWithFieldElseExpr() throws SqlParseException { + String query = + "Select k,\n" + + "Case \n" + + "When floor(testBase)>=90 then 'A'\n" + + "When testBase = '80' then 'B'\n" + + "Else testBase end as testBaseLevel\n" + + "from t"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + for (Field field : select.getFields()) { + if (field instanceof MethodField) { + MethodField methodField = (MethodField) field; String alias = (String) methodField.getParams().get(0).value; String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); - } - - @Test - public void castToStringTest() throws Exception { - String query = "select cast(age as string) from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); + Assert.assertEquals(alias, "testBaseLevel"); + Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); + Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value.toString()")); - } + AtomicInteger docValueCounter = new AtomicInteger(); - @Test - public void castToDateTimeTest() throws Exception { - String query = "select cast(age as datetime) from "+ TestsConstants.TEST_INDEX_ACCOUNT + 
"/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); + while (docValue.find()) { + docValueCounter.incrementAndGet(); + } + Assert.assertThat(docValueCounter.get(), equalTo(3)); + Assert.assertThat(number.groupCount(), equalTo(2)); + } + } + } + + @Test + public void caseWhenTestWithouhtElseExpr() throws SqlParseException { + String query = + "Select k,\n" + + "Case \n" + + "When floor(testBase)>=90 then 'A'\n" + + "When testBase = '80' then 'B'\n" + + "end as testBaseLevel\n" + + "from t"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + for (Field field : select.getFields()) { + if (field instanceof MethodField) { + MethodField methodField = (MethodField) field; String alias = (String) methodField.getParams().get(0).value; String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" - + "DateTimeFormatter.ISO_DATE_TIME.parse(doc['age'].value.toString()))")); - } + Assert.assertEquals(alias, "testBaseLevel"); - @Test - public void castToDoubleThenDivideTest() throws Exception { - String query = "select cast(age as double)/2 from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - - String scriptCode = 
(String) methodField.getParams().get(1).value; - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); - Assert.assertTrue(scriptCode.contains("/ 2")); - } + Matcher docValue = Pattern.compile("\\{\\s+null\\s+}").matcher(scriptCode); + AtomicInteger docValueCounter = new AtomicInteger(); - @Test - public void multiSelectMinusOperationCheckIndices() throws SqlParseException { - String query = "select pk from firstIndex minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((SQLUnionQuery) - ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - Assert.assertEquals("firstIndex",select.getFirstSelect().getFrom().get(0).getIndex()); - Assert.assertEquals("secondIndex",select.getSecondSelect().getFrom().get(0).getIndex()); - Assert.assertEquals(SQLUnionOperator.MINUS,select.getOperation()); - } - - @Test - public void multiSelectMinusWithAliasCheckAliases() throws SqlParseException { - String query = "select pk as myId from firstIndex minus select myId from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((com.alibaba.druid.sql.ast.statement.SQLUnionQuery) - ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - Assert.assertEquals("myId",select.getFirstSelect().getFields().get(0).getAlias()); - Assert.assertEquals("myId",select.getSecondSelect().getFields().get(0).getName()); - Assert.assertEquals(SQLUnionOperator.MINUS,select.getOperation()); - } - @Test - public void multiSelectMinusTestMinusHints() throws SqlParseException { - String query = "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(1000,50,100)*/ " + - "/*! 
MINUS_USE_TERMS_OPTIMIZATION(true)*/ pk from firstIndex minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((SQLUnionQuery) - ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - List hints = select.getFirstSelect().getHints(); - Assert.assertEquals(2,hints.size()); - for(Hint hint : hints) { - if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { - Object[] params = hint.getParams(); - Assert.assertEquals(1000,params[0]); - Assert.assertEquals(50,params[1]); - Assert.assertEquals(100,params[2]); - } - if(hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION){ - Assert.assertEquals(true,hint.getParams()[0]); - } + while (docValue.find()) { + docValueCounter.incrementAndGet(); } - } - @Test - public void multiSelectMinusScrollCheckDefaultsAllDefaults() throws SqlParseException { - String query = "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS*/ pk from firstIndex " + - "minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((com.alibaba.druid.sql.ast.statement.SQLUnionQuery) + Assert.assertThat(docValueCounter.get(), equalTo(1)); + } + } + } + + @Test + public void caseWhenSwitchTest() { + String query = + "SELECT CASE weather " + + "WHEN 'Sunny' THEN '0' " + + "WHEN 'Rainy' THEN '1' " + + "ELSE 'NA' END AS case " + + "FROM t"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + Assert.assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "doc['weather'].value=='Sunny'")); + } + + @Test + public void castToIntTest() throws Exception { + String query = + "select cast(age as int) from " + TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", 
castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).intValue()")); + } + + @Test + public void castToLongTest() throws Exception { + String query = + "select cast(insert_time as long) from " + TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_insert_time", alias); + Assert.assertTrue(scriptCode.contains("doc['insert_time'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['insert_time'].value.toString()).longValue()")); + } + + @Test + public void castToFloatTest() throws Exception { + String query = + "select cast(age as float) from " + TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + 
scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).floatValue()")); + } + + @Test + public void castToDoubleTest() throws Exception { + String query = + "select cast(age as double) from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); + } + + @Test + public void castToStringTest() throws Exception { + String query = + "select cast(age as string) from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value.toString()")); + } + + @Test + public void castToDateTimeTest() throws Exception { + String query = + "select cast(age as datetime) from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = 
select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains( + "DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" + + "DateTimeFormatter.ISO_DATE_TIME.parse(doc['age'].value.toString()))")); + } + + @Test + public void castToDoubleThenDivideTest() throws Exception { + String query = + "select cast(age as double)/2 from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); + Assert.assertTrue(scriptCode.contains("/ 2")); + } + + @Test + public void multiSelectMinusOperationCheckIndices() throws SqlParseException { + String query = "select pk from firstIndex minus select pk from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); + Assert.assertEquals("firstIndex", select.getFirstSelect().getFrom().get(0).getIndex()); + Assert.assertEquals("secondIndex", select.getSecondSelect().getFrom().get(0).getIndex()); + Assert.assertEquals(SQLUnionOperator.MINUS, select.getOperation()); + } + + 
@Test + public void multiSelectMinusWithAliasCheckAliases() throws SqlParseException { + String query = "select pk as myId from firstIndex minus select myId from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (com.alibaba.druid.sql.ast.statement.SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - List hints = select.getFirstSelect().getHints(); - Assert.assertEquals(1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS,hint.getType()); + Assert.assertEquals("myId", select.getFirstSelect().getFields().get(0).getAlias()); + Assert.assertEquals("myId", select.getSecondSelect().getFields().get(0).getName()); + Assert.assertEquals(SQLUnionOperator.MINUS, select.getOperation()); + } + + @Test + public void multiSelectMinusTestMinusHints() throws SqlParseException { + String query = + "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(1000,50,100)*/ /*!" + + " MINUS_USE_TERMS_OPTIMIZATION(true)*/ pk from firstIndex minus select pk from" + + " secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); + List hints = select.getFirstSelect().getHints(); + Assert.assertEquals(2, hints.size()); + for (Hint hint : hints) { + if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { Object[] params = hint.getParams(); - Assert.assertEquals(100000, params[0]); - Assert.assertEquals(100000, params[1]); - Assert.assertEquals(1000, params[2]); - } - - @Test - public void multiSelectMinusScrollCheckDefaultsOneDefault() throws SqlParseException { - String query = "select /*! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(50,100)*/ pk " + - "from firstIndex minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((com.alibaba.druid.sql.ast.statement.SQLUnionQuery) + Assert.assertEquals(1000, params[0]); + Assert.assertEquals(50, params[1]); + Assert.assertEquals(100, params[2]); + } + if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { + Assert.assertEquals(true, hint.getParams()[0]); + } + } + } + + @Test + public void multiSelectMinusScrollCheckDefaultsAllDefaults() throws SqlParseException { + String query = + "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS*/ pk from firstIndex " + + "minus select pk from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (com.alibaba.druid.sql.ast.statement.SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - List hints = select.getFirstSelect().getHints(); - Assert.assertEquals(1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS,hint.getType()); - Object[] params = hint.getParams(); - Assert.assertEquals(50, params[0]); - Assert.assertEquals(100, params[1]); - Assert.assertEquals(1000, params[2]); - } - - private SQLExpr queryToExpr(String query) { - return new ElasticSqlExprParser(query).expr(); - } - - private boolean conditionExist(List conditions, String from, String to, Condition.OPERATOR OPERATOR) { - String[] aliasAndField = to.split("\\.", 2); - String toAlias = aliasAndField[0]; - String toField = aliasAndField[1]; - for (Condition condition : conditions) { - if (condition.getOPERATOR() != OPERATOR) continue; - - boolean fromIsEqual = condition.getName().equals(from); - if (!fromIsEqual) continue; - - String[] valueAliasAndField = condition.getValue().toString().split("\\.", 2); - boolean toFieldNameIsEqual = valueAliasAndField[1].equals(toField); - boolean toAliasIsEqual = valueAliasAndField[0].equals(toAlias); - boolean toIsEqual = toAliasIsEqual && 
toFieldNameIsEqual; - - if (toIsEqual) return true; - } - return false; - } + List hints = select.getFirstSelect().getHints(); + Assert.assertEquals(1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS, hint.getType()); + Object[] params = hint.getParams(); + Assert.assertEquals(100000, params[0]); + Assert.assertEquals(100000, params[1]); + Assert.assertEquals(1000, params[2]); + } + + @Test + public void multiSelectMinusScrollCheckDefaultsOneDefault() throws SqlParseException { + String query = + "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(50,100)*/ pk " + + "from firstIndex minus select pk from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (com.alibaba.druid.sql.ast.statement.SQLUnionQuery) + ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); + List hints = select.getFirstSelect().getHints(); + Assert.assertEquals(1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS, hint.getType()); + Object[] params = hint.getParams(); + Assert.assertEquals(50, params[0]); + Assert.assertEquals(100, params[1]); + Assert.assertEquals(1000, params[2]); + } + + private SQLExpr queryToExpr(String query) { + return new ElasticSqlExprParser(query).expr(); + } + + private boolean conditionExist( + List conditions, String from, String to, Condition.OPERATOR OPERATOR) { + String[] aliasAndField = to.split("\\.", 2); + String toAlias = aliasAndField[0]; + String toField = aliasAndField[1]; + for (Condition condition : conditions) { + if (condition.getOPERATOR() != OPERATOR) continue; + + boolean fromIsEqual = condition.getName().equals(from); + if (!fromIsEqual) continue; + + String[] valueAliasAndField = condition.getValue().toString().split("\\.", 2); + boolean toFieldNameIsEqual = valueAliasAndField[1].equals(toField); + boolean toAliasIsEqual = valueAliasAndField[0].equals(toAlias); + boolean toIsEqual = toAliasIsEqual && 
toFieldNameIsEqual; + + if (toIsEqual) return true; + } + return false; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java index ac614affdb..5713179b46 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -20,127 +19,135 @@ public class SubQueryParserTest { - private static SqlParser parser = new SqlParser(); - - @Test - public void selectFromSubqueryShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.T1 as age1, t.T2 as balance1 " + - "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(2, select.getFields().size()); - assertEquals("age", select.getFields().get(0).getName()); - assertEquals("age1", select.getFields().get(0).getAlias()); - assertEquals("balance", select.getFields().get(1).getName()); - assertEquals("balance1", select.getFields().get(1).getAlias()); - } - - @Test - public void selectFromSubqueryWithoutAliasShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.age as finalAge, t.balance as finalBalance " + - "FROM (SELECT age, balance FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(2, select.getFields().size()); - assertEquals("age", select.getFields().get(0).getName()); - assertEquals("finalAge", select.getFields().get(0).getAlias()); - assertEquals("balance", select.getFields().get(1).getName()); - assertEquals("finalBalance", select.getFields().get(1).getAlias()); - } - - @Test - public void selectFromSubqueryShouldIgnoreUnusedField() throws 
SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.T1 as age1 " + - "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(1, select.getFields().size()); - assertEquals("age", select.getFields().get(0).getName()); - assertEquals("age1", select.getFields().get(0).getAlias()); - } - - @Test - public void selectFromSubqueryWithAggShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - assertEquals(1, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - } - - @Test - public void selectFromSubqueryWithWhereAndCountShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s/account WHERE age > 30) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(1, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - } - - @Test - public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s/account GROUP BY age ORDER BY TEMP) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(1, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - assertEquals(1, select.getOrderBys().size()); - assertEquals("count", select.getOrderBys().get(0).getName()); - assertEquals("count", select.getOrderBys().get(0).getSortField().getName()); - } - - @Test - public void selectFromSubqueryWithCountAndGroupByAndHavingShouldPass() throws 
Exception { - - Select select = parseSelect( - StringUtils.format("SELECT t.T1 as g, t.T2 as c " + - "FROM (SELECT gender as T1, COUNT(*) as T2 " + - " FROM %s/account " + - " GROUP BY gender " + - " HAVING T2 > 500) t", TEST_INDEX_ACCOUNT)); - - assertEquals(2, select.getFields().size()); - assertEquals("gender", select.getFields().get(0).getName()); - assertEquals("g", select.getFields().get(0).getAlias()); - assertEquals("COUNT", select.getFields().get(1).getName()); - assertEquals("c", select.getFields().get(1).getAlias()); - assertEquals(1, select.getHaving().getConditions().size()); - assertEquals("c", ((Condition) select.getHaving().getConditions().get(0)).getName()); - } - - @Test - public void selectFromSubqueryCountAndSum() throws Exception { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP1 as count, t.TEMP2 as balance " + - "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + - " FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - assertEquals(2, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - assertEquals("SUM", select.getFields().get(1).getName()); - assertEquals("balance", select.getFields().get(1).getAlias()); - } - - private Select parseSelect(String query) throws SqlParseException { - return parser.parseSelect((SQLQueryExpr) new ElasticSqlExprParser(query).expr()); - } + private static SqlParser parser = new SqlParser(); + + @Test + public void selectFromSubqueryShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.T1 as age1, t.T2 as balance1 " + + "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(2, select.getFields().size()); + assertEquals("age", select.getFields().get(0).getName()); + assertEquals("age1", select.getFields().get(0).getAlias()); + assertEquals("balance", select.getFields().get(1).getName()); + 
assertEquals("balance1", select.getFields().get(1).getAlias()); + } + + @Test + public void selectFromSubqueryWithoutAliasShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.age as finalAge, t.balance as finalBalance " + + "FROM (SELECT age, balance FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(2, select.getFields().size()); + assertEquals("age", select.getFields().get(0).getName()); + assertEquals("finalAge", select.getFields().get(0).getAlias()); + assertEquals("balance", select.getFields().get(1).getName()); + assertEquals("finalBalance", select.getFields().get(1).getAlias()); + } + + @Test + public void selectFromSubqueryShouldIgnoreUnusedField() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.T1 as age1 " + "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(1, select.getFields().size()); + assertEquals("age", select.getFields().get(0).getName()); + assertEquals("age1", select.getFields().get(0).getAlias()); + } + + @Test + public void selectFromSubqueryWithAggShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP as count FROM (SELECT COUNT(*) as TEMP FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + assertEquals(1, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + } + + @Test + public void selectFromSubqueryWithWhereAndCountShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s/account WHERE age > 30) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(1, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + } + + @Test + public void 
selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s/account GROUP BY age ORDER BY TEMP) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(1, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + assertEquals(1, select.getOrderBys().size()); + assertEquals("count", select.getOrderBys().get(0).getName()); + assertEquals("count", select.getOrderBys().get(0).getSortField().getName()); + } + + @Test + public void selectFromSubqueryWithCountAndGroupByAndHavingShouldPass() throws Exception { + + Select select = + parseSelect( + StringUtils.format( + "SELECT t.T1 as g, t.T2 as c " + + "FROM (SELECT gender as T1, COUNT(*) as T2 " + + " FROM %s/account " + + " GROUP BY gender " + + " HAVING T2 > 500) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(2, select.getFields().size()); + assertEquals("gender", select.getFields().get(0).getName()); + assertEquals("g", select.getFields().get(0).getAlias()); + assertEquals("COUNT", select.getFields().get(1).getName()); + assertEquals("c", select.getFields().get(1).getAlias()); + assertEquals(1, select.getHaving().getConditions().size()); + assertEquals("c", ((Condition) select.getHaving().getConditions().get(0)).getName()); + } + + @Test + public void selectFromSubqueryCountAndSum() throws Exception { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP1 as count, t.TEMP2 as balance " + + "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + + " FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + assertEquals(2, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + assertEquals("SUM", select.getFields().get(1).getName()); + assertEquals("balance", 
select.getFields().get(1).getAlias()); + } + + private Select parseSelect(String query) throws SqlParseException { + return parser.parseSelect((SQLQueryExpr) new ElasticSqlExprParser(query).expr()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java index bdf3c64fd8..855ed9e346 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.converter; import static org.hamcrest.MatcherAssert.assertThat; @@ -36,321 +35,367 @@ @RunWith(MockitoJUnitRunner.class) public class SQLAggregationParserTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void parseAggWithoutExpressionShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "min"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("MAX(FlightDelayMin)", null, ExpressionFactory - .ref("MAX_0")), - columnNode("min", "min", ExpressionFactory.ref("min")))); - } - - @Test - public void parseAggWithFunctioniWithoutExpressionShouldPass() { - String sql = 
"SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "min"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("MAX(FlightDelayMin)", null, ExpressionFactory - .ref("MAX_0")), - columnNode("min", "min", ExpressionFactory.ref("min")))); - } - - @Test - public void parseAggWithExpressionShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "MIN_1"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("sub", "sub", add(ExpressionFactory.ref("MAX_0"), ExpressionFactory - .ref("MIN_1"))))); - } - - @Test - public void parseWithRawSelectFuncnameShouldPass() { - String sql = "SELECT LOG(FlightDelayMin) " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY log(FlightDelayMin)"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = 
parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("log(FlightDelayMin)", "log(FlightDelayMin)"))); - - assertThat( - columnNodes, - containsInAnyOrder( - columnNode( - "LOG(FlightDelayMin)", - null, - ExpressionFactory.ref("log(FlightDelayMin)") - ) - ) - ); - } - - @Test - public void functionOverFiledShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "MIN_1"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("sub", "sub", add(ExpressionFactory.ref("MAX_0"), ExpressionFactory - .ref("MIN_1"))))); - } - - @Test - public void parseCompoundAggWithExpressionShouldPass() { - String sql = "SELECT ASCII(dayOfWeek), log(max(FlightDelayMin) + MIN(FlightDelayMin)) as log " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY ASCII(dayOfWeek)"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("ASCII(dayOfWeek)", "ASCII(dayOfWeek)"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "MIN_1"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("ASCII(dayOfWeek)", null, ExpressionFactory - .ref("ASCII(dayOfWeek)")), - columnNode("log", "log", 
log(add(ExpressionFactory.ref("MAX_0"), ExpressionFactory - .ref("MIN_1")))))); - } - - @Test - public void parseSingleFunctionOverAggShouldPass() { - String sql = "SELECT log(max(age)) FROM accounts"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(agg("max", "age", "max_0"))); - assertThat(columnNodes, containsInAnyOrder(columnNode("log(max(age))", null, log( - ExpressionFactory.ref("max_0"))))); - } - - @Test - public void parseFunctionGroupColumnOverShouldPass() { - String sql = "SELECT CAST(balance AS FLOAT) FROM accounts GROUP BY balance"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("balance", "balance"))); - assertThat(columnNodes, containsInAnyOrder(columnNode("CAST(balance AS FLOAT)", null, cast( - ExpressionFactory.ref("balance"))))); - } - - @Test - public void withoutAggregationShouldPass() { - String sql = "SELECT age, gender FROM accounts GROUP BY age, gender"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - group("age", "age"), - group("gender", "gender"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("age", null, ExpressionFactory.ref("age")), - columnNode("gender", null, ExpressionFactory.ref("gender")))); - } - - @Test - public void groupKeyInSelectWithFunctionShouldPass() { - String sql = "SELECT log(age), max(balance) FROM accounts GROUP BY age"; - 
SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - group("age", "age"), - agg("max", "balance", "max_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("log(age)", null, log(ExpressionFactory.ref("age"))), - columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); - } - - @Test - public void theDotInFieldNameShouldBeReplaceWithSharp() { - String sql = "SELECT name.lastname, max(balance) FROM accounts GROUP BY name.lastname"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - group("name.lastname", "name#lastname"), - agg("max", "balance", "max_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("name.lastname", null, ExpressionFactory.ref("name#lastname")), - columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); - } - - @Test - public void noGroupKeyInSelectShouldPass() { - String sql = "SELECT AVG(age) FROM t GROUP BY age"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - agg("avg", "age", "avg_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("avg(age)", null, ExpressionFactory.ref("avg_0")))); - } - - @Test - public void aggWithDistinctShouldPass() { - String sql = "SELECT count(distinct gender) FROM t GROUP BY age"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - 
parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - agg("count", "gender", "count_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("count(distinct gender)", null, ExpressionFactory.ref("count_0")))); - } - - /** - * TermQueryExplainIT.testNestedSingleGroupBy - */ - @Test - public void aggregationWithNestedShouldThrowException() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unsupported operator: nested"); - - String sql = "SELECT nested(projects.name, 'projects'),id " - + "FROM t " - + "GROUP BY nested(projects.name.keyword, 'projects')"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - } - - private MySqlSelectQueryBlock mYSqlSelectQueryBlock(String sql) { - String dbType = JdbcConstants.MYSQL; - SQLQueryExpr sqlQueryExpr = (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); - return ((MySqlSelectQueryBlock) sqlQueryExpr.getSubQuery().getQuery()); - } - - private TypeSafeMatcher columnNode(String name, String alias, Expression expr) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.format("(name=%s,alias=%s,expression=%s)", name, alias, expr)); - } - - @Override - protected boolean matchesSafely(ColumnNode item) { - if (name == null) { - return false; - } - if (alias == null && item.getAlias() != null) { - return false; - } - - return name.equalsIgnoreCase(item.getName()) && - ((alias == null && item.getAlias() == null) || alias.equals(item.getAlias())) && - expr.toString().equalsIgnoreCase(item.getExpr().toString()); - } - }; - } - - private TypeSafeMatcher agg(String methodName, String name, String alias) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - 
description.appendText(String.format("(methodName=%s, name=%s, alias=%s)", methodName, name, alias)); - } - - @Override - protected boolean matchesSafely(SQLSelectItem item) { - if (item.getExpr() instanceof SQLAggregateExpr) { - return ((SQLAggregateExpr) item.getExpr()).getMethodName().equalsIgnoreCase(methodName) && - ((SQLAggregateExpr) item.getExpr()).getArguments() - .get(0) - .toString() - .equalsIgnoreCase(name) && - ((item.getAlias() == null && alias == null) || item.getAlias().equalsIgnoreCase(alias)); - } else { - return false; - } - } - }; - } - - private TypeSafeMatcher group(String name, String alias) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.format("(name=%s, alias=%s)", name, alias)); - } - - @Override - protected boolean matchesSafely(SQLSelectItem item) { - boolean b = item.getExpr().toString().equalsIgnoreCase(name) && - ((item.getAlias() == null && alias == null) || item.getAlias().equalsIgnoreCase(alias)); - return b; - } - }; - } - - private Expression add(Expression... expressions) { - return of(ADD, Arrays.asList(expressions)); - } - - private Expression log(Expression... 
expressions) { - return of(LOG, Arrays.asList(expressions)); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void parseAggWithoutExpressionShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", "dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "min"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode("MAX(FlightDelayMin)", null, ExpressionFactory.ref("MAX_0")), + columnNode("min", "min", ExpressionFactory.ref("min")))); + } + + @Test + public void parseAggWithFunctioniWithoutExpressionShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", "dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "min"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode("MAX(FlightDelayMin)", null, ExpressionFactory.ref("MAX_0")), + columnNode("min", "min", ExpressionFactory.ref("min")))); + } + + @Test + public void parseAggWithExpressionShouldPass() { + String 
sql = + "SELECT dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", "dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "MIN_1"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode( + "sub", + "sub", + add(ExpressionFactory.ref("MAX_0"), ExpressionFactory.ref("MIN_1"))))); + } + + @Test + public void parseWithRawSelectFuncnameShouldPass() { + String sql = + "SELECT LOG(FlightDelayMin) " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY log(FlightDelayMin)"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, containsInAnyOrder(group("log(FlightDelayMin)", "log(FlightDelayMin)"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("LOG(FlightDelayMin)", null, ExpressionFactory.ref("log(FlightDelayMin)")))); + } + + @Test + public void functionOverFiledShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", 
"dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "MIN_1"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode( + "sub", + "sub", + add(ExpressionFactory.ref("MAX_0"), ExpressionFactory.ref("MIN_1"))))); + } + + @Test + public void parseCompoundAggWithExpressionShouldPass() { + String sql = + "SELECT ASCII(dayOfWeek), log(max(FlightDelayMin) + MIN(FlightDelayMin)) as log " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY ASCII(dayOfWeek)"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("ASCII(dayOfWeek)", "ASCII(dayOfWeek)"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "MIN_1"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("ASCII(dayOfWeek)", null, ExpressionFactory.ref("ASCII(dayOfWeek)")), + columnNode( + "log", + "log", + log(add(ExpressionFactory.ref("MAX_0"), ExpressionFactory.ref("MIN_1")))))); + } + + @Test + public void parseSingleFunctionOverAggShouldPass() { + String sql = "SELECT log(max(age)) FROM accounts"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(agg("max", "age", "max_0"))); + assertThat( + columnNodes, + containsInAnyOrder(columnNode("log(max(age))", null, log(ExpressionFactory.ref("max_0"))))); + } + + @Test + public void parseFunctionGroupColumnOverShouldPass() { + String sql = "SELECT CAST(balance AS FLOAT) FROM accounts GROUP BY balance"; + SQLAggregationParser parser = new 
SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(group("balance", "balance"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("CAST(balance AS FLOAT)", null, cast(ExpressionFactory.ref("balance"))))); + } + + @Test + public void withoutAggregationShouldPass() { + String sql = "SELECT age, gender FROM accounts GROUP BY age, gender"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(group("age", "age"), group("gender", "gender"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("age", null, ExpressionFactory.ref("age")), + columnNode("gender", null, ExpressionFactory.ref("gender")))); + } + + @Test + public void groupKeyInSelectWithFunctionShouldPass() { + String sql = "SELECT log(age), max(balance) FROM accounts GROUP BY age"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, containsInAnyOrder(group("age", "age"), agg("max", "balance", "max_0"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("log(age)", null, log(ExpressionFactory.ref("age"))), + columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); + } + + @Test + public void theDotInFieldNameShouldBeReplaceWithSharp() { + String sql = "SELECT name.lastname, max(balance) FROM accounts GROUP BY name.lastname"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List 
sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("name.lastname", "name#lastname"), agg("max", "balance", "max_0"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("name.lastname", null, ExpressionFactory.ref("name#lastname")), + columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); + } + + @Test + public void noGroupKeyInSelectShouldPass() { + String sql = "SELECT AVG(age) FROM t GROUP BY age"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(agg("avg", "age", "avg_0"))); + assertThat( + columnNodes, + containsInAnyOrder(columnNode("avg(age)", null, ExpressionFactory.ref("avg_0")))); + } + + @Test + public void aggWithDistinctShouldPass() { + String sql = "SELECT count(distinct gender) FROM t GROUP BY age"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(agg("count", "gender", "count_0"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("count(distinct gender)", null, ExpressionFactory.ref("count_0")))); + } + + /** TermQueryExplainIT.testNestedSingleGroupBy */ + @Test + public void aggregationWithNestedShouldThrowException() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unsupported operator: nested"); + + String sql = + "SELECT nested(projects.name, 'projects'),id " + + "FROM t " + + "GROUP BY nested(projects.name.keyword, 'projects')"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + 
parser.parse(mYSqlSelectQueryBlock(sql)); + } + + private MySqlSelectQueryBlock mYSqlSelectQueryBlock(String sql) { + String dbType = JdbcConstants.MYSQL; + SQLQueryExpr sqlQueryExpr = (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); + return ((MySqlSelectQueryBlock) sqlQueryExpr.getSubQuery().getQuery()); + } + + private TypeSafeMatcher columnNode(String name, String alias, Expression expr) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + String.format("(name=%s,alias=%s,expression=%s)", name, alias, expr)); + } + + @Override + protected boolean matchesSafely(ColumnNode item) { + if (name == null) { + return false; + } + if (alias == null && item.getAlias() != null) { + return false; + } + + return name.equalsIgnoreCase(item.getName()) + && ((alias == null && item.getAlias() == null) || alias.equals(item.getAlias())) + && expr.toString().equalsIgnoreCase(item.getExpr().toString()); + } + }; + } + + private TypeSafeMatcher agg(String methodName, String name, String alias) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + String.format("(methodName=%s, name=%s, alias=%s)", methodName, name, alias)); + } + + @Override + protected boolean matchesSafely(SQLSelectItem item) { + if (item.getExpr() instanceof SQLAggregateExpr) { + return ((SQLAggregateExpr) item.getExpr()).getMethodName().equalsIgnoreCase(methodName) + && ((SQLAggregateExpr) item.getExpr()) + .getArguments() + .get(0) + .toString() + .equalsIgnoreCase(name) + && ((item.getAlias() == null && alias == null) + || item.getAlias().equalsIgnoreCase(alias)); + } else { + return false; + } + } + }; + } + + private TypeSafeMatcher group(String name, String alias) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText(String.format("(name=%s, alias=%s)", name, alias)); + } + + @Override + 
protected boolean matchesSafely(SQLSelectItem item) { + boolean b = + item.getExpr().toString().equalsIgnoreCase(name) + && ((item.getAlias() == null && alias == null) + || item.getAlias().equalsIgnoreCase(alias)); + return b; + } + }; + } + + private Expression add(Expression... expressions) { + return of(ADD, Arrays.asList(expressions)); + } + + private Expression log(Expression... expressions) { + return of(LOG, Arrays.asList(expressions)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java index e297c2c1d4..ac949eb0d7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.converter; import static org.junit.Assert.assertEquals; @@ -34,118 +33,125 @@ @RunWith(MockitoJUnitRunner.class) public class SQLExprToExpressionConverterTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - private SQLExprToExpressionConverter converter; - private SQLAggregationParser.Context context; - private final SQLAggregateExpr maxA = new SQLAggregateExpr("MAX"); - private final SQLAggregateExpr maxB = new SQLAggregateExpr("MAX"); - private final SQLAggregateExpr minA = new SQLAggregateExpr("MIN"); - private final SQLIdentifierExpr groupG = new SQLIdentifierExpr("A"); - private final SQLIdentifierExpr aggA = new SQLIdentifierExpr("A"); - private final SQLIdentifierExpr aggB = new SQLIdentifierExpr("B"); - private final SQLIntegerExpr one = new SQLIntegerExpr(1); - - @Before - public void setup() { - maxA.getArguments().add(aggA); - maxB.getArguments().add(aggB); - 
minA.getArguments().add(aggA); - context = new SQLAggregationParser.Context(ImmutableMap.of()); - converter = new SQLExprToExpressionConverter(context); - } - - @Test - public void identifierShouldReturnVarExpression() { - context.addGroupKeyExpr(groupG); - Expression expression = converter.convert(groupG); - - assertEquals(ref("A").toString(), expression.toString()); - } - - @Test - public void binaryOperatorAddShouldReturnAddExpression() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - Expression expression = converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA)); - assertEquals(add(ref("MAX_0"), ref("MIN_1")).toString(), expression.toString()); - } - - @Test - public void compoundBinaryOperatorShouldReturnCorrectExpression() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - Expression expression = converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - minA))); - assertEquals(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1"))).toString(), expression.toString()); - } - - @Test - public void functionOverCompoundBinaryOperatorShouldReturnCorrectExpression() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); - methodInvokeExpr.addParameter(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - minA))); - - Expression expression = converter.convert(methodInvokeExpr); - assertEquals(log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); - } - - @Test - public void functionOverGroupColumn() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); - methodInvokeExpr.addParameter(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - new SQLBinaryOpExpr(maxA, 
SQLBinaryOperator.Add, - minA))); - - Expression expression = converter.convert(methodInvokeExpr); - assertEquals(log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); - } - - @Test - public void binaryOperatorWithLiteralAddShouldReturnAddExpression() { - context.addAggregationExpr(maxA); - - Expression expression = converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, one)); - assertEquals(add(ref("MAX_0"), literal(integerValue(1))).toString(), expression.toString()); - } - - @Test - public void unknownIdentifierShouldThrowException() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unsupported expr"); - converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, maxB)); - } - - @Test - public void unsupportOperationShouldThrowException() { - exceptionRule.expect(UnsupportedOperationException.class); - exceptionRule.expectMessage("unsupported operator: cot"); - - context.addAggregationExpr(maxA); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("cot"); - methodInvokeExpr.addParameter(maxA); - converter.convert(methodInvokeExpr); - } - - private Expression add(Expression... expressions) { - return of(ADD, Arrays.asList(expressions)); - } - - private Expression log(Expression... 
expressions) { - return of(LOG, Arrays.asList(expressions)); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + private SQLExprToExpressionConverter converter; + private SQLAggregationParser.Context context; + private final SQLAggregateExpr maxA = new SQLAggregateExpr("MAX"); + private final SQLAggregateExpr maxB = new SQLAggregateExpr("MAX"); + private final SQLAggregateExpr minA = new SQLAggregateExpr("MIN"); + private final SQLIdentifierExpr groupG = new SQLIdentifierExpr("A"); + private final SQLIdentifierExpr aggA = new SQLIdentifierExpr("A"); + private final SQLIdentifierExpr aggB = new SQLIdentifierExpr("B"); + private final SQLIntegerExpr one = new SQLIntegerExpr(1); + + @Before + public void setup() { + maxA.getArguments().add(aggA); + maxB.getArguments().add(aggB); + minA.getArguments().add(aggA); + context = new SQLAggregationParser.Context(ImmutableMap.of()); + converter = new SQLExprToExpressionConverter(context); + } + + @Test + public void identifierShouldReturnVarExpression() { + context.addGroupKeyExpr(groupG); + Expression expression = converter.convert(groupG); + + assertEquals(ref("A").toString(), expression.toString()); + } + + @Test + public void binaryOperatorAddShouldReturnAddExpression() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + Expression expression = + converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA)); + assertEquals(add(ref("MAX_0"), ref("MIN_1")).toString(), expression.toString()); + } + + @Test + public void compoundBinaryOperatorShouldReturnCorrectExpression() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + Expression expression = + converter.convert( + new SQLBinaryOpExpr( + maxA, + SQLBinaryOperator.Add, + new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA))); + assertEquals( + add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1"))).toString(), expression.toString()); + } + + @Test + public void 
functionOverCompoundBinaryOperatorShouldReturnCorrectExpression() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); + methodInvokeExpr.addParameter( + new SQLBinaryOpExpr( + maxA, SQLBinaryOperator.Add, new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA))); + + Expression expression = converter.convert(methodInvokeExpr); + assertEquals( + log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); + } + + @Test + public void functionOverGroupColumn() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); + methodInvokeExpr.addParameter( + new SQLBinaryOpExpr( + maxA, SQLBinaryOperator.Add, new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA))); + + Expression expression = converter.convert(methodInvokeExpr); + assertEquals( + log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); + } + + @Test + public void binaryOperatorWithLiteralAddShouldReturnAddExpression() { + context.addAggregationExpr(maxA); + + Expression expression = + converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, one)); + assertEquals(add(ref("MAX_0"), literal(integerValue(1))).toString(), expression.toString()); + } + + @Test + public void unknownIdentifierShouldThrowException() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unsupported expr"); + converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, maxB)); + } + + @Test + public void unsupportOperationShouldThrowException() { + exceptionRule.expect(UnsupportedOperationException.class); + exceptionRule.expectMessage("unsupported operator: cot"); + + context.addAggregationExpr(maxA); + SQLMethodInvokeExpr methodInvokeExpr = new 
SQLMethodInvokeExpr("cot"); + methodInvokeExpr.addParameter(maxA); + converter.convert(methodInvokeExpr); + } + + private Expression add(Expression... expressions) { + return of(ADD, Arrays.asList(expressions)); + } + + private Expression log(Expression... expressions) { + return of(LOG, Arrays.asList(expressions)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java index f64a550a13..578fb9bcff 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.converter; import static org.junit.Assert.assertTrue; @@ -25,51 +24,53 @@ @RunWith(MockitoJUnitRunner.class) public class SQLToOperatorConverterTest { - @Mock - private Client client; + @Mock private Client client; - private SQLToOperatorConverter converter; + private SQLToOperatorConverter converter; - @Before - public void setup() { - converter = new SQLToOperatorConverter(client, new ColumnTypeProvider()); - } + @Before + public void setup() { + converter = new SQLToOperatorConverter(client, new ColumnTypeProvider()); + } - @Test - public void convertAggShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - toExpr(sql).accept(converter); - PhysicalOperator physicalOperator = converter.getPhysicalOperator(); + @Test + public void convertAggShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + 
toExpr(sql).accept(converter); + PhysicalOperator physicalOperator = converter.getPhysicalOperator(); - assertTrue(physicalOperator instanceof PhysicalProject); - } + assertTrue(physicalOperator instanceof PhysicalProject); + } - @Test - public void convertMaxMinusMinShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - toExpr(sql).accept(converter); - PhysicalOperator physicalOperator = converter.getPhysicalOperator(); + @Test + public void convertMaxMinusMinShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + toExpr(sql).accept(converter); + PhysicalOperator physicalOperator = converter.getPhysicalOperator(); - assertTrue(physicalOperator instanceof PhysicalProject); - } + assertTrue(physicalOperator instanceof PhysicalProject); + } - @Test - public void convertDistinctPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - toExpr(sql).accept(converter); - PhysicalOperator physicalOperator = converter.getPhysicalOperator(); + @Test + public void convertDistinctPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + toExpr(sql).accept(converter); + PhysicalOperator physicalOperator = converter.getPhysicalOperator(); - assertTrue(physicalOperator instanceof PhysicalProject); - } + assertTrue(physicalOperator instanceof PhysicalProject); + } - private SQLQueryExpr toExpr(String sql) { - String dbType = JdbcConstants.MYSQL; - return (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); - } + private SQLQueryExpr toExpr(String sql) { + String dbType = JdbcConstants.MYSQL; + 
return (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java index 41f7b111b0..30bbac861a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.identifier; import com.alibaba.druid.sql.SQLUtils; @@ -13,75 +12,70 @@ import org.opensearch.sql.legacy.rewriter.identifier.UnquoteIdentifierRule; import org.opensearch.sql.legacy.util.SqlParserUtils; - -/** - * Test cases for backticks quoted identifiers - */ +/** Test cases for backticks quoted identifiers */ public class UnquoteIdentifierRuleTest { - @Test - public void queryWithQuotedIndex() { - query("SELECT lastname FROM `bank` WHERE balance > 1000 ORDER BY age" - ).shouldBeAfterRewrite("SELECT lastname FROM bank WHERE balance > 1000 ORDER BY age"); - } - - @Test - public void queryWithQuotedField() { - query("SELECT `lastname` FROM bank ORDER BY age" - ).shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY age"); - - query("SELECT b.`lastname` FROM bank AS b ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); - } - - @Test - public void queryWithQuotedAlias() { - query("SELECT `b`.lastname FROM bank AS `b` ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); - - query("SELECT `b`.`lastname` FROM bank AS `b` ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); - - query("SELECT `b`.`lastname` AS `name` FROM bank AS `b` ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname AS name FROM bank AS b 
ORDER BY age"); + @Test + public void queryWithQuotedIndex() { + query("SELECT lastname FROM `bank` WHERE balance > 1000 ORDER BY age") + .shouldBeAfterRewrite("SELECT lastname FROM bank WHERE balance > 1000 ORDER BY age"); + } + + @Test + public void queryWithQuotedField() { + query("SELECT `lastname` FROM bank ORDER BY age") + .shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY age"); + + query("SELECT b.`lastname` FROM bank AS b ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); + } + + @Test + public void queryWithQuotedAlias() { + query("SELECT `b`.lastname FROM bank AS `b` ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); + + query("SELECT `b`.`lastname` FROM bank AS `b` ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); + + query("SELECT `b`.`lastname` AS `name` FROM bank AS `b` ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname AS name FROM bank AS b ORDER BY age"); + } + + @Test + public void selectSpecificFieldsUsingQuotedTableNamePrefix() { + query("SELECT `bank`.`lastname` FROM `bank`") + .shouldBeAfterRewrite("SELECT bank.lastname FROM bank"); + } + + @Test + public void queryWithQuotedAggrAndFunc() { + query( + "" + + "SELECT `b`.`lastname` AS `name`, AVG(`b`.`balance`) FROM `bank` AS `b` " + + "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`lastname` ORDER BY `b`.`lastname`") + .shouldBeAfterRewrite( + "SELECT b.lastname AS name, AVG(b.balance) FROM bank AS b " + + "WHERE ABS(b.age) > 20 GROUP BY b.lastname ORDER BY b.lastname"); + } + + private QueryAssertion query(String sql) { + return new QueryAssertion(sql); + } + + private static class QueryAssertion { + + private UnquoteIdentifierRule rule = new UnquoteIdentifierRule(); + private SQLQueryExpr expr; + + QueryAssertion(String sql) { + this.expr = SqlParserUtils.parse(sql); } - @Test - public void selectSpecificFieldsUsingQuotedTableNamePrefix() { - query("SELECT 
`bank`.`lastname` FROM `bank`" - ).shouldBeAfterRewrite("SELECT bank.lastname FROM bank"); - } - - @Test - public void queryWithQuotedAggrAndFunc() { - query("" + - "SELECT `b`.`lastname` AS `name`, AVG(`b`.`balance`) FROM `bank` AS `b` " + - "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`lastname` ORDER BY `b`.`lastname`" - ).shouldBeAfterRewrite( - "SELECT b.lastname AS name, AVG(b.balance) FROM bank AS b " + - "WHERE ABS(b.age) > 20 GROUP BY b.lastname ORDER BY b.lastname" - ); - } - - private QueryAssertion query(String sql) { - return new QueryAssertion(sql); - } - - private static class QueryAssertion { - - private UnquoteIdentifierRule rule = new UnquoteIdentifierRule(); - private SQLQueryExpr expr; - - QueryAssertion(String sql) { - this.expr = SqlParserUtils.parse(sql); - } - - void shouldBeAfterRewrite(String expected) { - rule.rewrite(expr); - Assert.assertEquals( - SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), - SQLUtils.toMySqlString(expr) - ); - } + void shouldBeAfterRewrite(String expected) { + rule.rewrite(expr); + Assert.assertEquals( + SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), SQLUtils.toMySqlString(expr)); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java index 15d97d362d..0fdf16e40e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.parent; import static org.junit.Assert.assertTrue; @@ -18,13 +17,12 @@ @RunWith(MockitoJUnitRunner.class) public class SQLExprParentSetterRuleTest { - @Mock - private SQLQueryExpr queryExpr; + @Mock private SQLQueryExpr queryExpr; - 
private SQLExprParentSetterRule rule = new SQLExprParentSetterRule(); + private SQLExprParentSetterRule rule = new SQLExprParentSetterRule(); - @Test - public void match() { - assertTrue(rule.match(queryExpr)); - } + @Test + public void match() { + assertTrue(rule.match(queryExpr)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java index 49023f522a..ccd440228b 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.parent; import static org.junit.Assert.assertNotNull; @@ -17,36 +16,31 @@ public class SQLExprParentSetterTest { - @Test - public void testSQLInSubQueryExprHasParent() { - String sql = - "SELECT * FROM TbA " + - "WHERE a IN (SELECT b FROM TbB)"; - SQLQueryExpr expr = SqlParserUtils.parse(sql); - expr.accept(new SQLExprParentExistsValidator()); - } - - @Test - public void testSQLInListExprHasParent() { - String sql = - "SELECT * FROM TbA " + - "WHERE a IN (10, 20)"; - SQLQueryExpr expr = SqlParserUtils.parse(sql); - expr.accept(new SQLExprParentExistsValidator()); + @Test + public void testSQLInSubQueryExprHasParent() { + String sql = "SELECT * FROM TbA WHERE a IN (SELECT b FROM TbB)"; + SQLQueryExpr expr = SqlParserUtils.parse(sql); + expr.accept(new SQLExprParentExistsValidator()); + } + + @Test + public void testSQLInListExprHasParent() { + String sql = "SELECT * FROM TbA WHERE a IN (10, 20)"; + SQLQueryExpr expr = SqlParserUtils.parse(sql); + expr.accept(new SQLExprParentExistsValidator()); + } + + static class SQLExprParentExistsValidator extends MySqlASTVisitorAdapter { + @Override + public boolean 
visit(SQLInSubQueryExpr expr) { + assertNotNull(expr.getExpr().getParent()); + return true; } - static class SQLExprParentExistsValidator extends MySqlASTVisitorAdapter { - @Override - public boolean visit(SQLInSubQueryExpr expr) { - assertNotNull(expr.getExpr().getParent()); - return true; - } - - @Override - public boolean visit(SQLInListExpr expr) { - assertNotNull(expr.getExpr().getParent()); - return true; - } + @Override + public boolean visit(SQLInListExpr expr) { + assertNotNull(expr.getExpr().getParent()); + return true; } - + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java index ed57335980..dd15fd6683 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java @@ -18,13 +18,12 @@ public class ExistsSubQueryRewriterTest extends SubQueryRewriterTestBase { @Test public void nonCorrelatedExists() { assertEquals( - sqlString( - expr("SELECT e.name FROM employee e, e.projects p WHERE p IS NOT MISSING")), + sqlString(expr("SELECT e.name FROM employee e, e.projects p WHERE p IS NOT MISSING")), sqlString( rewrite( expr( - "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p)" - )))); + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects" + + " as p)")))); } @Test diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java index a01988d965..7bd3dd847e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertFalse; @@ -18,67 +17,56 @@ public class SubQueryRewriteRuleTest { - final SubQueryRewriteRule rewriteRule = new SubQueryRewriteRule(); + final SubQueryRewriteRule rewriteRule = new SubQueryRewriteRule(); - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void isInMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE a IN (SELECT b FROM B)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isInMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * FROM A WHERE a IN (SELECT b FROM B)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void isNotInMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE a NOT IN (SELECT b FROM B)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isNotInMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * FROM A WHERE a NOT IN (SELECT b FROM B)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void isExistsMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A WHERE " + - "EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isExistsMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * FROM A WHERE EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void isNotExistsMatch() throws 
SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE NOT EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isNotExistsMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * " + "FROM A WHERE NOT EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void subQueryInSelectNotMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT A.v as v, (SELECT MAX(b) FROM B WHERE A.id = B.id) as max_age " + - "FROM A"; - assertFalse(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void subQueryInSelectNotMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT A.v as v, (SELECT MAX(b) FROM B WHERE A.id = B.id) as max_age FROM A"; + assertFalse(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void moreThanOneInIsNotSupporeted() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE a IN (SELECT b FROM B) and d IN (SELECT e FROM F)"; - exceptionRule.expect(SQLFeatureNotSupportedException.class); - exceptionRule.expectMessage("Unsupported subquery. Only one EXISTS or IN is supported"); - rewriteRule.match(SqlParserUtils.parse(sql)); - } + @Test + public void moreThanOneInIsNotSupporeted() throws SQLFeatureNotSupportedException { + String sql = "SELECT * FROM A WHERE a IN (SELECT b FROM B) and d IN (SELECT e FROM F)"; + exceptionRule.expect(SQLFeatureNotSupportedException.class); + exceptionRule.expectMessage("Unsupported subquery. 
Only one EXISTS or IN is supported"); + rewriteRule.match(SqlParserUtils.parse(sql)); + } - @Test - public void moreThanOneExistsIsNotSupporeted() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A WHERE " + - "EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v) AND EXISTS (SELECT 1 FROM C)"; - exceptionRule.expect(SQLFeatureNotSupportedException.class); - exceptionRule.expectMessage("Unsupported subquery. Only one EXISTS or IN is supported"); - rewriteRule.match(SqlParserUtils.parse(sql)); - } + @Test + public void moreThanOneExistsIsNotSupporeted() throws SQLFeatureNotSupportedException { + String sql = + "SELECT * " + + "FROM A WHERE " + + "EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v) AND EXISTS (SELECT 1 FROM C)"; + exceptionRule.expect(SQLFeatureNotSupportedException.class); + exceptionRule.expectMessage("Unsupported subquery. Only one EXISTS or IN is supported"); + rewriteRule.match(SqlParserUtils.parse(sql)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java index 036d0fc86a..ef7098004f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import com.alibaba.druid.sql.SQLUtils; @@ -14,19 +13,19 @@ public abstract class SubQueryRewriterTestBase { - SQLQueryExpr expr(String query) { - return SqlParserUtils.parse(query); - } + SQLQueryExpr expr(String query) { + return SqlParserUtils.parse(query); + } - SQLQueryExpr rewrite(SQLQueryExpr expr) { - new SubQueryRewriteRule().rewrite(expr); - return expr; - } + SQLQueryExpr rewrite(SQLQueryExpr expr) { + new 
SubQueryRewriteRule().rewrite(expr); + return expr; + } - String sqlString(SQLObject expr) { - return SQLUtils.toMySqlString(expr) - .replaceAll("\n", " ") - .replaceAll("\t", " ") - .replaceAll(" +", " "); - } + String sqlString(SQLObject expr) { + return SQLUtils.toMySqlString(expr) + .replaceAll("\n", " ") + .replaceAll("\t", " ") + .replaceAll(" +", " "); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java index b729b7ad59..5c5bc40bda 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery.rewriter; import static org.junit.Assert.assertEquals; @@ -17,121 +16,137 @@ public class SubqueryAliasRewriterTest { - @Test - public void testWithoutAlias() { - assertEquals( - sqlString(parse( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "WHERE TbA_0.a IN (SELECT TbB_1.b FROM TbB as TbB_1) and TbA_0.c > 10")), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT b FROM TbB) and c > 10")))); - } - - @Test - public void testWithAlias() { - assertEquals( - sqlString(parse( - "SELECT A.* " + - "FROM TbA as A " + - "WHERE A.a IN (SELECT B.b FROM TbB as B) " + - "AND A.c > 10")), - sqlString(rewrite(parse( - "SELECT A.* " + - "FROM TbA as A " + - "WHERE A.a IN (SELECT B.b FROM TbB as B) " + - "AND A.c > 10")))); - } - - @Test - public void testOuterWithoutAliasInnerWithAlias() { - assertEquals( - sqlString(parse( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "WHERE TbA_0.a IN (SELECT TbB.b FROM TbB as TbB) " + - "AND TbA_0.c > 10")), - 
sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT TbB.b FROM TbB as TbB) " + - "AND c > 10")))); - } - - @Test - public void testOuterWithoutAliasInnerMixAlias() { - String expect = - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "WHERE TbA_0.a IN (SELECT B.b FROM TbB as B) " + - "AND TbA_0.c > 10"; - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT b FROM TbB as B) " + - "AND c > 10")))); - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT TbB.b FROM TbB as B) " + - "AND c > 10")))); - } - - @Test - public void testOuterWithAliasInnerWithoutAlias() { - assertEquals( - sqlString(parse( - "SELECT TbA.* " + - "FROM TbA as TbA " + - "WHERE TbA.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + - "AND TbA.c > 10")), - sqlString(rewrite(parse( - "SELECT TbA.* " + - "FROM TbA as TbA " + - "WHERE TbA.a IN (SELECT b FROM TbB ) " + - "AND TbA.c > 10")))); - } - - @Test - public void testOuterMixAliasInnerWithoutAlias() { - String expect = - "SELECT A.* " + - "FROM TbA as A " + - "WHERE A.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + - "AND A.c > 10"; - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT TbA.* " + - "FROM TbA as A " + - "WHERE a IN (SELECT b FROM TbB ) " + - "AND TbA.c > 10")))); - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA as A " + - "WHERE TbA.a IN (SELECT b FROM TbB ) " + - "AND TbA.c > 10")))); - } - - - private String sqlString(SQLExpr expr) { - return SQLUtils.toMySqlString(expr); - } - - private SQLQueryExpr rewrite(SQLQueryExpr expr) { - expr.accept(new SubqueryAliasRewriter()); - return expr; - } + @Test + public void testWithoutAlias() { + assertEquals( + sqlString( + parse( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "WHERE TbA_0.a IN (SELECT TbB_1.b FROM TbB as TbB_1) and TbA_0.c > 10")), 
+ sqlString( + rewrite( + parse("SELECT * " + "FROM TbA " + "WHERE a IN (SELECT b FROM TbB) and c > 10")))); + } + + @Test + public void testWithAlias() { + assertEquals( + sqlString( + parse( + "SELECT A.* " + + "FROM TbA as A " + + "WHERE A.a IN (SELECT B.b FROM TbB as B) " + + "AND A.c > 10")), + sqlString( + rewrite( + parse( + "SELECT A.* " + + "FROM TbA as A " + + "WHERE A.a IN (SELECT B.b FROM TbB as B) " + + "AND A.c > 10")))); + } + + @Test + public void testOuterWithoutAliasInnerWithAlias() { + assertEquals( + sqlString( + parse( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "WHERE TbA_0.a IN (SELECT TbB.b FROM TbB as TbB) " + + "AND TbA_0.c > 10")), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA " + + "WHERE a IN (SELECT TbB.b FROM TbB as TbB) " + + "AND c > 10")))); + } + + @Test + public void testOuterWithoutAliasInnerMixAlias() { + String expect = + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "WHERE TbA_0.a IN (SELECT B.b FROM TbB as B) " + + "AND TbA_0.c > 10"; + + assertEquals( + sqlString(parse(expect)), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA " + + "WHERE a IN (SELECT b FROM TbB as B) " + + "AND c > 10")))); + + assertEquals( + sqlString(parse(expect)), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA " + + "WHERE a IN (SELECT TbB.b FROM TbB as B) " + + "AND c > 10")))); + } + + @Test + public void testOuterWithAliasInnerWithoutAlias() { + assertEquals( + sqlString( + parse( + "SELECT TbA.* " + + "FROM TbA as TbA " + + "WHERE TbA.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + + "AND TbA.c > 10")), + sqlString( + rewrite( + parse( + "SELECT TbA.* " + + "FROM TbA as TbA " + + "WHERE TbA.a IN (SELECT b FROM TbB ) " + + "AND TbA.c > 10")))); + } + + @Test + public void testOuterMixAliasInnerWithoutAlias() { + String expect = + "SELECT A.* " + + "FROM TbA as A " + + "WHERE A.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + + "AND A.c > 10"; + + assertEquals( + sqlString(parse(expect)), + sqlString( + 
rewrite( + parse( + "SELECT TbA.* " + + "FROM TbA as A " + + "WHERE a IN (SELECT b FROM TbB ) " + + "AND TbA.c > 10")))); + + assertEquals( + sqlString(parse(expect)), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA as A " + + "WHERE TbA.a IN (SELECT b FROM TbB ) " + + "AND TbA.c > 10")))); + } + + private String sqlString(SQLExpr expr) { + return SQLUtils.toMySqlString(expr); + } + + private SQLQueryExpr rewrite(SQLQueryExpr expr) { + expr.accept(new SubqueryAliasRewriter()); + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java index d001e0e1d0..44d3e2cbc0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.term; - import static org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.util.MultipleIndexClusterUtils.mockMultipleIndexEnv; @@ -22,88 +20,93 @@ import org.opensearch.sql.legacy.util.SqlParserUtils; public class TermFieldRewriterTest { - @Rule - public ExpectedException exception = ExpectedException.none(); - - @Before - public void setup() { - mockMultipleIndexEnv(); - } - - @Test - public void testFromSubqueryShouldPass() { - String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address = 'sea') t"; - String expected = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address.keyword = 'sea') t"; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testFromSubqueryWithoutTermShouldPass() { - String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE 
age = 10) t"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testFieldShouldBeRewritten() { - String sql = "SELECT age FROM account1 WHERE address = 'sea'"; - String expected = "SELECT age FROM account1 WHERE address.keyword = 'sea'"; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testSelectTheFieldWithCompatibleMappingShouldPass() { - String sql = "SELECT id FROM account* WHERE id = 10"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testSelectTheFieldOnlyInOneIndexShouldPass() { - String sql = "SELECT address FROM account*"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - /** - * Ideally, it should fail. There are two reasons we didn't cover it now. - * 1. The semantic check already done that. - * 2. The {@link TermFieldRewriter} didn't touch allcolumn case. 
- */ - @Test - public void testSelectAllFieldWithConflictMappingShouldPass() { - String sql = "SELECT * FROM account*"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testSelectTheFieldWithConflictMappingShouldThrowException() { - String sql = "SELECT age FROM account* WHERE age = 10"; - exception.expect(VerificationException.class); - exception.expectMessage("Different mappings are not allowed for the same field[age]"); - rewriteTerm(sql); - } - - private String rewriteTerm(String sql) { - SQLQueryExpr sqlQueryExpr = SqlParserUtils.parse(sql); - sqlQueryExpr.accept(new TermFieldRewriter()); - return SQLUtils.toMySqlString(sqlQueryExpr) - .replaceAll("[\\n\\t]+", " ") - .replaceAll("^\\(", " ") - .replaceAll("\\)$", " ") - .trim(); - } + @Rule public ExpectedException exception = ExpectedException.none(); + + @Before + public void setup() { + mockMultipleIndexEnv(); + } + + @Test + public void testFromSubqueryShouldPass() { + String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address = 'sea') t"; + String expected = + "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address.keyword = 'sea') t"; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testFromSubqueryWithoutTermShouldPass() { + String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE age = 10) t"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testFieldShouldBeRewritten() { + String sql = "SELECT age FROM account1 WHERE address = 'sea'"; + String expected = "SELECT age FROM account1 WHERE address.keyword = 'sea'"; + + assertThat( + rewriteTerm(sql), + 
MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testSelectTheFieldWithCompatibleMappingShouldPass() { + String sql = "SELECT id FROM account* WHERE id = 10"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testSelectTheFieldOnlyInOneIndexShouldPass() { + String sql = "SELECT address FROM account*"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + /** + * Ideally, it should fail. There are two reasons we didn't cover it now. 1. The semantic check + * already done that. 2. The {@link TermFieldRewriter} didn't touch allcolumn case. + */ + @Test + public void testSelectAllFieldWithConflictMappingShouldPass() { + String sql = "SELECT * FROM account*"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testSelectTheFieldWithConflictMappingShouldThrowException() { + String sql = "SELECT age FROM account* WHERE age = 10"; + exception.expect(VerificationException.class); + exception.expectMessage("Different mappings are not allowed for the same field[age]"); + rewriteTerm(sql); + } + + private String rewriteTerm(String sql) { + SQLQueryExpr sqlQueryExpr = SqlParserUtils.parse(sql); + sqlQueryExpr.accept(new TermFieldRewriter()); + return SQLUtils.toMySqlString(sqlQueryExpr) + .replaceAll("[\\n\\t]+", " ") + .replaceAll("^\\(", " ") + .replaceAll("\\)$", " ") + .trim(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java index 24889ff3ca..e63c60467f 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java @@ -3,181 +3,206 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.spatial; - import org.junit.Assert; import org.junit.Test; import org.opensearch.sql.legacy.spatial.WktToGeoJsonConverter; -/** - * Created by Eliran on 4/8/2015. - */ +/** Created by Eliran on 4/8/2015. */ public class WktToGeoJsonConverterTest { - @Test - public void convertPoint_NoRedundantSpaces_ShouldConvert(){ - String wkt = "POINT(12.3 13.3)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_WithRedundantSpaces_ShouldConvert(){ - String wkt = " POINT ( 12.3 13.3 ) "; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_RoundNumbers_ShouldConvert(){ - String wkt = "POINT(12 13)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_FirstIsRoundNumber_ShouldConvert(){ - String wkt = "POINT(12 13.3)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13.3]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_SecondIsRoundNumber_ShouldConvert(){ - String wkt = "POINT(12.2 13)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.2,13]}"; - 
Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_NegativeCoordinates_ShouldConvert(){ - String wkt = "POINT(-12.2 13)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [-12.2,13]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygon_NoRedundantSpaces_ShouldConvert(){ - String wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygon_NegativeCoordinates_ShouldConvert(){ - String wkt = "POLYGON ((-30 10, 40 40, 20 40, 10 20, -30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[-30,10],[40,40],[20,40],[10,20],[-30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygon_WithRedundantSpaces_ShouldConvert(){ - String wkt = " POLYGON ( (30 10, 40 40 , 20 40, 10 20, 30 10 ) ) "; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygonWithHole_NoRedundantSpaces_ShouldConvert(){ - String wkt = "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygonWithHole_WithRedundantSpaces_ShouldConvert(){ - String wkt = "POLYGON ( (35 10, 
45 45, 15 40, 10 20, 35 10 ), (20 30 , 35 35, 30 20, 20 30 ) ) "; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertLineString_NoRedundantSpaces_ShouldConvert(){ - String wkt = "LINESTRING (30 10, 10 30, 40 40)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertLineString_NegativeCoordinates_ShouldConvert(){ - String wkt = "LINESTRING (-30 10, 10 30, 40 40)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"LineString\", \"coordinates\": [[-30,10],[10,30],[40,40]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertLineString_WithRedundantSpaces_ShouldConvert(){ - String wkt = "LINESTRING ( 30 10, 10 30 , 40 40 )"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiPolygon_NoRedundantSpaces_ShouldConvert(){ - String wkt = "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPolygon\", \"coordinates\": [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - @Test - public void convertMultiPolygon_WithRedundantSpaces_ShouldConvert(){ - String wkt = "MULTIPOLYGON ( ((30 20, 45 40, 10 40, 30 20) ) , ((15 5, 40 10, 10 20, 5 10, 15 5)))"; - String geoJson = 
WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPolygon\", \"coordinates\": [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - @Test - public void convertMultiPolygon_OnePolygonHaveHoles_ShouldConvert(){ - String wkt = "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20),(20 30, 35 35, 30 20, 20 30)),((15 5, 40 10, 10 20, 5 10, 15 5)))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPolygon\", \"coordinates\": [[[[30,20],[45,40],[10,40],[30,20]],[[20,30],[35,35],[30,20],[20,30]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiPoint_V1_ShouldConvert(){ - String wkt = "MULTIPOINT (10 40, 40 30, 20 20, 30 10)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiPoint_V2_ShouldConvert(){ - String wkt = "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiLineString_NoRedundantSpaces_ShouldConvert(){ - String wkt = "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiLineString\", \"coordinates\": [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - @Test - public void convertMultiLineString_WithRedundantSpaces_ShouldConvert(){ - String wkt = "MULTILINESTRING ( (10 10, 20 20, 10 40 ) , (40 40, 
30 30, 40 20, 30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiLineString\", \"coordinates\": [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } + @Test + public void convertPoint_NoRedundantSpaces_ShouldConvert() { + String wkt = "POINT(12.3 13.3)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_WithRedundantSpaces_ShouldConvert() { + String wkt = " POINT ( 12.3 13.3 ) "; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_RoundNumbers_ShouldConvert() { + String wkt = "POINT(12 13)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_FirstIsRoundNumber_ShouldConvert() { + String wkt = "POINT(12 13.3)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13.3]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_SecondIsRoundNumber_ShouldConvert() { + String wkt = "POINT(12.2 13)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.2,13]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_NegativeCoordinates_ShouldConvert() { + String wkt = "POINT(-12.2 13)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": 
[-12.2,13]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygon_NoRedundantSpaces_ShouldConvert() { + String wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygon_NegativeCoordinates_ShouldConvert() { + String wkt = "POLYGON ((-30 10, 40 40, 20 40, 10 20, -30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\": [[[-30,10],[40,40],[20,40],[10,20],[-30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygon_WithRedundantSpaces_ShouldConvert() { + String wkt = " POLYGON ( (30 10, 40 40 , 20 40, 10 20, 30 10 ) ) "; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygonWithHole_NoRedundantSpaces_ShouldConvert() { + String wkt = "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\":" + + " [[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygonWithHole_WithRedundantSpaces_ShouldConvert() { + String wkt = + "POLYGON ( (35 10, 45 45, 15 40, 10 20, 35 10 ), (20 30 , 35 35, 30 20, 20 30 ) ) "; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\":" + + " 
[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertLineString_NoRedundantSpaces_ShouldConvert() { + String wkt = "LINESTRING (30 10, 10 30, 40 40)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertLineString_NegativeCoordinates_ShouldConvert() { + String wkt = "LINESTRING (-30 10, 10 30, 40 40)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"LineString\", \"coordinates\": [[-30,10],[10,30],[40,40]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertLineString_WithRedundantSpaces_ShouldConvert() { + String wkt = "LINESTRING ( 30 10, 10 30 , 40 40 )"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPolygon_NoRedundantSpaces_ShouldConvert() { + String wkt = "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPolygon\", \"coordinates\":" + + " [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPolygon_WithRedundantSpaces_ShouldConvert() { + String wkt = + "MULTIPOLYGON ( ((30 20, 45 40, 10 40, 30 20) ) , ((15 5, 40 10, 10 20, 5 10, 15 5)))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPolygon\", \"coordinates\":" + + " 
[[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPolygon_OnePolygonHaveHoles_ShouldConvert() { + String wkt = + "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20),(20 30, 35 35, 30 20, 20 30)),((15 5, 40 10," + + " 10 20, 5 10, 15 5)))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPolygon\", \"coordinates\":" + + " [[[[30,20],[45,40],[10,40],[30,20]],[[20,30],[35,35],[30,20],[20,30]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPoint_V1_ShouldConvert() { + String wkt = "MULTIPOINT (10 40, 40 30, 20 20, 30 10)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPoint_V2_ShouldConvert() { + String wkt = "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiLineString_NoRedundantSpaces_ShouldConvert() { + String wkt = "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiLineString\", \"coordinates\":" + + " [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiLineString_WithRedundantSpaces_ShouldConvert() { + String wkt = "MULTILINESTRING ( (10 10, 20 20, 10 40 ) , (40 40, 30 30, 40 20, 30 10))"; + String geoJson = 
WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiLineString\", \"coordinates\":" + + " [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java index 70c4a2aa11..9fc2b6012d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.junit.Assert.assertEquals; @@ -35,69 +34,64 @@ public class SQLFunctionsTest { - private SQLFunctions sqlFunctions = new SQLFunctions(); - - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void testAssign() throws SqlParseException { - SQLFunctions sqlFunctions = new SQLFunctions(); - - final SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(10); - final Tuple assign = sqlFunctions.function("assign", - ImmutableList.of(new KVValue(null, sqlIntegerExpr)), - null, - true); - - assertTrue(assign.v1().matches("assign_[0-9]+")); - assertTrue(assign.v2().matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); - } - - @Test - public void testAbsWithIntReturnType() { - final SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(6); - - final SQLMethodInvokeExpr invokeExpr = new SQLMethodInvokeExpr("ABS"); - invokeExpr.addParameter(sqlIntegerExpr); - List params = new ArrayList<>(); - - final MethodField field = new ScriptMethodField("ABS", params, null, null); - field.setExpression(invokeExpr); - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(OpenSearchDataType.INTEGER); - - Schema.Type resolvedType = columnTypeProvider.get(0); - final Schema.Type returnType 
= sqlFunctions.getScriptFunctionReturnType(field, resolvedType); - Assert.assertEquals(returnType, Schema.Type.INTEGER); - } - - @Test - public void testCastReturnType() { - final SQLIdentifierExpr identifierExpr = new SQLIdentifierExpr("int_type"); - SQLDataType sqlDataType = new SQLDataTypeImpl("INT"); - final SQLCastExpr castExpr = new SQLCastExpr(); - castExpr.setExpr(identifierExpr); - castExpr.setDataType(sqlDataType); - - List params = new ArrayList<>(); - final MethodField field = new ScriptMethodField("CAST", params, null, null); - field.setExpression(castExpr); - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(OpenSearchDataType.INTEGER); - - Schema.Type resolvedType = columnTypeProvider.get(0); - final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); - Assert.assertEquals(returnType, Schema.Type.INTEGER); - } - - @Test - public void testCastIntStatementScript() throws SqlParseException { - assertEquals( - "def result = (doc['age'].value instanceof boolean) " - + "? (doc['age'].value ? 
1 : 0) " - + ": Double.parseDouble(doc['age'].value.toString()).intValue()", - sqlFunctions.getCastScriptStatement( - "result", "int", Arrays.asList(new KVValue("age"))) - ); - } - + private SQLFunctions sqlFunctions = new SQLFunctions(); + + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void testAssign() throws SqlParseException { + SQLFunctions sqlFunctions = new SQLFunctions(); + + final SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(10); + final Tuple assign = + sqlFunctions.function( + "assign", ImmutableList.of(new KVValue(null, sqlIntegerExpr)), null, true); + + assertTrue(assign.v1().matches("assign_[0-9]+")); + assertTrue(assign.v2().matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); + } + + @Test + public void testAbsWithIntReturnType() { + final SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(6); + + final SQLMethodInvokeExpr invokeExpr = new SQLMethodInvokeExpr("ABS"); + invokeExpr.addParameter(sqlIntegerExpr); + List params = new ArrayList<>(); + + final MethodField field = new ScriptMethodField("ABS", params, null, null); + field.setExpression(invokeExpr); + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(OpenSearchDataType.INTEGER); + + Schema.Type resolvedType = columnTypeProvider.get(0); + final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); + Assert.assertEquals(returnType, Schema.Type.INTEGER); + } + + @Test + public void testCastReturnType() { + final SQLIdentifierExpr identifierExpr = new SQLIdentifierExpr("int_type"); + SQLDataType sqlDataType = new SQLDataTypeImpl("INT"); + final SQLCastExpr castExpr = new SQLCastExpr(); + castExpr.setExpr(identifierExpr); + castExpr.setDataType(sqlDataType); + + List params = new ArrayList<>(); + final MethodField field = new ScriptMethodField("CAST", params, null, null); + field.setExpression(castExpr); + ColumnTypeProvider columnTypeProvider = new 
ColumnTypeProvider(OpenSearchDataType.INTEGER); + + Schema.Type resolvedType = columnTypeProvider.get(0); + final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); + Assert.assertEquals(returnType, Schema.Type.INTEGER); + } + + @Test + public void testCastIntStatementScript() throws SqlParseException { + assertEquals( + "def result = (doc['age'].value instanceof boolean) " + + "? (doc['age'].value ? 1 : 0) " + + ": Double.parseDouble(doc['age'].value.toString()).intValue()", + sqlFunctions.getCastScriptStatement("result", "int", Arrays.asList(new KVValue("age")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java index d25fed6f31..b73e91981e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.Matchers.equalTo; @@ -17,55 +16,57 @@ public class StringUtilsTest { - private Locale originalLocale; + private Locale originalLocale; - @Before - public void saveOriginalLocale() { - originalLocale = Locale.getDefault(); - } + @Before + public void saveOriginalLocale() { + originalLocale = Locale.getDefault(); + } - @After - public void restoreOriginalLocale() { - Locale.setDefault(originalLocale); - } + @After + public void restoreOriginalLocale() { + Locale.setDefault(originalLocale); + } - @Test - public void toLower() { - final String input = "SAMPLE STRING"; - final String output = StringUtils.toLower(input); + @Test + public void toLower() { + final String input = "SAMPLE STRING"; + final String output = StringUtils.toLower(input); - Assert.assertThat(output, equalTo("sample string")); + Assert.assertThat(output, equalTo("sample 
string")); - // See https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toLowerCase(java.util.Locale) - // for the choice of these characters and the locale. - final String upper = "\u0130 \u0049"; - Locale.setDefault(Locale.forLanguageTag("tr")); + // See + // https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toLowerCase(java.util.Locale) + // for the choice of these characters and the locale. + final String upper = "\u0130 \u0049"; + Locale.setDefault(Locale.forLanguageTag("tr")); - Assert.assertThat(upper.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(upper))); - } + Assert.assertThat(upper.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(upper))); + } - @Test - public void toUpper() { - final String input = "sample string"; - final String output = StringUtils.toUpper(input); + @Test + public void toUpper() { + final String input = "sample string"; + final String output = StringUtils.toUpper(input); - Assert.assertThat(output, equalTo("SAMPLE STRING")); + Assert.assertThat(output, equalTo("SAMPLE STRING")); - // See https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toUpperCase(java.util.Locale) - // for the choice of these characters and the locale. - final String lower = "\u0069 \u0131"; - Locale.setDefault(Locale.forLanguageTag("tr")); + // See + // https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toUpperCase(java.util.Locale) + // for the choice of these characters and the locale. 
+ final String lower = "\u0069 \u0131"; + Locale.setDefault(Locale.forLanguageTag("tr")); - Assert.assertThat(lower.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(lower))); - } + Assert.assertThat(lower.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(lower))); + } - @Test - public void format() { - Locale.setDefault(Locale.forLanguageTag("tr")); - final String upper = "\u0130 \u0049"; - final String lower = "\u0069 \u0131"; + @Test + public void format() { + Locale.setDefault(Locale.forLanguageTag("tr")); + final String upper = "\u0130 \u0049"; + final String lower = "\u0069 \u0131"; - final String output = StringUtils.format("%s %s", upper, lower); - Assert.assertThat(output, equalTo(String.format(Locale.ROOT, "%s %s", upper, lower))); - } + final String output = StringUtils.format("%s %s", upper, lower); + Assert.assertThat(output, equalTo(String.format(Locale.ROOT, "%s %s", upper, lower))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java index 21731db5a5..e3c7a74a71 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import java.util.HashMap; @@ -14,54 +13,54 @@ public class UtilTest { - @Test - public void clearEmptyPaths_EmptyMap_ShouldReturnTrue(){ - Map map = new HashMap<>(); - boolean result = Util.clearEmptyPaths(map); - // - Assert.assertTrue(result); - } + @Test + public void clearEmptyPaths_EmptyMap_ShouldReturnTrue() { + Map map = new HashMap<>(); + boolean result = Util.clearEmptyPaths(map); + // + Assert.assertTrue(result); + } - @Test - public void clearEmptyPaths_EmptyPathSize1_ShouldReturnTrueAndMapShouldBeEmpty(){ - Map map = new HashMap<>(); - map.put("a",new HashMap()); - boolean 
result = Util.clearEmptyPaths(map); - Assert.assertTrue(result); - Assert.assertEquals(0,map.size()); - } + @Test + public void clearEmptyPaths_EmptyPathSize1_ShouldReturnTrueAndMapShouldBeEmpty() { + Map map = new HashMap<>(); + map.put("a", new HashMap()); + boolean result = Util.clearEmptyPaths(map); + Assert.assertTrue(result); + Assert.assertEquals(0, map.size()); + } - @Test - public void clearEmptyPaths_EmptyPathSize2_ShouldReturnTrueAndMapShouldBeEmpty(){ - Map map = new HashMap<>(); - Map innerMap = new HashMap<>(); - innerMap.put("b",new HashMap()); - map.put("a",innerMap); - boolean result = Util.clearEmptyPaths(map); - Assert.assertTrue(result); - Assert.assertEquals(0,map.size()); - } + @Test + public void clearEmptyPaths_EmptyPathSize2_ShouldReturnTrueAndMapShouldBeEmpty() { + Map map = new HashMap<>(); + Map innerMap = new HashMap<>(); + innerMap.put("b", new HashMap()); + map.put("a", innerMap); + boolean result = Util.clearEmptyPaths(map); + Assert.assertTrue(result); + Assert.assertEquals(0, map.size()); + } - @Test - public void clearEmptyPaths_2PathsOneEmpty_MapShouldBeSizeOne(){ - Map map = new HashMap<>(); - map.put("a",new HashMap()); - map.put("c",1); - Util.clearEmptyPaths(map); - Assert.assertEquals(1,map.size()); - } + @Test + public void clearEmptyPaths_2PathsOneEmpty_MapShouldBeSizeOne() { + Map map = new HashMap<>(); + map.put("a", new HashMap()); + map.put("c", 1); + Util.clearEmptyPaths(map); + Assert.assertEquals(1, map.size()); + } - @Test - @SuppressWarnings("unchecked") - public void clearEmptyPaths_MapSizeTwoAndTwoOneInnerEmpty_MapShouldBeSizeTwoAndOne(){ - Map map = new HashMap<>(); - Map innerMap = new HashMap<>(); - innerMap.put("b",2); - innerMap.put("c",new HashMap()); - map.put("a",innerMap); - map.put("c",1); - Util.clearEmptyPaths(map); - Assert.assertEquals(2, map.size()); - Assert.assertEquals(1,((HashMap)map.get("a")).size()); - } + @Test + @SuppressWarnings("unchecked") + public void 
clearEmptyPaths_MapSizeTwoAndTwoOneInnerEmpty_MapShouldBeSizeTwoAndOne() { + Map map = new HashMap<>(); + Map innerMap = new HashMap<>(); + innerMap.put("b", 2); + innerMap.put("c", new HashMap()); + map.put("a", innerMap); + map.put("c", 1); + Util.clearEmptyPaths(map); + Assert.assertEquals(2, map.size()); + Assert.assertEquals(1, ((HashMap) map.get("a")).size()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java index 6228b971e2..3ad1cae211 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import com.alibaba.druid.sql.parser.ParserException; @@ -15,22 +14,20 @@ import org.opensearch.sql.legacy.query.OpenSearchActionFactory; import org.opensearch.sql.legacy.query.QueryAction; -/** - * Test utils class that explains a query - */ +/** Test utils class that explains a query */ public class SqlExplainUtils { - public static String explain(String query) { - try { - Client mockClient = Mockito.mock(Client.class); - CheckScriptContents.stubMockClient(mockClient); - QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); + public static String explain(String query) { + try { + Client mockClient = Mockito.mock(Client.class); + CheckScriptContents.stubMockClient(mockClient); + QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); - return queryAction.explain().explain(); - } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { - throw new ParserException("Illegal sql expr in: " + query); - } + return queryAction.explain().explain(); + } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { + throw new ParserException("Illegal 
sql expr in: " + query); } + } - private SqlExplainUtils() {} + private SqlExplainUtils() {} } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java index a1c023cbff..90ccc705fd 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import com.alibaba.druid.sql.ast.SQLExpr; @@ -13,24 +12,23 @@ import org.opensearch.sql.legacy.parser.ElasticSqlExprParser; import org.opensearch.sql.legacy.rewriter.parent.SQLExprParentSetter; -/** - * Test utils class include all SQLExpr related method. - */ +/** Test utils class include all SQLExpr related method. */ public class SqlParserUtils { - /** - * Parse sql with {@link ElasticSqlExprParser} - * @param sql sql - * @return {@link SQLQueryExpr} - */ - public static SQLQueryExpr parse(String sql) { - ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal sql: " + sql); - } - SQLQueryExpr queryExpr = (SQLQueryExpr) expr; - queryExpr.accept(new SQLExprParentSetter()); - return (SQLQueryExpr) expr; + /** + * Parse sql with {@link ElasticSqlExprParser} + * + * @param sql sql + * @return {@link SQLQueryExpr} + */ + public static SQLQueryExpr parse(String sql) { + ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal sql: " + sql); } + SQLQueryExpr queryExpr = (SQLQueryExpr) expr; + queryExpr.accept(new SQLExprParentSetter()); + return (SQLQueryExpr) expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java index 27be512fc0..ab9a0ded14 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static com.google.common.base.Strings.isNullOrEmpty; @@ -36,792 +35,809 @@ public class TestUtils { - /** - * Create test index by REST client. - * @param client client connection - * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping - */ - public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { - Request request = new Request("PUT", "/" + indexName); - if (!isNullOrEmpty(mapping)) { - request.setJsonEntity(mapping); - } - performRequest(client, request); - } - - /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. - * @param client client connection - * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping - */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, String mapping) { - Request request = new Request("PUT", "/" + indexName); - JSONObject jsonObject = isNullOrEmpty(mapping) ? new JSONObject() : new JSONObject(mapping); - jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); - request.setJsonEntity(jsonObject.toString()); - - performRequest(client, request); + /** + * Create test index by REST client. 
+ * + * @param client client connection + * @param indexName test index name + * @param mapping test index mapping or null if no predefined mapping + */ + public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { + Request request = new Request("PUT", "/" + indexName); + if (!isNullOrEmpty(mapping)) { + request.setJsonEntity(mapping); } - - /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist - * 404 - one or more indices specified or aliases do not exist - * @param client client connection - * @param indexName index name - * @return true for index exist - */ - public static boolean isIndexExist(RestClient client, String indexName) { - try { - Response response = client.performRequest(new Request("HEAD", "/" + indexName)); - return (response.getStatusLine().getStatusCode() == 200); - } catch (IOException e) { - throw new IllegalStateException("Failed to perform request", e); - } + performRequest(client, request); + } + + /** + * https://github.com/elastic/elasticsearch/pull/49959 Deprecate creation of dot-prefixed index + * names except for hidden and system indices. Create hidden index by REST client. + * + * @param client client connection + * @param indexName test index name + * @param mapping test index mapping or null if no predefined mapping + */ + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { + Request request = new Request("PUT", "/" + indexName); + JSONObject jsonObject = isNullOrEmpty(mapping) ? 
new JSONObject() : new JSONObject(mapping); + jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); + request.setJsonEntity(jsonObject.toString()); + + performRequest(client, request); + } + + /** + * Check if index already exists by OpenSearch index exists API which returns: 200 - specified + * indices or aliases exist 404 - one or more indices specified or aliases do not exist + * + * @param client client connection + * @param indexName index name + * @return true for index exist + */ + public static boolean isIndexExist(RestClient client, String indexName) { + try { + Response response = client.performRequest(new Request("HEAD", "/" + indexName)); + return (response.getStatusLine().getStatusCode() == 200); + } catch (IOException e) { + throw new IllegalStateException("Failed to perform request", e); } - - /** - * Load test data set by REST client. - * @param client client connection - * @param indexName index name - * @param dataSetFilePath file path of test data set - * @throws IOException - */ - public static void loadDataByRestClient(RestClient client, String indexName, String dataSetFilePath) throws IOException { - Path path = Paths.get(getResourceFilePath(dataSetFilePath)); - Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); - request.setJsonEntity(new String(Files.readAllBytes(path))); - performRequest(client, request); + } + + /** + * Load test data set by REST client. 
+ * + * @param client client connection + * @param indexName index name + * @param dataSetFilePath file path of test data set + * @throws IOException + */ + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { + Path path = Paths.get(getResourceFilePath(dataSetFilePath)); + Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); + request.setJsonEntity(new String(Files.readAllBytes(path))); + performRequest(client, request); + } + + /** + * Perform a request by REST client. + * + * @param client client connection + * @param request request object + */ + public static Response performRequest(RestClient client, Request request) { + try { + Response response = client.performRequest(request); + int status = response.getStatusLine().getStatusCode(); + if (status >= 400) { + throw new IllegalStateException("Failed to perform request. Error code: " + status); + } + return response; + } catch (IOException e) { + throw new IllegalStateException("Failed to perform request", e); } - - /** - * Perform a request by REST client. - * @param client client connection - * @param request request object - */ - public static Response performRequest(RestClient client, Request request) { - try { - Response response = client.performRequest(request); - int status = response.getStatusLine().getStatusCode(); - if (status >= 400) { - throw new IllegalStateException("Failed to perform request. 
Error code: " + status); - } - return response; - } catch (IOException e) { - throw new IllegalStateException("Failed to perform request", e); + } + + public static String getAccountIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getPhraseIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"phrase\": {\n" + + " \"type\": \"text\",\n" + + " \"store\": true\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDogIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDogs2IndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDogs3IndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " 
\"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"color\": {\n" + + " \"type\": \"text\"\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getPeople2IndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getGameOfThronesIndexMapping() { + return "{ \"mappings\": { " + + " \"properties\": {\n" + + " \"nickname\": {\n" + + " \"type\":\"text\", " + + " \"fielddata\":true" + + " },\n" + + " \"name\": {\n" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"ofHerName\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"ofHisName\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"house\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }" + + "} } }"; + } + + // System + + public static String getOdbcIndexMapping() { + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"odbc_time\":{\n" + + "\t\t\t\t\"type\":\"date\",\n" + + "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + + "\t\t\t},\n" + + "\t\t\t\"docCount\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; + } + + public static String getLocationIndexMapping() { + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"place\":{\n" + + "\t\t\t\t\"type\":\"geo_shape\"\n" + + + // "\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in + // 
OpenSearch + // "\t\t\t\t\"precision\": \"10km\"\n" + + "\t\t\t},\n" + + "\t\t\t\"center\":{\n" + + "\t\t\t\t\"type\":\"geo_point\"\n" + + "\t\t\t},\n" + + "\t\t\t\"description\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; + } + + public static String getEmployeeNestedTypeIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"comments\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"date\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"message\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"city\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"title\": {\n" + + " \"type\": \"text\",\n" + + " 
\"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + } + + public static String getNestedTypeIndexMapping() { + return "{ \"mappings\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"info\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"author\": {\n" + + " \"type\": \"keyword\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\" : 256\n" + + " }\n" + + " },\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"comment\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"data\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"myNum\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"someField\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }}"; + } + + public static String getJoinTypeIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"join_field\": {\n" + + " \"type\": \"join\",\n" + + " \"relations\": {\n" + + " \"parentType\": \"childrenType\"\n" + + " }\n" + + " },\n" + + " \"parentTile\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"author\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"info\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getBankIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " 
\"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getBankWithNullValuesIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getOrderIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " 
\"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getWeblogsIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"host\": {\n" + + " \"type\": \"ip\"\n" + + " },\n" + + " \"method\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"url\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"response\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"bytes\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getDateIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"date_keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDateTimeIndexMapping() { + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"birthday\": {" + + " \"type\": \"date\"" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getNestedSimpleIndexMapping() { + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"address\": {" + + " \"type\": \"nested\"," + + " \"properties\": {" + + " \"city\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }," + + " \"state\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }," + + " \"age\": {" + + " \"type\": \"long\"" + + " }," + + " \"id\": {" + + " \"type\": \"long\"" + + " }," + + " \"name\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; + } + + public static void loadBulk(Client client, String jsonPath, String defaultIndex) + throws Exception { + 
System.out.println(String.format("Loading file %s into OpenSearch cluster", jsonPath)); + String absJsonPath = getResourceFilePath(jsonPath); + + BulkRequest bulkRequest = new BulkRequest(); + try (final InputStream stream = new FileInputStream(absJsonPath); + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { + + while (true) { + + String actionLine = br.readLine(); + if (actionLine == null || actionLine.trim().isEmpty()) { + break; } + String sourceLine = br.readLine(); + JSONObject actionJson = new JSONObject(actionLine); + + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(defaultIndex); + if (actionJson.getJSONObject("index").has("_id")) { + String docId = actionJson.getJSONObject("index").getString("_id"); + indexRequest.id(docId); + } + if (actionJson.getJSONObject("index").has("_routing")) { + String routing = actionJson.getJSONObject("index").getString("_routing"); + indexRequest.routing(routing); + } + indexRequest.source(sourceLine, XContentType.JSON); + bulkRequest.add(indexRequest); + } } - public static String getAccountIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - 
" \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - " }"+ - " }" + - "}"; - } - - public static String getPhraseIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"phrase\": {\n" + - " \"type\": \"text\",\n" + - " \"store\": true\n" + - " }" + - " }"+ - " }" + - "}"; - } - - public static String getDogIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }"+ - " }"+ - " }" + - "}"; - } - - public static String getDogs2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n"+ - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " }"+ - " }"+ - " }" + - "}"; - } - - public static String getDogs3IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n"+ - " \"color\": {\n" + - " \"type\": \"text\"\n" + - " }"+ - " }"+ - " }" + - "}"; - } - - public static String getPeople2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"keyword\"\n" + - " }"+ - " }"+ - " }" + - "}"; - } + BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); - public static String getGameOfThronesIndexMapping() { - return "{ \"mappings\": { " + - " \"properties\": {\n" + - " \"nickname\": {\n" + - " \"type\":\"text\", "+ - " \"fielddata\":true"+ - " },\n"+ - " \"name\": {\n" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"ofHerName\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"ofHisName\": {\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - " },\n" + - 
" \"house\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }" + - "} } }"; + if (bulkResponse.hasFailures()) { + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } - - // System - - public static String getOdbcIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"odbc_time\":{\n" + - "\t\t\t\t\"type\":\"date\",\n" + - "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + - "\t\t\t},\n" + - "\t\t\t\"docCount\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + System.out.println(bulkResponse.getItems().length + " documents loaded."); + // ensure the documents are searchable + client.admin().indices().prepareRefresh(defaultIndex).execute().actionGet(); + } + + public static String getResourceFilePath(String relPath) { + String projectRoot = System.getProperty("project.root", null); + if (projectRoot == null) { + return new File(relPath).getAbsolutePath(); + } else { + return new File(projectRoot + "/" + relPath).getAbsolutePath(); } + } - public static String getLocationIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"place\":{\n" + - "\t\t\t\t\"type\":\"geo_shape\"\n" + - //"\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in OpenSearch - //"\t\t\t\t\"precision\": \"10km\"\n" + - "\t\t\t},\n" + - "\t\t\t\"center\":{\n" + - "\t\t\t\t\"type\":\"geo_point\"\n" + - "\t\t\t},\n" + - "\t\t\t\"description\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; - } + public static String getResponseBody(Response response) throws 
IOException { - public static String getEmployeeNestedTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"comments\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"date\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"message\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"city\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"title\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}\n"; - } + return getResponseBody(response, false); + } + public static String 
getResponseBody(Response response, boolean retainNewLines) + throws IOException { + final StringBuilder sb = new StringBuilder(); - public static String getNestedTypeIndexMapping() { - return "{ \"mappings\": {\n" + - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"info\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"author\": {\n" + - " \"type\": \"keyword\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\" : 256\n" + - " }\n" + - " },\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"comment\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"data\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"myNum\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"someField\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }}"; - } - - public static String getJoinTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"join_field\": {\n" + - " \"type\": \"join\",\n" + - " \"relations\": {\n" + - " \"parentType\": \"childrenType\"\n" + - " }\n" + - " },\n" + - " \"parentTile\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"author\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"info\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } - - public static String getBankIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - 
" \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } - - public static String getBankWithNullValuesIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } + try (final InputStream is = response.getEntity().getContent(); + final BufferedReader br = + new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { - public static String getOrderIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " 
\"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + String line; + while ((line = br.readLine()) != null) { + sb.append(line); + if (retainNewLines) { + sb.append(String.format(Locale.ROOT, "%n")); + } + } } + return sb.toString(); + } - public static String getWeblogsIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"host\": {\n" + - " \"type\": \"ip\"\n" + - " },\n" + - " \"method\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"url\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"response\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"bytes\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { - public static String getDateIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"date_keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }"+ - " }"+ - " }" + - "}"; - } + final String absolutePath = getResourceFilePath(filePathFromProjectRoot); - public static String getDateTimeIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"birthday\": {" + - " \"type\": \"date\"" + - " }" + - " }" + - " }" + - "}"; - } + try (final InputStream stream = new FileInputStream(absolutePath); + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { - public static String getNestedSimpleIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"address\": {" + - " \"type\": \"nested\"," + - " \"properties\": {" + - " \"city\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": 
\"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }," + - " \"state\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }," + - " \"age\": {" + - " \"type\": \"long\"" + - " }," + - " \"id\": {" + - " \"type\": \"long\"" + - " }," + - " \"name\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; - } - public static void loadBulk(Client client, String jsonPath, String defaultIndex) throws Exception { - System.out.println(String.format("Loading file %s into OpenSearch cluster", jsonPath)); - String absJsonPath = getResourceFilePath(jsonPath); - - BulkRequest bulkRequest = new BulkRequest(); - try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { - - while (true) { - - String actionLine = br.readLine(); - if (actionLine == null || actionLine.trim().isEmpty()) { - break; - } - String sourceLine = br.readLine(); - JSONObject actionJson = new JSONObject(actionLine); - - IndexRequest indexRequest = new IndexRequest(); - indexRequest.index(defaultIndex); - if (actionJson.getJSONObject("index").has("_id")) { - String docId = actionJson.getJSONObject("index").getString("_id"); - indexRequest.id(docId); - } - if (actionJson.getJSONObject("index").has("_routing")) { - String routing = actionJson.getJSONObject("index").getString("_routing"); - indexRequest.routing(routing); - } - indexRequest.source(sourceLine, XContentType.JSON); - bulkRequest.add(indexRequest); - } - } + final StringBuilder stringBuilder = new StringBuilder(); + String line = br.readLine(); - BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); + while (line != 
null) { - if (bulkResponse.hasFailures()) { - throw new Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + stringBuilder.append(line); + if (!removeNewLines) { + stringBuilder.append(String.format(Locale.ROOT, "%n")); } - System.out.println(bulkResponse.getItems().length + " documents loaded."); - // ensure the documents are searchable - client.admin().indices().prepareRefresh(defaultIndex).execute().actionGet(); - } + line = br.readLine(); + } - public static String getResourceFilePath(String relPath) { - String projectRoot = System.getProperty("project.root", null); - if (projectRoot == null) { - return new File(relPath).getAbsolutePath(); - } else { - return new File(projectRoot + "/" + relPath).getAbsolutePath(); - } + return stringBuilder.toString(); } - - public static String getResponseBody(Response response) throws IOException { - - return getResponseBody(response, false); + } + + /** + * Builds all permutations of the given list of Strings + * + * @param items list of strings to permute + * @return list of permutations + */ + public static List> getPermutations(final List items) { + + if (items.size() > 5) { + throw new IllegalArgumentException("Inefficient test, please refactor"); } - public static String getResponseBody(Response response, boolean retainNewLines) throws IOException { - final StringBuilder sb = new StringBuilder(); + final List> result = new LinkedList<>(); - try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + if (items.isEmpty() || 1 == items.size()) { - String line; - while ((line = br.readLine()) != null) { - sb.append(line); - if (retainNewLines) { - sb.append(String.format(Locale.ROOT, "%n")); - } - } - } - return sb.toString(); + final List onlyElement = new ArrayList<>(); + if (1 == items.size()) { + onlyElement.add(items.get(0)); + } + 
result.add(onlyElement); + return result; } - public static String fileToString(final String filePathFromProjectRoot, final boolean removeNewLines) - throws IOException { - - final String absolutePath = getResourceFilePath(filePathFromProjectRoot); - - try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { - - final StringBuilder stringBuilder = new StringBuilder(); - String line = br.readLine(); - - while (line != null) { - - stringBuilder.append(line); - if (!removeNewLines) { - stringBuilder.append(String.format(Locale.ROOT, "%n")); - } - line = br.readLine(); - } - - return stringBuilder.toString(); - } + for (int i = 0; i < items.size(); ++i) { + + final List smallerSet = new ArrayList<>(); + + if (i != 0) { + smallerSet.addAll(items.subList(0, i)); + } + if (i != items.size() - 1) { + smallerSet.addAll(items.subList(i + 1, items.size())); + } + + final String currentItem = items.get(i); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } - /** - * Builds all permutations of the given list of Strings - * @param items - list of strings to permute - * @return list of permutations - */ - public static List> getPermutations(final List items) { - - if (items.size() > 5) { - throw new IllegalArgumentException("Inefficient test, please refactor"); - } - - final List> result = new LinkedList<>(); - - if (items.isEmpty() || 1 == items.size()) { - - final List onlyElement = new ArrayList<>(); - if (1 == items.size()) { - onlyElement.add(items.get(0)); - } - result.add(onlyElement); - return result; - } - - for (int i = 0; i < items.size(); ++i) { - - final List 
smallerSet = new ArrayList<>(); - - if (i != 0) { - smallerSet.addAll(items.subList(0, i)); - } - if (i != items.size() - 1) { - smallerSet.addAll(items.subList(i + 1, items.size())); - } - - final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); - } - - return result; - } + return result; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java index a6b2c84d55..f436cedaaa 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java @@ -3,48 +3,46 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; -/** - * Created by omershelef on 18/12/14. - */ +/** Created by omershelef on 18/12/14. 
*/ public class TestsConstants { - public final static String PERSISTENT = "persistent"; - public final static String TRANSIENT = "transient"; - - public final static String TEST_INDEX = "opensearch-sql_test_index"; - - public final static String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; - public final static String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; - public final static String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; - public final static String TEST_INDEX_DOG = TEST_INDEX + "_dog"; - public final static String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; - public final static String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; - public final static String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; - public final static String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; - public final static String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; - public final static String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; - public final static String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; - public final static String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; - public final static String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; - public final static String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; - public final static String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; - public final static String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; - public final static String TEST_INDEX_NESTED_WITH_QUOTES = TEST_INDEX + "_nested_type_with_quotes"; - public final static String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; - public final static String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; - public final static String TEST_INDEX_BANK = TEST_INDEX + "_bank"; - public final static String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; - public final static String TEST_INDEX_BANK_WITH_NULL_VALUES = TEST_INDEX_BANK + "_with_null_values"; - public final static String TEST_INDEX_ORDER = 
TEST_INDEX + "_order"; - public final static String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; - public final static String TEST_INDEX_DATE = TEST_INDEX + "_date"; - public final static String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; - - - public final static String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - public final static String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - public final static String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; + public static final String PERSISTENT = "persistent"; + public static final String TRANSIENT = "transient"; + + public static final String TEST_INDEX = "opensearch-sql_test_index"; + + public static final String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; + public static final String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; + public static final String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; + public static final String TEST_INDEX_DOG = TEST_INDEX + "_dog"; + public static final String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; + public static final String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; + public static final String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; + public static final String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; + public static final String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; + public static final String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; + public static final String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; + public static final String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; + public static final String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; + public static final String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; + public static final String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; + public static final String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; + public static final String TEST_INDEX_NESTED_WITH_QUOTES = + TEST_INDEX + "_nested_type_with_quotes"; + public static final String 
TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; + public static final String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; + public static final String TEST_INDEX_BANK = TEST_INDEX + "_bank"; + public static final String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; + public static final String TEST_INDEX_BANK_WITH_NULL_VALUES = + TEST_INDEX_BANK + "_with_null_values"; + public static final String TEST_INDEX_ORDER = TEST_INDEX + "_order"; + public static final String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; + public static final String TEST_INDEX_DATE = TEST_INDEX + "_date"; + public static final String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; + + public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + public static final String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; + public static final String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; } diff --git a/opensearch/build.gradle b/opensearch/build.gradle index a2ab670403..34b5c3f452 100644 --- a/opensearch/build.gradle +++ b/opensearch/build.gradle @@ -27,6 +27,7 @@ plugins { id "io.freefair.lombok" id 'jacoco' id 'info.solidsoft.pitest' version '1.9.0' + id 'com.diffplug.spotless' version '6.19.0' } dependencies { @@ -48,8 +49,23 @@ dependencies { testImplementation group: 'org.opensearch.test', name: 'framework', version: "${opensearch_version}" } -checkstyleTest.ignoreFailures = true -checkstyleMain.ignoreFailures = true +spotless { + java { + target fileTree('.') { + include '**/*.java' + exclude '**/build/**', '**/build-*/**' + } + importOrder() +// licenseHeader("/*\n" + +// " * Copyright OpenSearch Contributors\n" + +// " * SPDX-License-Identifier: Apache-2.0\n" + +// " */\n\n") + removeUnusedImports() + trimTrailingWhitespace() + endWithNewline() + googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') + } +} pitest { targetClasses = ['org.opensearch.sql.*'] diff --git a/plugin/build.gradle 
b/plugin/build.gradle index 8ec6844bfd..7291f78ba4 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -27,6 +27,7 @@ plugins { id "io.freefair.lombok" id 'jacoco' id 'opensearch.opensearchplugin' + id 'com.diffplug.spotless' version '6.19.0' } apply plugin: 'opensearch.pluginzip' @@ -85,9 +86,6 @@ publishing { } } -checkstyleTest.ignoreFailures = true -checkstyleMain.ignoreFailures = true - javadoc.enabled = false loggerUsageCheck.enabled = false dependencyLicenses.enabled = false @@ -119,6 +117,24 @@ compileTestJava { options.compilerArgs.addAll(["-processor", 'lombok.launch.AnnotationProcessorHider$AnnotationProcessor']) } +spotless { + java { + target fileTree('.') { + include '**/*.java' + exclude '**/build/**', '**/build-*/**' + } + importOrder() +// licenseHeader("/*\n" + +// " * Copyright OpenSearch Contributors\n" + +// " * SPDX-License-Identifier: Apache-2.0\n" + +// " */\n\n") + removeUnusedImports() + trimTrailingWhitespace() + endWithNewline() + googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') + } +} + dependencies { api "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" diff --git a/ppl/build.gradle b/ppl/build.gradle index e16b6decfc..a798b3f4b0 100644 --- a/ppl/build.gradle +++ b/ppl/build.gradle @@ -27,13 +27,9 @@ plugins { id "io.freefair.lombok" id 'jacoco' id 'antlr' + id 'com.diffplug.spotless' version '6.19.0' } -// Being ignored as a temporary measure before being removed in favour of -// spotless https://github.com/opensearch-project/sql/issues/1101 -checkstyleTest.ignoreFailures = true -checkstyleMain.ignoreFailures = true - generateGrammarSource { arguments += ['-visitor', '-package', 'org.opensearch.sql.ppl.antlr.parser'] source = sourceSets.main.antlr @@ -65,6 +61,24 @@ dependencies { testImplementation(testFixtures(project(":core"))) } +spotless { + java { + target 
fileTree('.') { + include '**/*.java' + exclude '**/build/**', '**/build-*/**' + } + importOrder() +// licenseHeader("/*\n" + +// " * Copyright OpenSearch Contributors\n" + +// " * SPDX-License-Identifier: Apache-2.0\n" + +// " */\n\n") + removeUnusedImports() + trimTrailingWhitespace() + endWithNewline() + googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') + } +} + test { testLogging { events "passed", "skipped", "failed" diff --git a/prometheus/build.gradle b/prometheus/build.gradle index 0d915a6d4a..e98dfd83e4 100644 --- a/prometheus/build.gradle +++ b/prometheus/build.gradle @@ -13,9 +13,6 @@ repositories { mavenCentral() } -checkstyleTest.ignoreFailures = true -checkstyleMain.ignoreFailures = true - dependencies { api project(':core') implementation project(':datasources') diff --git a/protocol/build.gradle b/protocol/build.gradle index dcec1c675b..212f746b1d 100644 --- a/protocol/build.gradle +++ b/protocol/build.gradle @@ -26,6 +26,7 @@ plugins { id 'java' id "io.freefair.lombok" id 'jacoco' + id 'com.diffplug.spotless' version '6.19.0' } dependencies { @@ -43,9 +44,6 @@ dependencies { testImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: '3.12.4' } -checkstyleTest.ignoreFailures = true -checkstyleMain.ignoreFailures = true - configurations.all { resolutionStrategy.force "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" } @@ -58,6 +56,8 @@ test { } } + + jacocoTestReport { reports { html.enabled true @@ -71,6 +71,24 @@ jacocoTestReport { } test.finalizedBy(project.tasks.jacocoTestReport) +spotless { + java { + target fileTree('.') { + include '**/*.java' + exclude '**/build/**', '**/build-*/**' + } + importOrder() +// licenseHeader("/*\n" + +// " * Copyright OpenSearch Contributors\n" + +// " * SPDX-License-Identifier: Apache-2.0\n" + +// " */\n\n") + removeUnusedImports() + trimTrailingWhitespace() + endWithNewline() + 
googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') + } +} + jacocoTestCoverageVerification { violationRules { rule { diff --git a/spark/build.gradle b/spark/build.gradle index 2608b88ced..89842e5ea8 100644 --- a/spark/build.gradle +++ b/spark/build.gradle @@ -13,9 +13,6 @@ repositories { mavenCentral() } -checkstyleTest.ignoreFailures = true -checkstyleMain.ignoreFailures = true - dependencies { api project(':core') implementation project(':datasources') diff --git a/sql/build.gradle b/sql/build.gradle index d85cc4ca74..2984158e57 100644 --- a/sql/build.gradle +++ b/sql/build.gradle @@ -27,6 +27,7 @@ plugins { id "io.freefair.lombok" id 'jacoco' id 'antlr' + id 'com.diffplug.spotless' version '6.19.0' } generateGrammarSource { @@ -58,10 +59,23 @@ dependencies { testImplementation(testFixtures(project(":core"))) } -// Being ignored as a temporary measure before being removed in favour of -// spotless https://github.com/opensearch-project/sql/issues/1101 -checkstyleTest.ignoreFailures = true -checkstyleMain.ignoreFailures = true +spotless { + java { + target fileTree('.') { + include '**/*.java' + exclude '**/build/**', '**/build-*/**' + } + importOrder() +// licenseHeader("/*\n" + +// " * Copyright OpenSearch Contributors\n" + +// " * SPDX-License-Identifier: Apache-2.0\n" + +// " */\n\n") + removeUnusedImports() + trimTrailingWhitespace() + endWithNewline() + googleJavaFormat('1.17.0').reflowLongStrings().groupArtifact('com.google.googlejavaformat:google-java-format') + } +} test { useJUnitPlatform() From ed2b683064ee2adb2d449d0cbba565cd54137e3a Mon Sep 17 00:00:00 2001 From: Matthew Wells Date: Tue, 22 Aug 2023 16:02:01 -0700 Subject: [PATCH 42/42] Applied formatting improvements to Antlr files based on spotless changes (#2017) * Applied formatting improvements to Antlr files based on spotless changes (#342) Signed-off-by: Matthew Wells * added spacing to # names back into parser files Signed-off-by: 
Matthew Wells --------- Signed-off-by: Matthew Wells --- .../main/antlr/OpenSearchLegacySqlParser.g4 | 649 ++++---- ppl/src/main/antlr/OpenSearchPPLParser.g4 | 1301 +++++++++-------- sql/src/main/antlr/OpenSearchSQLParser.g4 | 1144 +++++++-------- 3 files changed, 1608 insertions(+), 1486 deletions(-) diff --git a/legacy/src/main/antlr/OpenSearchLegacySqlParser.g4 b/legacy/src/main/antlr/OpenSearchLegacySqlParser.g4 index 4b6d871faf..b64059ab7d 100644 --- a/legacy/src/main/antlr/OpenSearchLegacySqlParser.g4 +++ b/legacy/src/main/antlr/OpenSearchLegacySqlParser.g4 @@ -30,324 +30,280 @@ THE SOFTWARE. parser grammar OpenSearchLegacySqlParser; -options { tokenVocab=OpenSearchLegacySqlLexer; } - +options { tokenVocab = OpenSearchLegacySqlLexer; } // Top Level Description -// Root rule +// Root rule root - : sqlStatement? SEMI? EOF - ; + : sqlStatement? SEMI? EOF + ; -// Only SELECT, DELETE, SHOW and DSCRIBE are supported for now +// Only SELECT, DELETE, SHOW and DSCRIBE are supported for now sqlStatement - : dmlStatement | administrationStatement | utilityStatement - ; + : dmlStatement + | administrationStatement + | utilityStatement + ; dmlStatement - : selectStatement | deleteStatement - ; - - -// Data Manipulation Language + : selectStatement + | deleteStatement + ; -// Primary DML Statements + // Data Manipulation Language + // Primary DML Statements selectStatement - : querySpecification #simpleSelect - | queryExpression #parenthesisSelect - | querySpecification unionStatement+ - orderByClause? limitClause? #unionSelect - | querySpecification minusStatement+ - orderByClause? limitClause? #minusSelect - ; + : querySpecification # simpleSelect + | queryExpression # parenthesisSelect + | querySpecification unionStatement+ orderByClause? limitClause? # unionSelect + | querySpecification minusStatement+ orderByClause? limitClause? 
# minusSelect + ; deleteStatement - : singleDeleteStatement - ; - -// Detailed DML Statements + : singleDeleteStatement + ; +// Detailed DML Statements singleDeleteStatement - : DELETE FROM tableName - (WHERE expression)? - orderByClause? (LIMIT decimalLiteral)? - ; + : DELETE FROM tableName (WHERE expression)? orderByClause? (LIMIT decimalLiteral)? + ; orderByClause - : ORDER BY orderByExpression (',' orderByExpression)* - ; + : ORDER BY orderByExpression (',' orderByExpression)* + ; orderByExpression - : expression order=(ASC | DESC)? - ; + : expression order = (ASC | DESC)? + ; tableSources - : tableSource (',' tableSource)* - ; + : tableSource (',' tableSource)* + ; tableSource - : tableSourceItem joinPart* #tableSourceBase - | '(' tableSourceItem joinPart* ')' #tableSourceNested - ; + : tableSourceItem joinPart* # tableSourceBase + | '(' tableSourceItem joinPart* ')' # tableSourceNested + ; tableSourceItem - : tableName (AS? alias=uid)? #atomTableItem - | ( - selectStatement - | '(' parenthesisSubquery=selectStatement ')' - ) - AS? alias=uid #subqueryTableItem - | '(' tableSources ')' #tableSourcesItem - ; + : tableName (AS? alias = uid)? # atomTableItem + | (selectStatement | '(' parenthesisSubquery = selectStatement ')') AS? alias = uid # subqueryTableItem + | '(' tableSources ')' # tableSourcesItem + ; joinPart - : (INNER | CROSS)? JOIN tableSourceItem - ( - ON expression - | USING '(' uidList ')' - )? #innerJoin - | (LEFT | RIGHT) OUTER? JOIN tableSourceItem - ( - ON expression - | USING '(' uidList ')' - )? #outerJoin - | NATURAL ((LEFT | RIGHT) OUTER?)? JOIN tableSourceItem #naturalJoin - ; - -// Select Statement's Details + : (INNER | CROSS)? JOIN tableSourceItem (ON expression | USING '(' uidList ')')? # innerJoin + | (LEFT | RIGHT) OUTER? JOIN tableSourceItem (ON expression | USING '(' uidList ')')? # outerJoin + | NATURAL ((LEFT | RIGHT) OUTER?)? 
JOIN tableSourceItem # naturalJoin + ; +// Select Statement's Details queryExpression - : '(' querySpecification ')' - | '(' queryExpression ')' - ; + : '(' querySpecification ')' + | '(' queryExpression ')' + ; querySpecification - : SELECT selectSpec* selectElements - fromClause orderByClause? limitClause? - ; + : SELECT selectSpec* selectElements fromClause orderByClause? limitClause? + ; unionStatement - : UNION unionType=(ALL | DISTINCT)? - (querySpecification | queryExpression) - ; + : UNION unionType = (ALL | DISTINCT)? (querySpecification | queryExpression) + ; minusStatement - : EXCEPT (querySpecification | queryExpression) - ; + : EXCEPT (querySpecification | queryExpression) + ; selectSpec - : (ALL | DISTINCT) - ; + : (ALL | DISTINCT) + ; selectElements - : (star='*' | selectElement ) (',' selectElement)* - ; + : (star = '*' | selectElement) (',' selectElement)* + ; selectElement - : fullId '.' '*' #selectStarElement - | fullColumnName (AS? uid)? #selectColumnElement - | functionCall (AS? uid)? #selectFunctionElement - | expression (AS? uid)? #selectExpressionElement - | NESTED '(' fullId DOT STAR ')' #selectNestedStarElement - ; + : fullId '.' '*' # selectStarElement + | fullColumnName (AS? uid)? # selectColumnElement + | functionCall (AS? uid)? # selectFunctionElement + | expression (AS? uid)? # selectExpressionElement + | NESTED '(' fullId DOT STAR ')' # selectNestedStarElement + ; fromClause - : FROM tableSources - (WHERE whereExpr=expression)? - ( - GROUP BY - groupByItem (',' groupByItem)* - )? - (HAVING havingExpr=expression)? - ; + : FROM tableSources (WHERE whereExpr = expression)? (GROUP BY groupByItem (',' groupByItem)*)? (HAVING havingExpr = expression)? + ; groupByItem - : expression order=(ASC | DESC)? - ; + : expression order = (ASC | DESC)? + ; limitClause - : LIMIT - ( - (offset=limitClauseAtom ',')? limit=limitClauseAtom - | limit=limitClauseAtom OFFSET offset=limitClauseAtom - ) - ; + : LIMIT ((offset = limitClauseAtom ',')? 
limit = limitClauseAtom | limit = limitClauseAtom OFFSET offset = limitClauseAtom) + ; limitClauseAtom - : decimalLiteral - ; - - -// SHOW/DESCIRBE statements + : decimalLiteral + ; +// SHOW/DESCIRBE statements administrationStatement - : showStatement - ; + : showStatement + ; showStatement - : SHOW showSchemaEntity - (schemaFormat=(FROM | IN) uid)? showFilter? - ; + : SHOW showSchemaEntity (schemaFormat = (FROM | IN) uid)? showFilter? + ; utilityStatement - : simpleDescribeStatement - ; + : simpleDescribeStatement + ; simpleDescribeStatement - : command=DESCRIBE tableName - (column=uid | pattern=STRING_LITERAL)? - ; + : command = DESCRIBE tableName (column = uid | pattern = STRING_LITERAL)? + ; showFilter - : LIKE STRING_LITERAL - | WHERE expression - ; + : LIKE STRING_LITERAL + | WHERE expression + ; showSchemaEntity - : FULL? TABLES - ; - + : FULL? TABLES + ; // Common Clauses -// DB Objects - +// DB Objects fullId - : uid (DOT_ID | '.' uid)? - ; + : uid (DOT_ID | '.' uid)? + ; tableName - : fullId #simpleTableName - | uid STAR #tableNamePattern - | uid DIVIDE uid #tableAndTypeName - ; + : fullId # simpleTableName + | uid STAR # tableNamePattern + | uid DIVIDE uid # tableAndTypeName + ; fullColumnName - : uid dottedId* - ; + : uid dottedId* + ; uid - : simpleId - | REVERSE_QUOTE_ID - ; + : simpleId + | REVERSE_QUOTE_ID + ; simpleId - : ID - | DOT_ID // note: the current scope by adding DOT_ID to simpleId is large, move DOT_ID upwards tablename if needed - | DOUBLE_QUOTE_ID - | BACKTICK_QUOTE_ID - | keywordsCanBeId - | functionNameBase - ; + : ID + | DOT_ID // note: the current scope by adding DOT_ID to simpleId is large, move DOT_ID upwards tablename if needed + | DOUBLE_QUOTE_ID + | BACKTICK_QUOTE_ID + | keywordsCanBeId + | functionNameBase + ; dottedId - : DOT_ID - | '.' uid - ; - -// Literals + : DOT_ID + | '.' 
uid + ; +// Literals decimalLiteral - : DECIMAL_LITERAL | ZERO_DECIMAL | ONE_DECIMAL | TWO_DECIMAL - ; + : DECIMAL_LITERAL + | ZERO_DECIMAL + | ONE_DECIMAL + | TWO_DECIMAL + ; stringLiteral - : ( - STRING_LITERAL - | START_NATIONAL_STRING_LITERAL - ) STRING_LITERAL+ - | ( - STRING_LITERAL - | START_NATIONAL_STRING_LITERAL - ) - ; + : (STRING_LITERAL | START_NATIONAL_STRING_LITERAL) STRING_LITERAL+ + | (STRING_LITERAL | START_NATIONAL_STRING_LITERAL) + ; booleanLiteral - : TRUE | FALSE; + : TRUE + | FALSE + ; nullNotnull - : NOT? (NULL_LITERAL | NULL_SPEC_LITERAL) - ; + : NOT? (NULL_LITERAL | NULL_SPEC_LITERAL) + ; constant - : stringLiteral | decimalLiteral - | '-' decimalLiteral - | booleanLiteral - | REAL_LITERAL | BIT_STRING - | NOT? nullLiteral=(NULL_LITERAL | NULL_SPEC_LITERAL) - | LEFT_BRACE dateType=(D | T | TS | DATE | TIME | TIMESTAMP) stringLiteral RIGHT_BRACE - ; - - -// Common Lists - + : stringLiteral + | decimalLiteral + | '-' decimalLiteral + | booleanLiteral + | REAL_LITERAL + | BIT_STRING + | NOT? nullLiteral = (NULL_LITERAL | NULL_SPEC_LITERAL) + | LEFT_BRACE dateType = (D | T | TS | DATE | TIME | TIMESTAMP) stringLiteral RIGHT_BRACE + ; + +// Common Lists uidList - : uid (',' uid)* - ; + : uid (',' uid)* + ; expressions - : expression (',' expression)* - ; + : expression (',' expression)* + ; aggregateFunction - : functionAsAggregatorFunction #functionAsAggregatorFunctionCall - | aggregateWindowedFunction #aggregateWindowedFunctionCall - ; + : functionAsAggregatorFunction # functionAsAggregatorFunctionCall + | aggregateWindowedFunction # aggregateWindowedFunctionCall + ; scalarFunction - : scalarFunctionName '(' nestedFunctionArgs+ ')' #nestedFunctionCall - | scalarFunctionName '(' functionArgs? ')' #scalarFunctionCall - ; + : scalarFunctionName '(' nestedFunctionArgs+ ')' # nestedFunctionCall + | scalarFunctionName '(' functionArgs? 
')' # scalarFunctionCall + ; functionCall - : aggregateFunction #aggregateFunctionCall - | scalarFunctionName '(' aggregateWindowedFunction ')' #aggregationAsArgFunctionCall - | scalarFunction #scalarFunctionsCall - | specificFunction #specificFunctionCall - | fullId '(' functionArgs? ')' #udfFunctionCall - ; + : aggregateFunction # aggregateFunctionCall + | scalarFunctionName '(' aggregateWindowedFunction ')' # aggregationAsArgFunctionCall + | scalarFunction # scalarFunctionsCall + | specificFunction # specificFunctionCall + | fullId '(' functionArgs? ')' # udfFunctionCall + ; specificFunction - : CAST '(' expression AS convertedDataType ')' #dataTypeFunctionCall - | CASE expression caseFuncAlternative+ - (ELSE elseArg=functionArg)? END #caseFunctionCall - | CASE caseFuncAlternative+ - (ELSE elseArg=functionArg)? END #caseFunctionCall - ; + : CAST '(' expression AS convertedDataType ')' # dataTypeFunctionCall + | CASE expression caseFuncAlternative+ (ELSE elseArg = functionArg)? END # caseFunctionCall + | CASE caseFuncAlternative+ (ELSE elseArg = functionArg)? END # caseFunctionCall + ; caseFuncAlternative - : WHEN condition=functionArg - THEN consequent=functionArg - ; + : WHEN condition = functionArg THEN consequent = functionArg + ; convertedDataType - : typeName=DATETIME - | typeName=INT - | typeName=DOUBLE - | typeName=LONG - | typeName=FLOAT - | typeName=STRING - ; + : typeName = DATETIME + | typeName = INT + | typeName = DOUBLE + | typeName = LONG + | typeName = FLOAT + | typeName = STRING + ; aggregateWindowedFunction - : (AVG | MAX | MIN | SUM) - '(' aggregator=(ALL | DISTINCT)? functionArg ')' - | COUNT '(' (starArg='*' | aggregator=ALL? functionArg) ')' - | COUNT '(' aggregator=DISTINCT functionArgs ')' - ; + : (AVG | MAX | MIN | SUM) '(' aggregator = (ALL | DISTINCT)? functionArg ')' + | COUNT '(' (starArg = '*' | aggregator = ALL? 
functionArg) ')' + | COUNT '(' aggregator = DISTINCT functionArgs ')' + ; functionAsAggregatorFunction - : (AVG | MAX | MIN | SUM) - '(' aggregator=(ALL | DISTINCT)? functionCall ')' - | COUNT '(' aggregator=(ALL | DISTINCT)? functionCall ')' - ; + : (AVG | MAX | MIN | SUM) '(' aggregator = (ALL | DISTINCT)? functionCall ')' + | COUNT '(' aggregator = (ALL | DISTINCT)? functionCall ')' + ; scalarFunctionName - : functionNameBase - ; - + : functionNameBase + ; /* Separated aggregate to function-aggregator and nonfunction-aggregator aggregations. @@ -387,107 +343,226 @@ functionCall | fullId '(' functionArgs? ')' #udfFunctionCall ; */ - + + functionArgs - : (constant | fullColumnName | expression) - ( - ',' - (constant | fullColumnName | expression) - )* - ; + : (constant | fullColumnName | expression) (',' (constant | fullColumnName | expression))* + ; functionArg - : constant | fullColumnName | expression - ; + : constant + | fullColumnName + | expression + ; nestedFunctionArgs - : functionCall (',' functionArgs)? - ; + : functionCall (',' functionArgs)? + ; - -// Expressions, predicates +// Expressions, predicates // Simplified approach for expression expression - : notOperator=(NOT | '!') expression #notExpression - | expression logicalOperator expression #logicalExpression - | predicate IS NOT? testValue=(TRUE | FALSE | MISSING) #isExpression - | predicate #predicateExpression - ; + : notOperator = (NOT | '!') expression # notExpression + | expression logicalOperator expression # logicalExpression + | predicate IS NOT? testValue = (TRUE | FALSE | MISSING) # isExpression + | predicate # predicateExpression + ; predicate - : predicate NOT? IN '(' (selectStatement | expressions) ')' #inPredicate - | predicate IS nullNotnull #isNullPredicate - | left=predicate comparisonOperator right=predicate #binaryComparisonPredicate - | predicate NOT? BETWEEN predicate AND predicate #betweenPredicate - | predicate NOT? LIKE predicate #likePredicate - | predicate NOT? 
regex=REGEXP predicate #regexpPredicate - | expressionAtom #expressionAtomPredicate - ; - + : predicate NOT? IN '(' (selectStatement | expressions) ')' # inPredicate + | predicate IS nullNotnull # isNullPredicate + | left = predicate comparisonOperator right = predicate # binaryComparisonPredicate + | predicate NOT? BETWEEN predicate AND predicate # betweenPredicate + | predicate NOT? LIKE predicate # likePredicate + | predicate NOT? regex = REGEXP predicate # regexpPredicate + | expressionAtom # expressionAtomPredicate + ; // Add in ASTVisitor nullNotnull in constant expressionAtom - : constant #constantExpressionAtom - | fullColumnName #fullColumnNameExpressionAtom - | functionCall #functionCallExpressionAtom - | unaryOperator expressionAtom #unaryExpressionAtom - | '(' expression (',' expression)* ')' #nestedExpressionAtom - | EXISTS '(' selectStatement ')' #existsExpessionAtom - | '(' selectStatement ')' #subqueryExpessionAtom - | left=expressionAtom bitOperator right=expressionAtom #bitExpressionAtom - | left=expressionAtom mathOperator right=expressionAtom #mathExpressionAtom - ; + : constant # constantExpressionAtom + | fullColumnName # fullColumnNameExpressionAtom + | functionCall # functionCallExpressionAtom + | unaryOperator expressionAtom # unaryExpressionAtom + | '(' expression (',' expression)* ')' # nestedExpressionAtom + | EXISTS '(' selectStatement ')' # existsExpessionAtom + | '(' selectStatement ')' # subqueryExpessionAtom + | left = expressionAtom bitOperator right = expressionAtom # bitExpressionAtom + | left = expressionAtom mathOperator right = expressionAtom # mathExpressionAtom + ; unaryOperator - : '!' | '~' | '+' | '-' | NOT - ; + : '!' + | '~' + | '+' + | '-' + | NOT + ; comparisonOperator - : '=' | '>' | '<' | '<' '=' | '>' '=' - | '<' '>' | '!' '=' - ; + : '=' + | '>' + | '<' + | '<' '=' + | '>' '=' + | '<' '>' + | '!' 
'=' + ; logicalOperator - : AND | '&' '&' | OR | '|' '|' - ; + : AND + | '&' '&' + | OR + | '|' '|' + ; bitOperator - : '<' '<' | '>' '>' | '&' | '^' | '|' - ; + : '<' '<' + | '>' '>' + | '&' + | '^' + | '|' + ; mathOperator - : '*' | '/' | '%' | DIV | MOD | '+' | '-' - ; - - -// Simple id sets -// (that keyword, which can be id) - + : '*' + | '/' + | '%' + | DIV + | MOD + | '+' + | '-' + ; + +// Simple id sets +// (that keyword, which can be id) keywordsCanBeId - : FULL - | FIELD | D | T | TS // OD SQL and ODBC special - | COUNT | MIN | MAX | AVG | SUM - ; + : FULL + | FIELD + | D + | T + | TS // OD SQL and ODBC special + | COUNT + | MIN + | MAX + | AVG + | SUM + ; functionNameBase - : openSearchFunctionNameBase - | ABS | ACOS | ADD | ASCII | ASIN | ATAN | ATAN2 | CBRT | CEIL | CONCAT | CONCAT_WS - | COS | COSH | COT | CURDATE | DATE | DATE_FORMAT | DAYOFMONTH | DEGREES - | E | EXP | EXPM1 | FLOOR | IF | IFNULL | ISNULL | LEFT | LENGTH | LN | LOCATE | LOG - | LOG10 | LOG2 | LOWER | LTRIM | MAKETIME | MODULUS | MONTH | MONTHNAME | MULTIPLY - | NOW | PI | POW | POWER | RADIANS | RAND | REPLACE | RIGHT | RINT | ROUND | RTRIM - | SIGN | SIGNUM | SIN | SINH | SQRT | SUBSTRING | SUBTRACT | TAN | TIMESTAMP | TRIM - | UPPER | YEAR | ADDDATE | ADDTIME | GREATEST | LEAST | STRCMP - ; + : openSearchFunctionNameBase + | ABS + | ACOS + | ADD + | ASCII + | ASIN + | ATAN + | ATAN2 + | CBRT + | CEIL + | CONCAT + | CONCAT_WS + | COS + | COSH + | COT + | CURDATE + | DATE + | DATE_FORMAT + | DAYOFMONTH + | DEGREES + | E + | EXP + | EXPM1 + | FLOOR + | IF + | IFNULL + | ISNULL + | LEFT + | LENGTH + | LN + | LOCATE + | LOG + | LOG10 + | LOG2 + | LOWER + | LTRIM + | MAKETIME + | MODULUS + | MONTH + | MONTHNAME + | MULTIPLY + | NOW + | PI + | POW + | POWER + | RADIANS + | RAND + | REPLACE + | RIGHT + | RINT + | ROUND + | RTRIM + | SIGN + | SIGNUM + | SIN + | SINH + | SQRT + | SUBSTRING + | SUBTRACT + | TAN + | TIMESTAMP + | TRIM + | UPPER + | YEAR + | ADDDATE + | ADDTIME + | GREATEST + 
| LEAST + | STRCMP + ; openSearchFunctionNameBase - : DATE_HISTOGRAM | DAY_OF_MONTH | DAY_OF_YEAR | DAY_OF_WEEK | EXCLUDE - | EXTENDED_STATS | FILTER | GEO_BOUNDING_BOX | GEO_CELL | GEO_DISTANCE | GEO_DISTANCE_RANGE | GEO_INTERSECTS - | GEO_POLYGON | INCLUDE | IN_TERMS | HISTOGRAM | HOUR_OF_DAY - | MATCHPHRASE | MATCH_PHRASE | MATCHQUERY | MATCH_QUERY | MINUTE_OF_DAY - | MINUTE_OF_HOUR | MISSING | MONTH_OF_YEAR | MULTIMATCH | MULTI_MATCH | NESTED - | PERCENTILES | QUERY | RANGE | REGEXP_QUERY | REVERSE_NESTED | SCORE - | SECOND_OF_MINUTE | STATS | TERM | TERMS | TOPHITS - | WEEK_OF_YEAR | WILDCARDQUERY | WILDCARD_QUERY - ; + : DATE_HISTOGRAM + | DAY_OF_MONTH + | DAY_OF_YEAR + | DAY_OF_WEEK + | EXCLUDE + | EXTENDED_STATS + | FILTER + | GEO_BOUNDING_BOX + | GEO_CELL + | GEO_DISTANCE + | GEO_DISTANCE_RANGE + | GEO_INTERSECTS + | GEO_POLYGON + | INCLUDE + | IN_TERMS + | HISTOGRAM + | HOUR_OF_DAY + | MATCHPHRASE + | MATCH_PHRASE + | MATCHQUERY + | MATCH_QUERY + | MINUTE_OF_DAY + | MINUTE_OF_HOUR + | MISSING + | MONTH_OF_YEAR + | MULTIMATCH + | MULTI_MATCH + | NESTED + | PERCENTILES + | QUERY + | RANGE + | REGEXP_QUERY + | REVERSE_NESTED + | SCORE + | SECOND_OF_MINUTE + | STATS + | TERM + | TERMS + | TOPHITS + | WEEK_OF_YEAR + | WILDCARDQUERY + | WILDCARD_QUERY + ; diff --git a/ppl/src/main/antlr/OpenSearchPPLParser.g4 b/ppl/src/main/antlr/OpenSearchPPLParser.g4 index 9cde1bfbb8..21cfc65aa1 100644 --- a/ppl/src/main/antlr/OpenSearchPPLParser.g4 +++ b/ppl/src/main/antlr/OpenSearchPPLParser.g4 @@ -4,854 +4,911 @@ */ - parser grammar OpenSearchPPLParser; -options { tokenVocab=OpenSearchPPLLexer; } + +options { tokenVocab = OpenSearchPPLLexer; } root - : pplStatement? EOF - ; + : pplStatement? 
EOF + ; -/** statement */ +// statement pplStatement - : dmlStatement - ; + : dmlStatement + ; dmlStatement - : queryStatement - ; + : queryStatement + ; queryStatement - : pplCommands (PIPE commands)* - ; + : pplCommands (PIPE commands)* + ; -/** commands */ +// commands pplCommands - : searchCommand - | describeCommand - | showDataSourcesCommand - ; + : searchCommand + | describeCommand + | showDataSourcesCommand + ; commands - : whereCommand - | fieldsCommand - | renameCommand - | statsCommand - | dedupCommand - | sortCommand - | evalCommand - | headCommand - | topCommand - | rareCommand - | grokCommand - | parseCommand - | patternsCommand - | kmeansCommand - | adCommand - | mlCommand; + : whereCommand + | fieldsCommand + | renameCommand + | statsCommand + | dedupCommand + | sortCommand + | evalCommand + | headCommand + | topCommand + | rareCommand + | grokCommand + | parseCommand + | patternsCommand + | kmeansCommand + | adCommand + | mlCommand + ; searchCommand - : (SEARCH)? fromClause #searchFrom - | (SEARCH)? fromClause logicalExpression #searchFromFilter - | (SEARCH)? logicalExpression fromClause #searchFilterFrom - ; + : (SEARCH)? fromClause # searchFrom + | (SEARCH)? fromClause logicalExpression # searchFromFilter + | (SEARCH)? logicalExpression fromClause # searchFilterFrom + ; describeCommand - : DESCRIBE tableSourceClause - ; + : DESCRIBE tableSourceClause + ; showDataSourcesCommand - : SHOW DATASOURCES - ; + : SHOW DATASOURCES + ; whereCommand - : WHERE logicalExpression - ; + : WHERE logicalExpression + ; fieldsCommand - : FIELDS (PLUS | MINUS)? fieldList - ; + : FIELDS (PLUS | MINUS)? fieldList + ; renameCommand - : RENAME renameClasue (COMMA renameClasue)* - ; + : RENAME renameClasue (COMMA renameClasue)* + ; statsCommand - : STATS - (PARTITIONS EQUAL partitions=integerLiteral)? - (ALLNUM EQUAL allnum=booleanLiteral)? - (DELIM EQUAL delim=stringLiteral)? - statsAggTerm (COMMA statsAggTerm)* - (statsByClause)? 
- (DEDUP_SPLITVALUES EQUAL dedupsplit=booleanLiteral)? - ; + : STATS (PARTITIONS EQUAL partitions = integerLiteral)? (ALLNUM EQUAL allnum = booleanLiteral)? (DELIM EQUAL delim = stringLiteral)? statsAggTerm (COMMA statsAggTerm)* (statsByClause)? (DEDUP_SPLITVALUES EQUAL dedupsplit = booleanLiteral)? + ; dedupCommand - : DEDUP - (number=integerLiteral)? - fieldList - (KEEPEMPTY EQUAL keepempty=booleanLiteral)? - (CONSECUTIVE EQUAL consecutive=booleanLiteral)? - ; + : DEDUP (number = integerLiteral)? fieldList (KEEPEMPTY EQUAL keepempty = booleanLiteral)? (CONSECUTIVE EQUAL consecutive = booleanLiteral)? + ; sortCommand - : SORT sortbyClause - ; + : SORT sortbyClause + ; evalCommand - : EVAL evalClause (COMMA evalClause)* - ; + : EVAL evalClause (COMMA evalClause)* + ; headCommand - : HEAD - (number=integerLiteral)? - (FROM from=integerLiteral)? - ; - + : HEAD (number = integerLiteral)? (FROM from = integerLiteral)? + ; + topCommand - : TOP - (number=integerLiteral)? - fieldList - (byClause)? - ; + : TOP (number = integerLiteral)? fieldList (byClause)? + ; rareCommand - : RARE - fieldList - (byClause)? - ; + : RARE fieldList (byClause)? 
+ ; grokCommand - : GROK (source_field=expression) (pattern=stringLiteral) - ; + : GROK (source_field = expression) (pattern = stringLiteral) + ; parseCommand - : PARSE (source_field=expression) (pattern=stringLiteral) - ; + : PARSE (source_field = expression) (pattern = stringLiteral) + ; patternsCommand - : PATTERNS (patternsParameter)* (source_field=expression) - ; + : PATTERNS (patternsParameter)* (source_field = expression) + ; patternsParameter - : (NEW_FIELD EQUAL new_field=stringLiteral) - | (PATTERN EQUAL pattern=stringLiteral) - ; + : (NEW_FIELD EQUAL new_field = stringLiteral) + | (PATTERN EQUAL pattern = stringLiteral) + ; patternsMethod - : PUNCT | REGEX - ; + : PUNCT + | REGEX + ; kmeansCommand - : KMEANS (kmeansParameter)* - ; + : KMEANS (kmeansParameter)* + ; kmeansParameter - : (CENTROIDS EQUAL centroids=integerLiteral) - | (ITERATIONS EQUAL iterations=integerLiteral) - | (DISTANCE_TYPE EQUAL distance_type=stringLiteral) - ; + : (CENTROIDS EQUAL centroids = integerLiteral) + | (ITERATIONS EQUAL iterations = integerLiteral) + | (DISTANCE_TYPE EQUAL distance_type = stringLiteral) + ; adCommand - : AD (adParameter)* - ; + : AD (adParameter)* + ; adParameter - : (NUMBER_OF_TREES EQUAL number_of_trees=integerLiteral) - | (SHINGLE_SIZE EQUAL shingle_size=integerLiteral) - | (SAMPLE_SIZE EQUAL sample_size=integerLiteral) - | (OUTPUT_AFTER EQUAL output_after=integerLiteral) - | (TIME_DECAY EQUAL time_decay=decimalLiteral) - | (ANOMALY_RATE EQUAL anomaly_rate=decimalLiteral) - | (CATEGORY_FIELD EQUAL category_field=stringLiteral) - | (TIME_FIELD EQUAL time_field=stringLiteral) - | (DATE_FORMAT EQUAL date_format=stringLiteral) - | (TIME_ZONE EQUAL time_zone=stringLiteral) - | (TRAINING_DATA_SIZE EQUAL training_data_size=integerLiteral) - | (ANOMALY_SCORE_THRESHOLD EQUAL anomaly_score_threshold=decimalLiteral) - ; + : (NUMBER_OF_TREES EQUAL number_of_trees = integerLiteral) + | (SHINGLE_SIZE EQUAL shingle_size = integerLiteral) + | (SAMPLE_SIZE EQUAL 
sample_size = integerLiteral) + | (OUTPUT_AFTER EQUAL output_after = integerLiteral) + | (TIME_DECAY EQUAL time_decay = decimalLiteral) + | (ANOMALY_RATE EQUAL anomaly_rate = decimalLiteral) + | (CATEGORY_FIELD EQUAL category_field = stringLiteral) + | (TIME_FIELD EQUAL time_field = stringLiteral) + | (DATE_FORMAT EQUAL date_format = stringLiteral) + | (TIME_ZONE EQUAL time_zone = stringLiteral) + | (TRAINING_DATA_SIZE EQUAL training_data_size = integerLiteral) + | (ANOMALY_SCORE_THRESHOLD EQUAL anomaly_score_threshold = decimalLiteral) + ; mlCommand - : ML (mlArg)* - ; + : ML (mlArg)* + ; mlArg - : (argName=ident EQUAL argValue=literalValue) - ; + : (argName = ident EQUAL argValue = literalValue) + ; -/** clauses */ +// clauses fromClause - : SOURCE EQUAL tableSourceClause - | INDEX EQUAL tableSourceClause - | SOURCE EQUAL tableFunction - | INDEX EQUAL tableFunction - ; - + : SOURCE EQUAL tableSourceClause + | INDEX EQUAL tableSourceClause + | SOURCE EQUAL tableFunction + | INDEX EQUAL tableFunction + ; tableSourceClause - : tableSource (COMMA tableSource)* - ; + : tableSource (COMMA tableSource)* + ; renameClasue - : orignalField=wcFieldExpression AS renamedField=wcFieldExpression - ; + : orignalField = wcFieldExpression AS renamedField = wcFieldExpression + ; byClause - : BY fieldList - ; + : BY fieldList + ; statsByClause - : BY fieldList - | BY bySpanClause - | BY bySpanClause COMMA fieldList - ; + : BY fieldList + | BY bySpanClause + | BY bySpanClause COMMA fieldList + ; bySpanClause - : spanClause (AS alias=qualifiedName)? - ; + : spanClause (AS alias = qualifiedName)? + ; spanClause - : SPAN LT_PRTHS fieldExpression COMMA value=literalValue (unit=timespanUnit)? RT_PRTHS - ; + : SPAN LT_PRTHS fieldExpression COMMA value = literalValue (unit = timespanUnit)? 
RT_PRTHS + ; sortbyClause - : sortField (COMMA sortField)* - ; + : sortField (COMMA sortField)* + ; evalClause - : fieldExpression EQUAL expression - ; + : fieldExpression EQUAL expression + ; -/** aggregation terms */ +// aggregation terms statsAggTerm - : statsFunction (AS alias=wcFieldExpression)? - ; + : statsFunction (AS alias = wcFieldExpression)? + ; -/** aggregation functions */ +// aggregation functions statsFunction - : statsFunctionName LT_PRTHS valueExpression RT_PRTHS #statsFunctionCall - | COUNT LT_PRTHS RT_PRTHS #countAllFunctionCall - | (DISTINCT_COUNT | DC) LT_PRTHS valueExpression RT_PRTHS #distinctCountFunctionCall - | percentileAggFunction #percentileAggFunctionCall - | takeAggFunction #takeAggFunctionCall - ; + : statsFunctionName LT_PRTHS valueExpression RT_PRTHS # statsFunctionCall + | COUNT LT_PRTHS RT_PRTHS # countAllFunctionCall + | (DISTINCT_COUNT | DC) LT_PRTHS valueExpression RT_PRTHS # distinctCountFunctionCall + | percentileAggFunction # percentileAggFunctionCall + | takeAggFunction # takeAggFunctionCall + ; statsFunctionName - : AVG - | COUNT - | SUM - | MIN - | MAX - | VAR_SAMP - | VAR_POP - | STDDEV_SAMP - | STDDEV_POP - ; + : AVG + | COUNT + | SUM + | MIN + | MAX + | VAR_SAMP + | VAR_POP + | STDDEV_SAMP + | STDDEV_POP + ; takeAggFunction - : TAKE LT_PRTHS fieldExpression (COMMA size=integerLiteral)? RT_PRTHS - ; + : TAKE LT_PRTHS fieldExpression (COMMA size = integerLiteral)? 
RT_PRTHS + ; percentileAggFunction - : PERCENTILE LESS value=integerLiteral GREATER LT_PRTHS aggField=fieldExpression RT_PRTHS - ; + : PERCENTILE LESS value = integerLiteral GREATER LT_PRTHS aggField = fieldExpression RT_PRTHS + ; -/** expressions */ +// expressions expression - : logicalExpression - | comparisonExpression - | valueExpression - ; + : logicalExpression + | comparisonExpression + | valueExpression + ; logicalExpression - : comparisonExpression #comparsion - | NOT logicalExpression #logicalNot - | left=logicalExpression OR right=logicalExpression #logicalOr - | left=logicalExpression (AND)? right=logicalExpression #logicalAnd - | left=logicalExpression XOR right=logicalExpression #logicalXor - | booleanExpression #booleanExpr - | relevanceExpression #relevanceExpr - ; + : comparisonExpression # comparsion + | NOT logicalExpression # logicalNot + | left = logicalExpression OR right = logicalExpression # logicalOr + | left = logicalExpression (AND)? right = logicalExpression # logicalAnd + | left = logicalExpression XOR right = logicalExpression # logicalXor + | booleanExpression # booleanExpr + | relevanceExpression # relevanceExpr + ; comparisonExpression - : left=valueExpression comparisonOperator right=valueExpression #compareExpr - | valueExpression IN valueList #inExpr - ; + : left = valueExpression comparisonOperator right = valueExpression # compareExpr + | valueExpression IN valueList # inExpr + ; valueExpression - : left=valueExpression - binaryOperator=(STAR | DIVIDE | MODULE) - right=valueExpression #binaryArithmetic - | left=valueExpression - binaryOperator=(PLUS | MINUS) - right=valueExpression #binaryArithmetic - | primaryExpression #valueExpressionDefault - | positionFunction #positionFunctionCall - | extractFunction #extractFunctionCall - | getFormatFunction #getFormatFunctionCall - | timestampFunction #timestampFunctionCall - | LT_PRTHS valueExpression RT_PRTHS #parentheticValueExpr - ; + : left = valueExpression binaryOperator = (STAR 
| DIVIDE | MODULE) right = valueExpression # binaryArithmetic + | left = valueExpression binaryOperator = (PLUS | MINUS) right = valueExpression # binaryArithmetic + | primaryExpression # valueExpressionDefault + | positionFunction # positionFunctionCall + | extractFunction # extractFunctionCall + | getFormatFunction # getFormatFunctionCall + | timestampFunction # timestampFunctionCall + | LT_PRTHS valueExpression RT_PRTHS # parentheticValueExpr + ; primaryExpression - : evalFunctionCall - | dataTypeFunctionCall - | fieldExpression - | literalValue - ; + : evalFunctionCall + | dataTypeFunctionCall + | fieldExpression + | literalValue + ; positionFunction - : positionFunctionName LT_PRTHS functionArg IN functionArg RT_PRTHS - ; + : positionFunctionName LT_PRTHS functionArg IN functionArg RT_PRTHS + ; booleanExpression - : booleanFunctionCall - ; + : booleanFunctionCall + ; relevanceExpression - : singleFieldRelevanceFunction | multiFieldRelevanceFunction - ; + : singleFieldRelevanceFunction + | multiFieldRelevanceFunction + ; // Field is a single column singleFieldRelevanceFunction - : singleFieldRelevanceFunctionName LT_PRTHS - field=relevanceField COMMA query=relevanceQuery - (COMMA relevanceArg)* RT_PRTHS - ; + : singleFieldRelevanceFunctionName LT_PRTHS field = relevanceField COMMA query = relevanceQuery (COMMA relevanceArg)* RT_PRTHS + ; // Field is a list of columns multiFieldRelevanceFunction - : multiFieldRelevanceFunctionName LT_PRTHS - LT_SQR_PRTHS field=relevanceFieldAndWeight (COMMA field=relevanceFieldAndWeight)* RT_SQR_PRTHS - COMMA query=relevanceQuery (COMMA relevanceArg)* RT_PRTHS - ; + : multiFieldRelevanceFunctionName LT_PRTHS LT_SQR_PRTHS field = relevanceFieldAndWeight (COMMA field = relevanceFieldAndWeight)* RT_SQR_PRTHS COMMA query = relevanceQuery (COMMA relevanceArg)* RT_PRTHS + ; -/** tables */ +// tables tableSource - : tableQualifiedName - | ID_DATE_SUFFIX - ; + : tableQualifiedName + | ID_DATE_SUFFIX + ; tableFunction - : qualifiedName 
LT_PRTHS functionArgs RT_PRTHS - ; + : qualifiedName LT_PRTHS functionArgs RT_PRTHS + ; -/** fields */ +// fields fieldList - : fieldExpression (COMMA fieldExpression)* - ; + : fieldExpression (COMMA fieldExpression)* + ; wcFieldList - : wcFieldExpression (COMMA wcFieldExpression)* - ; + : wcFieldExpression (COMMA wcFieldExpression)* + ; sortField - : (PLUS | MINUS)? sortFieldExpression - ; + : (PLUS | MINUS)? sortFieldExpression + ; sortFieldExpression - : fieldExpression - | AUTO LT_PRTHS fieldExpression RT_PRTHS - | STR LT_PRTHS fieldExpression RT_PRTHS - | IP LT_PRTHS fieldExpression RT_PRTHS - | NUM LT_PRTHS fieldExpression RT_PRTHS - ; + : fieldExpression + | AUTO LT_PRTHS fieldExpression RT_PRTHS + | STR LT_PRTHS fieldExpression RT_PRTHS + | IP LT_PRTHS fieldExpression RT_PRTHS + | NUM LT_PRTHS fieldExpression RT_PRTHS + ; fieldExpression - : qualifiedName - ; + : qualifiedName + ; wcFieldExpression - : wcQualifiedName - ; + : wcQualifiedName + ; -/** functions */ +// functions evalFunctionCall - : evalFunctionName LT_PRTHS functionArgs RT_PRTHS - ; + : evalFunctionName LT_PRTHS functionArgs RT_PRTHS + ; -/** cast function */ +// cast function dataTypeFunctionCall - : CAST LT_PRTHS expression AS convertedDataType RT_PRTHS - ; + : CAST LT_PRTHS expression AS convertedDataType RT_PRTHS + ; -/** boolean functions */ +// boolean functions booleanFunctionCall - : conditionFunctionBase LT_PRTHS functionArgs RT_PRTHS - ; + : conditionFunctionBase LT_PRTHS functionArgs RT_PRTHS + ; convertedDataType - : typeName=DATE - | typeName=TIME - | typeName=TIMESTAMP - | typeName=INT - | typeName=INTEGER - | typeName=DOUBLE - | typeName=LONG - | typeName=FLOAT - | typeName=STRING - | typeName=BOOLEAN - ; + : typeName = DATE + | typeName = TIME + | typeName = TIMESTAMP + | typeName = INT + | typeName = INTEGER + | typeName = DOUBLE + | typeName = LONG + | typeName = FLOAT + | typeName = STRING + | typeName = BOOLEAN + ; evalFunctionName - : mathematicalFunctionName - | 
dateTimeFunctionName - | textFunctionName - | conditionFunctionBase - | systemFunctionName - | positionFunctionName - ; + : mathematicalFunctionName + | dateTimeFunctionName + | textFunctionName + | conditionFunctionBase + | systemFunctionName + | positionFunctionName + ; functionArgs - : (functionArg (COMMA functionArg)*)? - ; + : (functionArg (COMMA functionArg)*)? + ; functionArg - : (ident EQUAL)? valueExpression - ; + : (ident EQUAL)? valueExpression + ; relevanceArg - : relevanceArgName EQUAL relevanceArgValue - ; + : relevanceArgName EQUAL relevanceArgValue + ; relevanceArgName - : ALLOW_LEADING_WILDCARD - | ANALYZER - | ANALYZE_WILDCARD - | AUTO_GENERATE_SYNONYMS_PHRASE_QUERY - | BOOST - | CUTOFF_FREQUENCY - | DEFAULT_FIELD - | DEFAULT_OPERATOR - | ENABLE_POSITION_INCREMENTS - | ESCAPE - | FIELDS - | FLAGS - | FUZZINESS - | FUZZY_MAX_EXPANSIONS - | FUZZY_PREFIX_LENGTH - | FUZZY_REWRITE - | FUZZY_TRANSPOSITIONS - | LENIENT - | LOW_FREQ_OPERATOR - | MAX_DETERMINIZED_STATES - | MAX_EXPANSIONS - | MINIMUM_SHOULD_MATCH - | OPERATOR - | PHRASE_SLOP - | PREFIX_LENGTH - | QUOTE_ANALYZER - | QUOTE_FIELD_SUFFIX - | REWRITE - | SLOP - | TIE_BREAKER - | TIME_ZONE - | TYPE - | ZERO_TERMS_QUERY - ; + : ALLOW_LEADING_WILDCARD + | ANALYZER + | ANALYZE_WILDCARD + | AUTO_GENERATE_SYNONYMS_PHRASE_QUERY + | BOOST + | CUTOFF_FREQUENCY + | DEFAULT_FIELD + | DEFAULT_OPERATOR + | ENABLE_POSITION_INCREMENTS + | ESCAPE + | FIELDS + | FLAGS + | FUZZINESS + | FUZZY_MAX_EXPANSIONS + | FUZZY_PREFIX_LENGTH + | FUZZY_REWRITE + | FUZZY_TRANSPOSITIONS + | LENIENT + | LOW_FREQ_OPERATOR + | MAX_DETERMINIZED_STATES + | MAX_EXPANSIONS + | MINIMUM_SHOULD_MATCH + | OPERATOR + | PHRASE_SLOP + | PREFIX_LENGTH + | QUOTE_ANALYZER + | QUOTE_FIELD_SUFFIX + | REWRITE + | SLOP + | TIE_BREAKER + | TIME_ZONE + | TYPE + | ZERO_TERMS_QUERY + ; relevanceFieldAndWeight - : field=relevanceField - | field=relevanceField weight=relevanceFieldWeight - | field=relevanceField BIT_XOR_OP weight=relevanceFieldWeight - 
; + : field = relevanceField + | field = relevanceField weight = relevanceFieldWeight + | field = relevanceField BIT_XOR_OP weight = relevanceFieldWeight + ; relevanceFieldWeight - : integerLiteral - | decimalLiteral - ; + : integerLiteral + | decimalLiteral + ; relevanceField - : qualifiedName - | stringLiteral - ; + : qualifiedName + | stringLiteral + ; relevanceQuery - : relevanceArgValue - ; + : relevanceArgValue + ; relevanceArgValue - : qualifiedName - | literalValue - ; + : qualifiedName + | literalValue + ; mathematicalFunctionName - : ABS - | CBRT - | CEIL - | CEILING - | CONV - | CRC32 - | E - | EXP - | FLOOR - | LN - | LOG - | LOG10 - | LOG2 - | MOD - | PI - | POW - | POWER - | RAND - | ROUND - | SIGN - | SQRT - | TRUNCATE - | trigonometricFunctionName - ; + : ABS + | CBRT + | CEIL + | CEILING + | CONV + | CRC32 + | E + | EXP + | FLOOR + | LN + | LOG + | LOG10 + | LOG2 + | MOD + | PI + | POW + | POWER + | RAND + | ROUND + | SIGN + | SQRT + | TRUNCATE + | trigonometricFunctionName + ; trigonometricFunctionName - : ACOS - | ASIN - | ATAN - | ATAN2 - | COS - | COT - | DEGREES - | RADIANS - | SIN - | TAN - ; + : ACOS + | ASIN + | ATAN + | ATAN2 + | COS + | COT + | DEGREES + | RADIANS + | SIN + | TAN + ; dateTimeFunctionName - : ADDDATE - | ADDTIME - | CONVERT_TZ - | CURDATE - | CURRENT_DATE - | CURRENT_TIME - | CURRENT_TIMESTAMP - | CURTIME - | DATE - | DATEDIFF - | DATETIME - | DATE_ADD - | DATE_FORMAT - | DATE_SUB - | DAY - | DAYNAME - | DAYOFMONTH - | DAYOFWEEK - | DAYOFYEAR - | DAY_OF_MONTH - | DAY_OF_WEEK - | DAY_OF_YEAR - | FROM_DAYS - | FROM_UNIXTIME - | HOUR - | HOUR_OF_DAY - | LAST_DAY - | LOCALTIME - | LOCALTIMESTAMP - | MAKEDATE - | MAKETIME - | MICROSECOND - | MINUTE - | MINUTE_OF_DAY - | MINUTE_OF_HOUR - | MONTH - | MONTHNAME - | MONTH_OF_YEAR - | NOW - | PERIOD_ADD - | PERIOD_DIFF - | QUARTER - | SECOND - | SECOND_OF_MINUTE - | SEC_TO_TIME - | STR_TO_DATE - | SUBDATE - | SUBTIME - | SYSDATE - | TIME - | TIMEDIFF - | TIMESTAMP - | TIME_FORMAT - 
| TIME_TO_SEC - | TO_DAYS - | TO_SECONDS - | UNIX_TIMESTAMP - | UTC_DATE - | UTC_TIME - | UTC_TIMESTAMP - | WEEK - | WEEKDAY - | WEEK_OF_YEAR - | YEAR - | YEARWEEK - ; + : ADDDATE + | ADDTIME + | CONVERT_TZ + | CURDATE + | CURRENT_DATE + | CURRENT_TIME + | CURRENT_TIMESTAMP + | CURTIME + | DATE + | DATEDIFF + | DATETIME + | DATE_ADD + | DATE_FORMAT + | DATE_SUB + | DAY + | DAYNAME + | DAYOFMONTH + | DAYOFWEEK + | DAYOFYEAR + | DAY_OF_MONTH + | DAY_OF_WEEK + | DAY_OF_YEAR + | FROM_DAYS + | FROM_UNIXTIME + | HOUR + | HOUR_OF_DAY + | LAST_DAY + | LOCALTIME + | LOCALTIMESTAMP + | MAKEDATE + | MAKETIME + | MICROSECOND + | MINUTE + | MINUTE_OF_DAY + | MINUTE_OF_HOUR + | MONTH + | MONTHNAME + | MONTH_OF_YEAR + | NOW + | PERIOD_ADD + | PERIOD_DIFF + | QUARTER + | SECOND + | SECOND_OF_MINUTE + | SEC_TO_TIME + | STR_TO_DATE + | SUBDATE + | SUBTIME + | SYSDATE + | TIME + | TIMEDIFF + | TIMESTAMP + | TIME_FORMAT + | TIME_TO_SEC + | TO_DAYS + | TO_SECONDS + | UNIX_TIMESTAMP + | UTC_DATE + | UTC_TIME + | UTC_TIMESTAMP + | WEEK + | WEEKDAY + | WEEK_OF_YEAR + | YEAR + | YEARWEEK + ; getFormatFunction - : GET_FORMAT LT_PRTHS getFormatType COMMA functionArg RT_PRTHS - ; + : GET_FORMAT LT_PRTHS getFormatType COMMA functionArg RT_PRTHS + ; getFormatType - : DATE - | DATETIME - | TIME - | TIMESTAMP - ; + : DATE + | DATETIME + | TIME + | TIMESTAMP + ; extractFunction - : EXTRACT LT_PRTHS datetimePart FROM functionArg RT_PRTHS - ; + : EXTRACT LT_PRTHS datetimePart FROM functionArg RT_PRTHS + ; simpleDateTimePart - : MICROSECOND - | SECOND - | MINUTE - | HOUR - | DAY - | WEEK - | MONTH - | QUARTER - | YEAR - ; + : MICROSECOND + | SECOND + | MINUTE + | HOUR + | DAY + | WEEK + | MONTH + | QUARTER + | YEAR + ; complexDateTimePart - : SECOND_MICROSECOND - | MINUTE_MICROSECOND - | MINUTE_SECOND - | HOUR_MICROSECOND - | HOUR_SECOND - | HOUR_MINUTE - | DAY_MICROSECOND - | DAY_SECOND - | DAY_MINUTE - | DAY_HOUR - | YEAR_MONTH - ; + : SECOND_MICROSECOND + | MINUTE_MICROSECOND + | MINUTE_SECOND + | 
HOUR_MICROSECOND + | HOUR_SECOND + | HOUR_MINUTE + | DAY_MICROSECOND + | DAY_SECOND + | DAY_MINUTE + | DAY_HOUR + | YEAR_MONTH + ; datetimePart - : simpleDateTimePart - | complexDateTimePart - ; + : simpleDateTimePart + | complexDateTimePart + ; timestampFunction - : timestampFunctionName LT_PRTHS simpleDateTimePart COMMA firstArg=functionArg COMMA secondArg=functionArg RT_PRTHS - ; + : timestampFunctionName LT_PRTHS simpleDateTimePart COMMA firstArg = functionArg COMMA secondArg = functionArg RT_PRTHS + ; timestampFunctionName - : TIMESTAMPADD - | TIMESTAMPDIFF - ; + : TIMESTAMPADD + | TIMESTAMPDIFF + ; -/** condition function return boolean value */ +// condition function return boolean value conditionFunctionBase - : LIKE - | IF - | ISNULL - | ISNOTNULL - | IFNULL - | NULLIF - ; + : LIKE + | IF + | ISNULL + | ISNOTNULL + | IFNULL + | NULLIF + ; systemFunctionName - : TYPEOF - ; + : TYPEOF + ; textFunctionName - : SUBSTR - | SUBSTRING - | TRIM - | LTRIM - | RTRIM - | LOWER - | UPPER - | CONCAT - | CONCAT_WS - | LENGTH - | STRCMP - | RIGHT - | LEFT - | ASCII - | LOCATE - | REPLACE - | REVERSE - ; + : SUBSTR + | SUBSTRING + | TRIM + | LTRIM + | RTRIM + | LOWER + | UPPER + | CONCAT + | CONCAT_WS + | LENGTH + | STRCMP + | RIGHT + | LEFT + | ASCII + | LOCATE + | REPLACE + | REVERSE + ; positionFunctionName - : POSITION - ; - -/** operators */ -comparisonOperator - : EQUAL - | NOT_EQUAL - | LESS - | NOT_LESS - | GREATER - | NOT_GREATER - | REGEXP - ; + : POSITION + ; + +// operators + comparisonOperator + : EQUAL + | NOT_EQUAL + | LESS + | NOT_LESS + | GREATER + | NOT_GREATER + | REGEXP + ; singleFieldRelevanceFunctionName - : MATCH - | MATCH_PHRASE - | MATCH_BOOL_PREFIX - | MATCH_PHRASE_PREFIX - ; + : MATCH + | MATCH_PHRASE + | MATCH_BOOL_PREFIX + | MATCH_PHRASE_PREFIX + ; multiFieldRelevanceFunctionName - : SIMPLE_QUERY_STRING - | MULTI_MATCH - | QUERY_STRING - ; + : SIMPLE_QUERY_STRING + | MULTI_MATCH + | QUERY_STRING + ; -/** literals and values*/ +// literals and 
values literalValue - : intervalLiteral - | stringLiteral - | integerLiteral - | decimalLiteral - | booleanLiteral - | datetimeLiteral //#datetime - ; + : intervalLiteral + | stringLiteral + | integerLiteral + | decimalLiteral + | booleanLiteral + | datetimeLiteral //#datetime + ; intervalLiteral - : INTERVAL valueExpression intervalUnit - ; + : INTERVAL valueExpression intervalUnit + ; stringLiteral - : DQUOTA_STRING | SQUOTA_STRING - ; + : DQUOTA_STRING + | SQUOTA_STRING + ; integerLiteral - : (PLUS | MINUS)? INTEGER_LITERAL - ; + : (PLUS | MINUS)? INTEGER_LITERAL + ; decimalLiteral - : (PLUS | MINUS)? DECIMAL_LITERAL - ; + : (PLUS | MINUS)? DECIMAL_LITERAL + ; booleanLiteral - : TRUE | FALSE - ; + : TRUE + | FALSE + ; // Date and Time Literal, follow ANSI 92 datetimeLiteral - : dateLiteral - | timeLiteral - | timestampLiteral - ; + : dateLiteral + | timeLiteral + | timestampLiteral + ; dateLiteral - : DATE date=stringLiteral - ; + : DATE date = stringLiteral + ; timeLiteral - : TIME time=stringLiteral - ; + : TIME time = stringLiteral + ; timestampLiteral - : TIMESTAMP timestamp=stringLiteral - ; + : TIMESTAMP timestamp = stringLiteral + ; intervalUnit - : MICROSECOND - | SECOND - | MINUTE - | HOUR - | DAY - | WEEK - | MONTH - | QUARTER - | YEAR - | SECOND_MICROSECOND - | MINUTE_MICROSECOND - | MINUTE_SECOND - | HOUR_MICROSECOND - | HOUR_SECOND - | HOUR_MINUTE - | DAY_MICROSECOND - | DAY_SECOND - | DAY_MINUTE - | DAY_HOUR - | YEAR_MONTH - ; + : MICROSECOND + | SECOND + | MINUTE + | HOUR + | DAY + | WEEK + | MONTH + | QUARTER + | YEAR + | SECOND_MICROSECOND + | MINUTE_MICROSECOND + | MINUTE_SECOND + | HOUR_MICROSECOND + | HOUR_SECOND + | HOUR_MINUTE + | DAY_MICROSECOND + | DAY_SECOND + | DAY_MINUTE + | DAY_HOUR + | YEAR_MONTH + ; timespanUnit - : MS - | S - | M - | H - | D - | W - | Q - | Y - | MILLISECOND - | SECOND - | MINUTE - | HOUR - | DAY - | WEEK - | MONTH - | QUARTER - | YEAR - ; - + : MS + | S + | M + | H + | D + | W + | Q + | Y + | MILLISECOND + | 
SECOND + | MINUTE + | HOUR + | DAY + | WEEK + | MONTH + | QUARTER + | YEAR + ; valueList - : LT_PRTHS literalValue (COMMA literalValue)* RT_PRTHS - ; + : LT_PRTHS literalValue (COMMA literalValue)* RT_PRTHS + ; qualifiedName - : ident (DOT ident)* #identsAsQualifiedName - ; + : ident (DOT ident)* # identsAsQualifiedName + ; tableQualifiedName - : tableIdent (DOT ident)* #identsAsTableQualifiedName - ; + : tableIdent (DOT ident)* # identsAsTableQualifiedName + ; wcQualifiedName - : wildcard (DOT wildcard)* #identsAsWildcardQualifiedName - ; + : wildcard (DOT wildcard)* # identsAsWildcardQualifiedName + ; ident - : (DOT)? ID - | BACKTICK ident BACKTICK - | BQUOTA_STRING - | keywordsCanBeId - ; + : (DOT)? ID + | BACKTICK ident BACKTICK + | BQUOTA_STRING + | keywordsCanBeId + ; tableIdent - : (CLUSTER)? ident - ; + : (CLUSTER)? ident + ; wildcard - : ident (MODULE ident)* (MODULE)? - | SINGLE_QUOTE wildcard SINGLE_QUOTE - | DOUBLE_QUOTE wildcard DOUBLE_QUOTE - | BACKTICK wildcard BACKTICK - ; + : ident (MODULE ident)* (MODULE)? 
+ | SINGLE_QUOTE wildcard SINGLE_QUOTE + | DOUBLE_QUOTE wildcard DOUBLE_QUOTE + | BACKTICK wildcard BACKTICK + ; keywordsCanBeId - : D // OD SQL and ODBC special - | timespanUnit | SPAN - | evalFunctionName - | relevanceArgName - | intervalUnit - | dateTimeFunctionName - | textFunctionName - | mathematicalFunctionName - | positionFunctionName - // commands - | SEARCH | DESCRIBE | SHOW | FROM | WHERE | FIELDS | RENAME | STATS | DEDUP | SORT | EVAL | HEAD | TOP | RARE - | PARSE | METHOD | REGEX | PUNCT | GROK | PATTERN | PATTERNS | NEW_FIELD | KMEANS | AD | ML - // commands assist keywords - | SOURCE | INDEX | DESC | DATASOURCES - // CLAUSEKEYWORDS - | SORTBY - // FIELDKEYWORDSAUTO - | STR | IP | NUM - // ARGUMENT KEYWORDS - | KEEPEMPTY | CONSECUTIVE | DEDUP_SPLITVALUES | PARTITIONS | ALLNUM | DELIM | CENTROIDS | ITERATIONS | DISTANCE_TYPE - | NUMBER_OF_TREES | SHINGLE_SIZE | SAMPLE_SIZE | OUTPUT_AFTER | TIME_DECAY | ANOMALY_RATE | CATEGORY_FIELD - | TIME_FIELD | TIME_ZONE | TRAINING_DATA_SIZE | ANOMALY_SCORE_THRESHOLD - // AGGREGATIONS - | AVG | COUNT | DISTINCT_COUNT | ESTDC | ESTDC_ERROR | MAX | MEAN | MEDIAN | MIN | MODE | RANGE | STDEV | STDEVP - | SUM | SUMSQ | VAR_SAMP | VAR_POP | STDDEV_SAMP | STDDEV_POP | PERCENTILE | TAKE | FIRST | LAST | LIST | VALUES - | EARLIEST | EARLIEST_TIME | LATEST | LATEST_TIME | PER_DAY | PER_HOUR | PER_MINUTE | PER_SECOND | RATE | SPARKLINE - | C | DC - ; + : D // OD SQL and ODBC special + | timespanUnit + | SPAN + | evalFunctionName + | relevanceArgName + | intervalUnit + | dateTimeFunctionName + | textFunctionName + | mathematicalFunctionName + | positionFunctionName + // commands + | SEARCH + | DESCRIBE + | SHOW + | FROM + | WHERE + | FIELDS + | RENAME + | STATS + | DEDUP + | SORT + | EVAL + | HEAD + | TOP + | RARE + | PARSE + | METHOD + | REGEX + | PUNCT + | GROK + | PATTERN + | PATTERNS + | NEW_FIELD + | KMEANS + | AD + | ML + // commands assist keywords + | SOURCE + | INDEX + | DESC + | DATASOURCES + // CLAUSEKEYWORDS + | 
SORTBY + // FIELDKEYWORDSAUTO + | STR + | IP + | NUM + // ARGUMENT KEYWORDS + | KEEPEMPTY + | CONSECUTIVE + | DEDUP_SPLITVALUES + | PARTITIONS + | ALLNUM + | DELIM + | CENTROIDS + | ITERATIONS + | DISTANCE_TYPE + | NUMBER_OF_TREES + | SHINGLE_SIZE + | SAMPLE_SIZE + | OUTPUT_AFTER + | TIME_DECAY + | ANOMALY_RATE + | CATEGORY_FIELD + | TIME_FIELD + | TIME_ZONE + | TRAINING_DATA_SIZE + | ANOMALY_SCORE_THRESHOLD + // AGGREGATIONS + | AVG + | COUNT + | DISTINCT_COUNT + | ESTDC + | ESTDC_ERROR + | MAX + | MEAN + | MEDIAN + | MIN + | MODE + | RANGE + | STDEV + | STDEVP + | SUM + | SUMSQ + | VAR_SAMP + | VAR_POP + | STDDEV_SAMP + | STDDEV_POP + | PERCENTILE + | TAKE + | FIRST + | LAST + | LIST + | VALUES + | EARLIEST + | EARLIEST_TIME + | LATEST + | LATEST_TIME + | PER_DAY + | PER_HOUR + | PER_MINUTE + | PER_SECOND + | RATE + | SPARKLINE + | C + | DC + ; diff --git a/sql/src/main/antlr/OpenSearchSQLParser.g4 b/sql/src/main/antlr/OpenSearchSQLParser.g4 index 2c3defb9f1..c16bc9805e 100644 --- a/sql/src/main/antlr/OpenSearchSQLParser.g4 +++ b/sql/src/main/antlr/OpenSearchSQLParser.g4 @@ -30,815 +30,805 @@ THE SOFTWARE. parser grammar OpenSearchSQLParser; -options { tokenVocab=OpenSearchSQLLexer; } - +options { tokenVocab = OpenSearchSQLLexer; } // Top Level Description // Root rule + root - : sqlStatement? SEMI? EOF - ; + : sqlStatement? SEMI? EOF + ; -// Only SELECT +// Only SELECT sqlStatement - : dmlStatement | adminStatement - ; + : dmlStatement + | adminStatement + ; dmlStatement - : selectStatement - ; - + : selectStatement + ; // Data Manipulation Language -// Primary DML Statements - +// Primary DML Statements selectStatement - : querySpecification #simpleSelect - ; + : querySpecification # simpleSelect + ; adminStatement - : showStatement - | describeStatement - ; + : showStatement + | describeStatement + ; showStatement - : SHOW TABLES tableFilter - ; + : SHOW TABLES tableFilter + ; describeStatement - : DESCRIBE TABLES tableFilter columnFilter? 
- ; + : DESCRIBE TABLES tableFilter columnFilter? + ; columnFilter - : COLUMNS LIKE showDescribePattern - ; + : COLUMNS LIKE showDescribePattern + ; tableFilter - : LIKE showDescribePattern - ; + : LIKE showDescribePattern + ; showDescribePattern - : stringLiteral - ; -// Select Statement's Details + : stringLiteral + ; +// Select Statement's Details querySpecification - : selectClause - fromClause? - limitClause? - ; + : selectClause fromClause? limitClause? + ; selectClause - : SELECT selectSpec? selectElements - ; + : SELECT selectSpec? selectElements + ; selectSpec - : (ALL | DISTINCT) - ; + : (ALL | DISTINCT) + ; selectElements - : (star=STAR | selectElement) (COMMA selectElement)* - ; + : (star = STAR | selectElement) (COMMA selectElement)* + ; selectElement - : expression (AS? alias)? - ; + : expression (AS? alias)? + ; fromClause - : FROM relation - (whereClause)? - (groupByClause)? - (havingClause)? - (orderByClause)? // Place it under FROM for now but actually not necessary ex. A UNION B ORDER BY - ; + : FROM relation (whereClause)? (groupByClause)? (havingClause)? (orderByClause)? // Place it under FROM for now but actually not necessary ex. A UNION B ORDER BY + + ; relation - : tableName (AS? alias)? #tableAsRelation - | LR_BRACKET subquery=querySpecification RR_BRACKET AS? alias #subqueryAsRelation - ; + : tableName (AS? alias)? # tableAsRelation + | LR_BRACKET subquery = querySpecification RR_BRACKET AS? 
alias # subqueryAsRelation + ; whereClause - : WHERE expression - ; + : WHERE expression + ; groupByClause - : GROUP BY groupByElements - ; + : GROUP BY groupByElements + ; groupByElements - : groupByElement (COMMA groupByElement)* - ; + : groupByElement (COMMA groupByElement)* + ; groupByElement - : expression - ; + : expression + ; havingClause - : HAVING expression - ; + : HAVING expression + ; orderByClause - : ORDER BY orderByElement (COMMA orderByElement)* - ; + : ORDER BY orderByElement (COMMA orderByElement)* + ; orderByElement - : expression order=(ASC | DESC)? (NULLS (FIRST | LAST))? - ; + : expression order = (ASC | DESC)? (NULLS (FIRST | LAST))? + ; limitClause - : LIMIT (offset=decimalLiteral COMMA)? limit=decimalLiteral - | LIMIT limit=decimalLiteral OFFSET offset=decimalLiteral - ; + : LIMIT (offset = decimalLiteral COMMA)? limit = decimalLiteral + | LIMIT limit = decimalLiteral OFFSET offset = decimalLiteral + ; // Window Function's Details windowFunctionClause - : function=windowFunction overClause - ; + : function = windowFunction overClause + ; windowFunction - : functionName=(ROW_NUMBER | RANK | DENSE_RANK) - LR_BRACKET functionArgs? RR_BRACKET #scalarWindowFunction - | aggregateFunction #aggregateWindowFunction - ; + : functionName = (ROW_NUMBER | RANK | DENSE_RANK) LR_BRACKET functionArgs? RR_BRACKET # scalarWindowFunction + | aggregateFunction # aggregateWindowFunction + ; overClause - : OVER LR_BRACKET partitionByClause? orderByClause? RR_BRACKET - ; + : OVER LR_BRACKET partitionByClause? orderByClause? RR_BRACKET + ; partitionByClause - : PARTITION BY expression (COMMA expression)* - ; - - -// Literals + : PARTITION BY expression (COMMA expression)* + ; +// Literals constant - : stringLiteral #string - | sign? decimalLiteral #signedDecimal - | sign? 
realLiteral #signedReal - | booleanLiteral #boolean - | datetimeLiteral #datetime - | intervalLiteral #interval - | nullLiteral #null - // Doesn't support the following types for now - //| BIT_STRING - //| NOT? nullLiteral=(NULL_LITERAL | NULL_SPEC_LITERAL) - ; + : stringLiteral # string + | sign? decimalLiteral # signedDecimal + | sign? realLiteral # signedReal + | booleanLiteral # boolean + | datetimeLiteral # datetime + | intervalLiteral # interval + | nullLiteral # null + // Doesn't support the following types for now + //| BIT_STRING + //| NOT? nullLiteral=(NULL_LITERAL | NULL_SPEC_LITERAL) + ; decimalLiteral - : DECIMAL_LITERAL - | ZERO_DECIMAL - | ONE_DECIMAL - | TWO_DECIMAL - ; + : DECIMAL_LITERAL + | ZERO_DECIMAL + | ONE_DECIMAL + | TWO_DECIMAL + ; stringLiteral - : STRING_LITERAL - | DOUBLE_QUOTE_ID - ; + : STRING_LITERAL + | DOUBLE_QUOTE_ID + ; booleanLiteral - : TRUE - | FALSE - ; + : TRUE + | FALSE + ; realLiteral - : REAL_LITERAL - ; + : REAL_LITERAL + ; sign - : PLUS - | MINUS - ; + : PLUS + | MINUS + ; nullLiteral - : NULL_LITERAL - ; + : NULL_LITERAL + ; // Date and Time Literal, follow ANSI 92 datetimeLiteral - : dateLiteral - | timeLiteral - | timestampLiteral - ; + : dateLiteral + | timeLiteral + | timestampLiteral + ; dateLiteral - : DATE date=stringLiteral - | LEFT_BRACE (DATE | D) date=stringLiteral RIGHT_BRACE - ; + : DATE date = stringLiteral + | LEFT_BRACE (DATE | D) date = stringLiteral RIGHT_BRACE + ; timeLiteral - : TIME time=stringLiteral - | LEFT_BRACE (TIME | T) time=stringLiteral RIGHT_BRACE - ; + : TIME time = stringLiteral + | LEFT_BRACE (TIME | T) time = stringLiteral RIGHT_BRACE + ; timestampLiteral - : TIMESTAMP timestamp=stringLiteral - | LEFT_BRACE (TIMESTAMP | TS) timestamp=stringLiteral RIGHT_BRACE - ; + : TIMESTAMP timestamp = stringLiteral + | LEFT_BRACE (TIMESTAMP | TS) timestamp = stringLiteral RIGHT_BRACE + ; // Actually, these constants are shortcuts to the corresponding functions datetimeConstantLiteral - : 
CURRENT_DATE - | CURRENT_TIME - | CURRENT_TIMESTAMP - | LOCALTIME - | LOCALTIMESTAMP - | UTC_TIMESTAMP - | UTC_DATE - | UTC_TIME - ; + : CURRENT_DATE + | CURRENT_TIME + | CURRENT_TIMESTAMP + | LOCALTIME + | LOCALTIMESTAMP + | UTC_TIMESTAMP + | UTC_DATE + | UTC_TIME + ; intervalLiteral - : INTERVAL expression intervalUnit - ; + : INTERVAL expression intervalUnit + ; intervalUnit - : MICROSECOND - | SECOND - | MINUTE - | HOUR - | DAY - | WEEK - | MONTH - | QUARTER - | YEAR - | SECOND_MICROSECOND - | MINUTE_MICROSECOND - | MINUTE_SECOND - | HOUR_MICROSECOND - | HOUR_SECOND - | HOUR_MINUTE - | DAY_MICROSECOND - | DAY_SECOND - | DAY_MINUTE - | DAY_HOUR - | YEAR_MONTH - ; + : MICROSECOND + | SECOND + | MINUTE + | HOUR + | DAY + | WEEK + | MONTH + | QUARTER + | YEAR + | SECOND_MICROSECOND + | MINUTE_MICROSECOND + | MINUTE_SECOND + | HOUR_MICROSECOND + | HOUR_SECOND + | HOUR_MINUTE + | DAY_MICROSECOND + | DAY_SECOND + | DAY_MINUTE + | DAY_HOUR + | YEAR_MONTH + ; // predicates // Simplified approach for expression expression - : NOT expression #notExpression - | left=expression AND right=expression #andExpression - | left=expression OR right=expression #orExpression - | predicate #predicateExpression - ; + : NOT expression # notExpression + | left = expression AND right = expression # andExpression + | left = expression OR right = expression # orExpression + | predicate # predicateExpression + ; predicate - : expressionAtom #expressionAtomPredicate - | left=predicate comparisonOperator right=predicate #binaryComparisonPredicate - | predicate IS nullNotnull #isNullPredicate - | predicate NOT? BETWEEN predicate AND predicate #betweenPredicate - | left=predicate NOT? LIKE right=predicate #likePredicate - | left=predicate REGEXP right=predicate #regexpPredicate - | predicate NOT? 
IN '(' expressions ')' #inPredicate - ; + : expressionAtom # expressionAtomPredicate + | left = predicate comparisonOperator right = predicate # binaryComparisonPredicate + | predicate IS nullNotnull # isNullPredicate + | predicate NOT? BETWEEN predicate AND predicate # betweenPredicate + | left = predicate NOT? LIKE right = predicate # likePredicate + | left = predicate REGEXP right = predicate # regexpPredicate + | predicate NOT? IN '(' expressions ')' # inPredicate + ; expressions - : expression (',' expression)* - ; + : expression (',' expression)* + ; expressionAtom - : constant #constantExpressionAtom - | columnName #fullColumnNameExpressionAtom - | functionCall #functionCallExpressionAtom - | LR_BRACKET expression RR_BRACKET #nestedExpressionAtom - | left=expressionAtom - mathOperator=(STAR | SLASH | MODULE) - right=expressionAtom #mathExpressionAtom - | left=expressionAtom - mathOperator=(PLUS | MINUS) - right=expressionAtom #mathExpressionAtom - ; + : constant # constantExpressionAtom + | columnName # fullColumnNameExpressionAtom + | functionCall # functionCallExpressionAtom + | LR_BRACKET expression RR_BRACKET # nestedExpressionAtom + | left = expressionAtom mathOperator = (STAR | SLASH | MODULE) right = expressionAtom # mathExpressionAtom + | left = expressionAtom mathOperator = (PLUS | MINUS) right = expressionAtom # mathExpressionAtom + ; comparisonOperator - : '=' - | '>' - | '<' - | '<' '=' - | '>' '=' - | '<' '>' - | '!' '=' - ; + : '=' + | '>' + | '<' + | '<' '=' + | '>' '=' + | '<' '>' + | '!' '=' + ; nullNotnull - : NOT? NULL_LITERAL - ; + : NOT? NULL_LITERAL + ; functionCall - : nestedFunctionName LR_BRACKET allTupleFields RR_BRACKET #nestedAllFunctionCall - | scalarFunctionName LR_BRACKET functionArgs RR_BRACKET #scalarFunctionCall - | specificFunction #specificFunctionCall - | windowFunctionClause #windowFunctionCall - | aggregateFunction #aggregateFunctionCall - | aggregateFunction (orderByClause)? 
filterClause #filteredAggregationFunctionCall - | scoreRelevanceFunction #scoreRelevanceFunctionCall - | relevanceFunction #relevanceFunctionCall - | highlightFunction #highlightFunctionCall - | positionFunction #positionFunctionCall - | extractFunction #extractFunctionCall - | getFormatFunction #getFormatFunctionCall - | timestampFunction #timestampFunctionCall - ; + : nestedFunctionName LR_BRACKET allTupleFields RR_BRACKET # nestedAllFunctionCall + | scalarFunctionName LR_BRACKET functionArgs RR_BRACKET # scalarFunctionCall + | specificFunction # specificFunctionCall + | windowFunctionClause # windowFunctionCall + | aggregateFunction # aggregateFunctionCall + | aggregateFunction (orderByClause)? filterClause # filteredAggregationFunctionCall + | scoreRelevanceFunction # scoreRelevanceFunctionCall + | relevanceFunction # relevanceFunctionCall + | highlightFunction # highlightFunctionCall + | positionFunction # positionFunctionCall + | extractFunction # extractFunctionCall + | getFormatFunction # getFormatFunctionCall + | timestampFunction # timestampFunctionCall + ; timestampFunction - : timestampFunctionName LR_BRACKET simpleDateTimePart COMMA firstArg=functionArg COMMA secondArg=functionArg RR_BRACKET - ; + : timestampFunctionName LR_BRACKET simpleDateTimePart COMMA firstArg = functionArg COMMA secondArg = functionArg RR_BRACKET + ; timestampFunctionName - : TIMESTAMPADD - | TIMESTAMPDIFF - ; + : TIMESTAMPADD + | TIMESTAMPDIFF + ; getFormatFunction - : GET_FORMAT LR_BRACKET getFormatType COMMA functionArg RR_BRACKET - ; + : GET_FORMAT LR_BRACKET getFormatType COMMA functionArg RR_BRACKET + ; getFormatType - : DATE - | DATETIME - | TIME - | TIMESTAMP - ; + : DATE + | DATETIME + | TIME + | TIMESTAMP + ; extractFunction - : EXTRACT LR_BRACKET datetimePart FROM functionArg RR_BRACKET - ; + : EXTRACT LR_BRACKET datetimePart FROM functionArg RR_BRACKET + ; simpleDateTimePart - : MICROSECOND - | SECOND - | MINUTE - | HOUR - | DAY - | WEEK - | MONTH - | QUARTER - | YEAR 
- ; + : MICROSECOND + | SECOND + | MINUTE + | HOUR + | DAY + | WEEK + | MONTH + | QUARTER + | YEAR + ; complexDateTimePart - : SECOND_MICROSECOND - | MINUTE_MICROSECOND - | MINUTE_SECOND - | HOUR_MICROSECOND - | HOUR_SECOND - | HOUR_MINUTE - | DAY_MICROSECOND - | DAY_SECOND - | DAY_MINUTE - | DAY_HOUR - | YEAR_MONTH - ; + : SECOND_MICROSECOND + | MINUTE_MICROSECOND + | MINUTE_SECOND + | HOUR_MICROSECOND + | HOUR_SECOND + | HOUR_MINUTE + | DAY_MICROSECOND + | DAY_SECOND + | DAY_MINUTE + | DAY_HOUR + | YEAR_MONTH + ; datetimePart - : simpleDateTimePart - | complexDateTimePart - ; + : simpleDateTimePart + | complexDateTimePart + ; highlightFunction - : HIGHLIGHT LR_BRACKET relevanceField (COMMA highlightArg)* RR_BRACKET - ; + : HIGHLIGHT LR_BRACKET relevanceField (COMMA highlightArg)* RR_BRACKET + ; positionFunction - : POSITION LR_BRACKET functionArg IN functionArg RR_BRACKET - ; + : POSITION LR_BRACKET functionArg IN functionArg RR_BRACKET + ; matchQueryAltSyntaxFunction - : field=relevanceField EQUAL_SYMBOL MATCH_QUERY LR_BRACKET query=relevanceQuery RR_BRACKET - ; + : field = relevanceField EQUAL_SYMBOL MATCH_QUERY LR_BRACKET query = relevanceQuery RR_BRACKET + ; scalarFunctionName - : mathematicalFunctionName - | dateTimeFunctionName - | textFunctionName - | flowControlFunctionName - | systemFunctionName - | nestedFunctionName - ; + : mathematicalFunctionName + | dateTimeFunctionName + | textFunctionName + | flowControlFunctionName + | systemFunctionName + | nestedFunctionName + ; specificFunction - : CASE expression caseFuncAlternative+ - (ELSE elseArg=functionArg)? END #caseFunctionCall - | CASE caseFuncAlternative+ - (ELSE elseArg=functionArg)? END #caseFunctionCall - | CAST '(' expression AS convertedDataType ')' #dataTypeFunctionCall - ; + : CASE expression caseFuncAlternative+ (ELSE elseArg = functionArg)? END # caseFunctionCall + | CASE caseFuncAlternative+ (ELSE elseArg = functionArg)? 
END # caseFunctionCall + | CAST '(' expression AS convertedDataType ')' # dataTypeFunctionCall + ; relevanceFunction - : noFieldRelevanceFunction | singleFieldRelevanceFunction | multiFieldRelevanceFunction | altSingleFieldRelevanceFunction | altMultiFieldRelevanceFunction - ; + : noFieldRelevanceFunction + | singleFieldRelevanceFunction + | multiFieldRelevanceFunction + | altSingleFieldRelevanceFunction + | altMultiFieldRelevanceFunction + ; scoreRelevanceFunction - : scoreRelevanceFunctionName LR_BRACKET relevanceFunction (COMMA weight=relevanceFieldWeight)? RR_BRACKET - ; + : scoreRelevanceFunctionName LR_BRACKET relevanceFunction (COMMA weight = relevanceFieldWeight)? RR_BRACKET + ; noFieldRelevanceFunction - : noFieldRelevanceFunctionName LR_BRACKET query=relevanceQuery (COMMA relevanceArg)* RR_BRACKET - ; + : noFieldRelevanceFunctionName LR_BRACKET query = relevanceQuery (COMMA relevanceArg)* RR_BRACKET + ; // Field is a single column singleFieldRelevanceFunction - : singleFieldRelevanceFunctionName LR_BRACKET - field=relevanceField COMMA query=relevanceQuery - (COMMA relevanceArg)* RR_BRACKET; + : singleFieldRelevanceFunctionName LR_BRACKET field = relevanceField COMMA query = relevanceQuery (COMMA relevanceArg)* RR_BRACKET + ; // Field is a list of columns multiFieldRelevanceFunction - : multiFieldRelevanceFunctionName LR_BRACKET - LT_SQR_PRTHS field=relevanceFieldAndWeight (COMMA field=relevanceFieldAndWeight)* RT_SQR_PRTHS - COMMA query=relevanceQuery (COMMA relevanceArg)* RR_BRACKET - | multiFieldRelevanceFunctionName LR_BRACKET - alternateMultiMatchQuery COMMA alternateMultiMatchField (COMMA relevanceArg)* RR_BRACKET - ; + : multiFieldRelevanceFunctionName LR_BRACKET LT_SQR_PRTHS field = relevanceFieldAndWeight (COMMA field = relevanceFieldAndWeight)* RT_SQR_PRTHS COMMA query = relevanceQuery (COMMA relevanceArg)* RR_BRACKET + | multiFieldRelevanceFunctionName LR_BRACKET alternateMultiMatchQuery COMMA alternateMultiMatchField (COMMA relevanceArg)* 
RR_BRACKET + ; altSingleFieldRelevanceFunction - : field=relevanceField EQUAL_SYMBOL altSyntaxFunctionName=altSingleFieldRelevanceFunctionName LR_BRACKET query=relevanceQuery (COMMA relevanceArg)* RR_BRACKET - ; + : field = relevanceField EQUAL_SYMBOL altSyntaxFunctionName = altSingleFieldRelevanceFunctionName LR_BRACKET query = relevanceQuery (COMMA relevanceArg)* RR_BRACKET + ; altMultiFieldRelevanceFunction - : field=relevanceField EQUAL_SYMBOL altSyntaxFunctionName=altMultiFieldRelevanceFunctionName LR_BRACKET query=relevanceQuery (COMMA relevanceArg)* RR_BRACKET - ; + : field = relevanceField EQUAL_SYMBOL altSyntaxFunctionName = altMultiFieldRelevanceFunctionName LR_BRACKET query = relevanceQuery (COMMA relevanceArg)* RR_BRACKET + ; convertedDataType - : typeName=DATE - | typeName=TIME - | typeName=TIMESTAMP - | typeName=INT - | typeName=INTEGER - | typeName=DOUBLE - | typeName=LONG - | typeName=FLOAT - | typeName=STRING - | typeName=BOOLEAN - ; + : typeName = DATE + | typeName = TIME + | typeName = TIMESTAMP + | typeName = INT + | typeName = INTEGER + | typeName = DOUBLE + | typeName = LONG + | typeName = FLOAT + | typeName = STRING + | typeName = BOOLEAN + ; caseFuncAlternative - : WHEN condition=functionArg - THEN consequent=functionArg - ; + : WHEN condition = functionArg THEN consequent = functionArg + ; aggregateFunction - : functionName=aggregationFunctionName LR_BRACKET functionArg RR_BRACKET - #regularAggregateFunctionCall - | COUNT LR_BRACKET STAR RR_BRACKET #countStarFunctionCall - | COUNT LR_BRACKET DISTINCT functionArg RR_BRACKET #distinctCountFunctionCall - ; + : functionName = aggregationFunctionName LR_BRACKET functionArg RR_BRACKET # regularAggregateFunctionCall + | COUNT LR_BRACKET STAR RR_BRACKET # countStarFunctionCall + | COUNT LR_BRACKET DISTINCT functionArg RR_BRACKET # distinctCountFunctionCall + ; filterClause - : FILTER LR_BRACKET WHERE expression RR_BRACKET - ; + : FILTER LR_BRACKET WHERE expression RR_BRACKET + ; 
aggregationFunctionName - : AVG - | COUNT - | SUM - | MIN - | MAX - | VAR_POP - | VAR_SAMP - | VARIANCE - | STD - | STDDEV - | STDDEV_POP - | STDDEV_SAMP - ; + : AVG + | COUNT + | SUM + | MIN + | MAX + | VAR_POP + | VAR_SAMP + | VARIANCE + | STD + | STDDEV + | STDDEV_POP + | STDDEV_SAMP + ; mathematicalFunctionName - : ABS - | CBRT - | CEIL - | CEILING - | CONV - | CRC32 - | E - | EXP - | EXPM1 - | FLOOR - | LN - | LOG - | LOG10 - | LOG2 - | MOD - | PI - | POW - | POWER - | RAND - | RINT - | ROUND - | SIGN - | SIGNUM - | SQRT - | TRUNCATE - | trigonometricFunctionName - | arithmeticFunctionName - ; + : ABS + | CBRT + | CEIL + | CEILING + | CONV + | CRC32 + | E + | EXP + | EXPM1 + | FLOOR + | LN + | LOG + | LOG10 + | LOG2 + | MOD + | PI + | POW + | POWER + | RAND + | RINT + | ROUND + | SIGN + | SIGNUM + | SQRT + | TRUNCATE + | trigonometricFunctionName + | arithmeticFunctionName + ; trigonometricFunctionName - : ACOS - | ASIN - | ATAN - | ATAN2 - | COS - | COSH - | COT - | DEGREES - | RADIANS - | SIN - | SINH - | TAN - ; + : ACOS + | ASIN + | ATAN + | ATAN2 + | COS + | COSH + | COT + | DEGREES + | RADIANS + | SIN + | SINH + | TAN + ; arithmeticFunctionName - : ADD - | SUBTRACT - | MULTIPLY - | DIVIDE - | MOD - | MODULUS - ; + : ADD + | SUBTRACT + | MULTIPLY + | DIVIDE + | MOD + | MODULUS + ; dateTimeFunctionName - : datetimeConstantLiteral - | ADDDATE - | ADDTIME - | CONVERT_TZ - | CURDATE - | CURTIME - | DATE - | DATE_ADD - | DATE_FORMAT - | DATE_SUB - | DATEDIFF - | DATETIME - | DAY - | DAYNAME - | DAYOFMONTH - | DAY_OF_MONTH - | DAYOFWEEK - | DAYOFYEAR - | DAY_OF_YEAR - | DAY_OF_WEEK - | FROM_DAYS - | FROM_UNIXTIME - | HOUR - | HOUR_OF_DAY - | LAST_DAY - | MAKEDATE - | MAKETIME - | MICROSECOND - | MINUTE - | MINUTE_OF_DAY - | MINUTE_OF_HOUR - | MONTH - | MONTHNAME - | MONTH_OF_YEAR - | NOW - | PERIOD_ADD - | PERIOD_DIFF - | QUARTER - | SEC_TO_TIME - | SECOND - | SECOND_OF_MINUTE - | SUBDATE - | SUBTIME - | SYSDATE - | STR_TO_DATE - | TIME - | TIME_FORMAT - | 
TIME_TO_SEC - | TIMEDIFF - | TIMESTAMP - | TO_DAYS - | TO_SECONDS - | UNIX_TIMESTAMP - | WEEK - | WEEKDAY - | WEEK_OF_YEAR - | WEEKOFYEAR - | YEAR - | YEARWEEK - ; + : datetimeConstantLiteral + | ADDDATE + | ADDTIME + | CONVERT_TZ + | CURDATE + | CURTIME + | DATE + | DATE_ADD + | DATE_FORMAT + | DATE_SUB + | DATEDIFF + | DATETIME + | DAY + | DAYNAME + | DAYOFMONTH + | DAY_OF_MONTH + | DAYOFWEEK + | DAYOFYEAR + | DAY_OF_YEAR + | DAY_OF_WEEK + | FROM_DAYS + | FROM_UNIXTIME + | HOUR + | HOUR_OF_DAY + | LAST_DAY + | MAKEDATE + | MAKETIME + | MICROSECOND + | MINUTE + | MINUTE_OF_DAY + | MINUTE_OF_HOUR + | MONTH + | MONTHNAME + | MONTH_OF_YEAR + | NOW + | PERIOD_ADD + | PERIOD_DIFF + | QUARTER + | SEC_TO_TIME + | SECOND + | SECOND_OF_MINUTE + | SUBDATE + | SUBTIME + | SYSDATE + | STR_TO_DATE + | TIME + | TIME_FORMAT + | TIME_TO_SEC + | TIMEDIFF + | TIMESTAMP + | TO_DAYS + | TO_SECONDS + | UNIX_TIMESTAMP + | WEEK + | WEEKDAY + | WEEK_OF_YEAR + | WEEKOFYEAR + | YEAR + | YEARWEEK + ; textFunctionName - : SUBSTR - | SUBSTRING - | TRIM - | LTRIM - | RTRIM - | LOWER - | UPPER - | CONCAT - | CONCAT_WS - | SUBSTR - | LENGTH - | STRCMP - | RIGHT - | LEFT - | ASCII - | LOCATE - | REPLACE - | REVERSE - ; + : SUBSTR + | SUBSTRING + | TRIM + | LTRIM + | RTRIM + | LOWER + | UPPER + | CONCAT + | CONCAT_WS + | SUBSTR + | LENGTH + | STRCMP + | RIGHT + | LEFT + | ASCII + | LOCATE + | REPLACE + | REVERSE + ; flowControlFunctionName - : IF - | IFNULL - | NULLIF - | ISNULL - ; + : IF + | IFNULL + | NULLIF + | ISNULL + ; noFieldRelevanceFunctionName - : QUERY - ; + : QUERY + ; systemFunctionName - : TYPEOF - ; + : TYPEOF + ; nestedFunctionName - : NESTED - ; + : NESTED + ; scoreRelevanceFunctionName - : SCORE | SCOREQUERY | SCORE_QUERY - ; + : SCORE + | SCOREQUERY + | SCORE_QUERY + ; singleFieldRelevanceFunctionName - : MATCH - | MATCHQUERY - | MATCH_QUERY - | MATCH_PHRASE - | MATCHPHRASE - | MATCHPHRASEQUERY - | MATCH_BOOL_PREFIX - | MATCH_PHRASE_PREFIX - | WILDCARD_QUERY - | WILDCARDQUERY - 
; + : MATCH + | MATCHQUERY + | MATCH_QUERY + | MATCH_PHRASE + | MATCHPHRASE + | MATCHPHRASEQUERY + | MATCH_BOOL_PREFIX + | MATCH_PHRASE_PREFIX + | WILDCARD_QUERY + | WILDCARDQUERY + ; multiFieldRelevanceFunctionName - : MULTI_MATCH - | MULTIMATCH - | MULTIMATCHQUERY - | SIMPLE_QUERY_STRING - | QUERY_STRING - ; + : MULTI_MATCH + | MULTIMATCH + | MULTIMATCHQUERY + | SIMPLE_QUERY_STRING + | QUERY_STRING + ; altSingleFieldRelevanceFunctionName - : MATCH_QUERY - | MATCHQUERY - | MATCH_PHRASE - | MATCHPHRASE - ; + : MATCH_QUERY + | MATCHQUERY + | MATCH_PHRASE + | MATCHPHRASE + ; altMultiFieldRelevanceFunctionName - : MULTI_MATCH - | MULTIMATCH - ; + : MULTI_MATCH + | MULTIMATCH + ; functionArgs - : (functionArg (COMMA functionArg)*)? - ; + : (functionArg (COMMA functionArg)*)? + ; functionArg - : expression - ; + : expression + ; relevanceArg - : relevanceArgName EQUAL_SYMBOL relevanceArgValue - | argName=stringLiteral EQUAL_SYMBOL argVal=relevanceArgValue - ; + : relevanceArgName EQUAL_SYMBOL relevanceArgValue + | argName = stringLiteral EQUAL_SYMBOL argVal = relevanceArgValue + ; highlightArg - : highlightArgName EQUAL_SYMBOL highlightArgValue - ; + : highlightArgName EQUAL_SYMBOL highlightArgValue + ; relevanceArgName - : ALLOW_LEADING_WILDCARD - | ANALYZER - | ANALYZE_WILDCARD - | AUTO_GENERATE_SYNONYMS_PHRASE_QUERY - | BOOST - | CASE_INSENSITIVE - | CUTOFF_FREQUENCY - | DEFAULT_FIELD - | DEFAULT_OPERATOR - | ENABLE_POSITION_INCREMENTS - | ESCAPE - | FIELDS - | FLAGS - | FUZZINESS - | FUZZY_MAX_EXPANSIONS - | FUZZY_PREFIX_LENGTH - | FUZZY_REWRITE - | FUZZY_TRANSPOSITIONS - | LENIENT - | LOW_FREQ_OPERATOR - | MAX_DETERMINIZED_STATES - | MAX_EXPANSIONS - | MINIMUM_SHOULD_MATCH - | OPERATOR - | PHRASE_SLOP - | PREFIX_LENGTH - | QUOTE_ANALYZER - | QUOTE_FIELD_SUFFIX - | REWRITE - | SLOP - | TIE_BREAKER - | TIME_ZONE - | TYPE - | ZERO_TERMS_QUERY - ; + : ALLOW_LEADING_WILDCARD + | ANALYZER + | ANALYZE_WILDCARD + | AUTO_GENERATE_SYNONYMS_PHRASE_QUERY + | BOOST + | 
CASE_INSENSITIVE + | CUTOFF_FREQUENCY + | DEFAULT_FIELD + | DEFAULT_OPERATOR + | ENABLE_POSITION_INCREMENTS + | ESCAPE + | FIELDS + | FLAGS + | FUZZINESS + | FUZZY_MAX_EXPANSIONS + | FUZZY_PREFIX_LENGTH + | FUZZY_REWRITE + | FUZZY_TRANSPOSITIONS + | LENIENT + | LOW_FREQ_OPERATOR + | MAX_DETERMINIZED_STATES + | MAX_EXPANSIONS + | MINIMUM_SHOULD_MATCH + | OPERATOR + | PHRASE_SLOP + | PREFIX_LENGTH + | QUOTE_ANALYZER + | QUOTE_FIELD_SUFFIX + | REWRITE + | SLOP + | TIE_BREAKER + | TIME_ZONE + | TYPE + | ZERO_TERMS_QUERY + ; highlightArgName - : HIGHLIGHT_POST_TAGS - | HIGHLIGHT_PRE_TAGS - ; + : HIGHLIGHT_POST_TAGS + | HIGHLIGHT_PRE_TAGS + ; relevanceFieldAndWeight - : field=relevanceField - | field=relevanceField weight=relevanceFieldWeight - | field=relevanceField BIT_XOR_OP weight=relevanceFieldWeight - ; + : field = relevanceField + | field = relevanceField weight = relevanceFieldWeight + | field = relevanceField BIT_XOR_OP weight = relevanceFieldWeight + ; relevanceFieldWeight - : realLiteral - | decimalLiteral - ; + : realLiteral + | decimalLiteral + ; relevanceField - : qualifiedName - | stringLiteral - ; + : qualifiedName + | stringLiteral + ; relevanceQuery - : relevanceArgValue - ; + : relevanceArgValue + ; relevanceArgValue - : qualifiedName - | constant - ; + : qualifiedName + | constant + ; highlightArgValue - : stringLiteral - ; + : stringLiteral + ; alternateMultiMatchArgName - : FIELDS - | QUERY - | stringLiteral - ; + : FIELDS + | QUERY + | stringLiteral + ; alternateMultiMatchQuery - : argName=alternateMultiMatchArgName EQUAL_SYMBOL argVal=relevanceArgValue - ; + : argName = alternateMultiMatchArgName EQUAL_SYMBOL argVal = relevanceArgValue + ; alternateMultiMatchField - : argName=alternateMultiMatchArgName EQUAL_SYMBOL argVal=relevanceArgValue - | argName=alternateMultiMatchArgName EQUAL_SYMBOL - LT_SQR_PRTHS argVal=relevanceArgValue RT_SQR_PRTHS - ; - - -// Identifiers + : argName = alternateMultiMatchArgName EQUAL_SYMBOL argVal = relevanceArgValue + 
| argName = alternateMultiMatchArgName EQUAL_SYMBOL LT_SQR_PRTHS argVal = relevanceArgValue RT_SQR_PRTHS + ; +// Identifiers tableName - : qualifiedName - ; + : qualifiedName + ; columnName - : qualifiedName - ; + : qualifiedName + ; allTupleFields - : path=qualifiedName DOT STAR - ; + : path = qualifiedName DOT STAR + ; alias - : ident - ; + : ident + ; qualifiedName - : ident (DOT ident)* - ; + : ident (DOT ident)* + ; ident - : DOT? ID - | BACKTICK_QUOTE_ID - | keywordsCanBeId - | scalarFunctionName - ; + : DOT? ID + | BACKTICK_QUOTE_ID + | keywordsCanBeId + | scalarFunctionName + ; keywordsCanBeId - : FULL - | FIELD | D | T | TS // OD SQL and ODBC special - | COUNT | SUM | AVG | MAX | MIN - | FIRST | LAST - | TYPE // TODO: Type is keyword required by relevancy function. Remove this when relevancy functions moved out - ; + : FULL + | FIELD + | D + | T + | TS // OD SQL and ODBC special + | COUNT + | SUM + | AVG + | MAX + | MIN + | FIRST + | LAST + | TYPE // TODO: Type is keyword required by relevancy function. Remove this when relevancy functions moved out + ;